entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "lize(\"YiujpEcIch86xDgsFFjQIsps3jvCimK9jarh4VBf\", \"GVuyJ8WqfzX8Wl2M4PCeXLiJujQvSMHpIYO260OI\");\n\n UserSch",
"end": 86,
"score": 0.6534258127212524,
"start": 82,
"tag": "KEY",
"value": "GVuy"
},
{
"context": "YiujpEcIch86xDgsFFjQIsps3jvCimK9jarh4VBf\", \"GVuyJ8WqfzX... | DB/ParseMng.coffee | juancjara/guitar-hero-firefox-os | 0 | ParseMng = (() ->
Parse.initialize("YiujpEcIch86xDgsFFjQIsps3jvCimK9jarh4VBf", "GVuyJ8WqfzX8Wl2M4PCeXLiJujQvSMHpIYO260OI");
UserSchema = Parse.Object.extend 'UserSchema'
handler = (cb) ->
res =
success: (obj) -> cb(null, obj)
error: (obj, err) -> cb(err)
handlerQuery = (cb) ->
res =
success: (obj) -> cb(null, obj)
error: (err) -> cb(err)
addUser = (name, cb) ->
query = new Parse.Query UserSchema
query.equalTo 'username', name
query.count(
success: (obj) ->
if obj > 0
err =
code: '9999'
message: 'Name already used'
return cb(err)
user = new UserSchema()
data =
username: name,
points: 0
user.save(data, handler(cb))
error: (err) ->
return cb(err)
)
updateUser = (id, points, cb) ->
query = new Parse.Query(UserSchema)
update = (err, user) ->
return cb(err) if err
user.set('points', points)
console.log('updateUser')
user.save(null, handler(cb))
query.get(id, handlerQuery(update))
getRank = (points, cb) ->
query = new Parse.Query(UserSchema)
query.descending('points')
query.greaterThan('points', points)
query.count handlerQuery(cb)
getUsers = (cb) ->
query = new Parse.Query(UserSchema)
query.limit(10)
query.descending('points')
query.find handlerQuery(cb)
getTopPlayer = (points, cb) ->
query = new Parse.Query(UserSchema)
query.limit(1)
query.greaterThan('points', points)
query.ascending('points')
query.find handlerQuery(cb)
return {
addUser: addUser
updateUser: updateUser
getUsers: getUsers
getRank: getRank
getTopPlayer: getTopPlayer
}
)()
module.exports.ParseMng = ParseMng | 20479 | ParseMng = (() ->
Parse.initialize("YiujpEcIch86xDgsFFjQIsps3jvCimK9jarh4VBf", "<KEY>J8<KEY>qf<KEY>");
UserSchema = Parse.Object.extend 'UserSchema'
handler = (cb) ->
res =
success: (obj) -> cb(null, obj)
error: (obj, err) -> cb(err)
handlerQuery = (cb) ->
res =
success: (obj) -> cb(null, obj)
error: (err) -> cb(err)
addUser = (name, cb) ->
query = new Parse.Query UserSchema
query.equalTo 'username', name
query.count(
success: (obj) ->
if obj > 0
err =
code: '9999'
message: 'Name already used'
return cb(err)
user = new UserSchema()
data =
username: name,
points: 0
user.save(data, handler(cb))
error: (err) ->
return cb(err)
)
updateUser = (id, points, cb) ->
query = new Parse.Query(UserSchema)
update = (err, user) ->
return cb(err) if err
user.set('points', points)
console.log('updateUser')
user.save(null, handler(cb))
query.get(id, handlerQuery(update))
getRank = (points, cb) ->
query = new Parse.Query(UserSchema)
query.descending('points')
query.greaterThan('points', points)
query.count handlerQuery(cb)
getUsers = (cb) ->
query = new Parse.Query(UserSchema)
query.limit(10)
query.descending('points')
query.find handlerQuery(cb)
getTopPlayer = (points, cb) ->
query = new Parse.Query(UserSchema)
query.limit(1)
query.greaterThan('points', points)
query.ascending('points')
query.find handlerQuery(cb)
return {
addUser: addUser
updateUser: updateUser
getUsers: getUsers
getRank: getRank
getTopPlayer: getTopPlayer
}
)()
module.exports.ParseMng = ParseMng | true | ParseMng = (() ->
Parse.initialize("YiujpEcIch86xDgsFFjQIsps3jvCimK9jarh4VBf", "PI:KEY:<KEY>END_PIJ8PI:KEY:<KEY>END_PIqfPI:KEY:<KEY>END_PI");
UserSchema = Parse.Object.extend 'UserSchema'
handler = (cb) ->
res =
success: (obj) -> cb(null, obj)
error: (obj, err) -> cb(err)
handlerQuery = (cb) ->
res =
success: (obj) -> cb(null, obj)
error: (err) -> cb(err)
addUser = (name, cb) ->
query = new Parse.Query UserSchema
query.equalTo 'username', name
query.count(
success: (obj) ->
if obj > 0
err =
code: '9999'
message: 'Name already used'
return cb(err)
user = new UserSchema()
data =
username: name,
points: 0
user.save(data, handler(cb))
error: (err) ->
return cb(err)
)
updateUser = (id, points, cb) ->
query = new Parse.Query(UserSchema)
update = (err, user) ->
return cb(err) if err
user.set('points', points)
console.log('updateUser')
user.save(null, handler(cb))
query.get(id, handlerQuery(update))
getRank = (points, cb) ->
query = new Parse.Query(UserSchema)
query.descending('points')
query.greaterThan('points', points)
query.count handlerQuery(cb)
getUsers = (cb) ->
query = new Parse.Query(UserSchema)
query.limit(10)
query.descending('points')
query.find handlerQuery(cb)
getTopPlayer = (points, cb) ->
query = new Parse.Query(UserSchema)
query.limit(1)
query.greaterThan('points', points)
query.ascending('points')
query.find handlerQuery(cb)
return {
addUser: addUser
updateUser: updateUser
getUsers: getUsers
getRank: getRank
getTopPlayer: getTopPlayer
}
)()
module.exports.ParseMng = ParseMng |
[
{
"context": " extensions unit tests\n#\n# Copyright (C) 2011-2013 Nikolay Nemshilov\n#\nLovely = require('lovely')\n{Test, assert} = Lov",
"end": 82,
"score": 0.9998872876167297,
"start": 65,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/lang/test/number_test.coffee | lovely-io/lovely.io-stl | 2 | #
# The string extensions unit tests
#
# Copyright (C) 2011-2013 Nikolay Nemshilov
#
Lovely = require('lovely')
{Test, assert} = Lovely
eval(Test.build)
describe "Number extensions", ->
describe "#abs()", ->
it "should return the number itself for positive values",->
num = 16
assert.equal num, num.abs()
it "should return the absolute value for negative numbers",->
num = -16
assert.equal 16, num.abs()
describe "#round()", ->
it "should round 4.4 to 4", ->
assert.equal 4, 4.4.round()
it "should round 4.6 to 5", ->
assert.equal 5, 4.6.round()
it "should round a float to given size", ->
assert.equal 4.44, 4.4444.round(2)
assert.equal 4.45, 4.4466.round(2)
describe "#ceil()", ->
it "should always round the number to biggest integer", ->
assert.equal 5, 4.6.ceil()
assert.equal 5, 4.1.ceil()
describe "#floor()", ->
it "should always round the number to smallest integer", ->
assert.equal 4, 4.1.floor()
assert.equal 4, 4.9.floor()
describe "#min(value)", ->
it "should return the number itself when it's bigger than the limit", ->
assert.equal 4.44, 4.44.min(4)
it "should return the limit when the number is smaller than it", ->
assert.equal 4, 3.33.min(4)
describe "#max(value)", ->
it "should return the number itself when it's smaller than the limit", ->
assert.equal 2.22, 2.22.max(4)
it "should return the limit value if the number is bigger than that", ->
assert.equal 4, 4.44.max(4)
describe "#times(callback, scope)", ->
it "should call the callback with every number from 0 to it's value", ->
numbers = []
4.0.times (i)-> numbers.push(i)
assert.deepEqual [0,1,2,3], numbers
describe "#map(callback, scope)", ->
it "should return a list of results of calls on the callback", ->
assert.deepEqual [0,2,4,6], 4.0.map (i)-> i * 2
| 137400 | #
# The string extensions unit tests
#
# Copyright (C) 2011-2013 <NAME>
#
Lovely = require('lovely')
{Test, assert} = Lovely
eval(Test.build)
describe "Number extensions", ->
describe "#abs()", ->
it "should return the number itself for positive values",->
num = 16
assert.equal num, num.abs()
it "should return the absolute value for negative numbers",->
num = -16
assert.equal 16, num.abs()
describe "#round()", ->
it "should round 4.4 to 4", ->
assert.equal 4, 4.4.round()
it "should round 4.6 to 5", ->
assert.equal 5, 4.6.round()
it "should round a float to given size", ->
assert.equal 4.44, 4.4444.round(2)
assert.equal 4.45, 4.4466.round(2)
describe "#ceil()", ->
it "should always round the number to biggest integer", ->
assert.equal 5, 4.6.ceil()
assert.equal 5, 4.1.ceil()
describe "#floor()", ->
it "should always round the number to smallest integer", ->
assert.equal 4, 4.1.floor()
assert.equal 4, 4.9.floor()
describe "#min(value)", ->
it "should return the number itself when it's bigger than the limit", ->
assert.equal 4.44, 4.44.min(4)
it "should return the limit when the number is smaller than it", ->
assert.equal 4, 3.33.min(4)
describe "#max(value)", ->
it "should return the number itself when it's smaller than the limit", ->
assert.equal 2.22, 2.22.max(4)
it "should return the limit value if the number is bigger than that", ->
assert.equal 4, 4.44.max(4)
describe "#times(callback, scope)", ->
it "should call the callback with every number from 0 to it's value", ->
numbers = []
4.0.times (i)-> numbers.push(i)
assert.deepEqual [0,1,2,3], numbers
describe "#map(callback, scope)", ->
it "should return a list of results of calls on the callback", ->
assert.deepEqual [0,2,4,6], 4.0.map (i)-> i * 2
| true | #
# The string extensions unit tests
#
# Copyright (C) 2011-2013 PI:NAME:<NAME>END_PI
#
Lovely = require('lovely')
{Test, assert} = Lovely
eval(Test.build)
describe "Number extensions", ->
describe "#abs()", ->
it "should return the number itself for positive values",->
num = 16
assert.equal num, num.abs()
it "should return the absolute value for negative numbers",->
num = -16
assert.equal 16, num.abs()
describe "#round()", ->
it "should round 4.4 to 4", ->
assert.equal 4, 4.4.round()
it "should round 4.6 to 5", ->
assert.equal 5, 4.6.round()
it "should round a float to given size", ->
assert.equal 4.44, 4.4444.round(2)
assert.equal 4.45, 4.4466.round(2)
describe "#ceil()", ->
it "should always round the number to biggest integer", ->
assert.equal 5, 4.6.ceil()
assert.equal 5, 4.1.ceil()
describe "#floor()", ->
it "should always round the number to smallest integer", ->
assert.equal 4, 4.1.floor()
assert.equal 4, 4.9.floor()
describe "#min(value)", ->
it "should return the number itself when it's bigger than the limit", ->
assert.equal 4.44, 4.44.min(4)
it "should return the limit when the number is smaller than it", ->
assert.equal 4, 3.33.min(4)
describe "#max(value)", ->
it "should return the number itself when it's smaller than the limit", ->
assert.equal 2.22, 2.22.max(4)
it "should return the limit value if the number is bigger than that", ->
assert.equal 4, 4.44.max(4)
describe "#times(callback, scope)", ->
it "should call the callback with every number from 0 to it's value", ->
numbers = []
4.0.times (i)-> numbers.push(i)
assert.deepEqual [0,1,2,3], numbers
describe "#map(callback, scope)", ->
it "should return a list of results of calls on the callback", ->
assert.deepEqual [0,2,4,6], 4.0.map (i)-> i * 2
|
[
{
"context": "nie.ca',\n\t\t\t\tcredential: 'muazkh',\n\t\t\t\tusername: 'webrtc@live.com'\n\t\t\t},\n\t\t\t{\n\t\t\t\turl: 'turn:192.158.29.39:3478?tra",
"end": 1432,
"score": 0.9998894929885864,
"start": 1417,
"tag": "EMAIL",
"value": "webrtc@live.com"
},
{
"context": "name: 'webrt... | src/coffee/common.coffee | DakEnviy/taten | 0 | # Requires
PeerServer = require('peer').PeerServer
# Classes
server = PeerServer({port: 9000, path: '/taten'})
# Methods
getUserMedia = (callback) ->
browserUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia
return callback "Your browser doesn`t support WebRTC" if not browserUserMedia
browserUserMedia.bind(navigator)
"audio": true
"video": true
(stream) ->
callback null, stream
(err) ->
callback err
# Main Code
peer = new Peer "2",
"host": "localhost"
"port": 9000
"path": "/taten"
"config":
"iceServers": [
{url:'stun:stun01.sipphone.com'},
{url:'stun:stun.ekiga.net'},
{url:'stun:stun.fwdnet.net'},
{url:'stun:stun.ideasip.com'},
{url:'stun:stun.iptel.org'},
{url:'stun:stun.rixtelecom.se'},
{url:'stun:stun.schlund.de'},
{url:'stun:stun.l.google.com:19302'},
{url:'stun:stun1.l.google.com:19302'},
{url:'stun:stun2.l.google.com:19302'},
{url:'stun:stun3.l.google.com:19302'},
{url:'stun:stun4.l.google.com:19302'},
{url:'stun:stunserver.org'},
{url:'stun:stun.softjoys.com'},
{url:'stun:stun.voiparound.com'},
{url:'stun:stun.voipbuster.com'},
{url:'stun:stun.voipstunt.com'},
{url:'stun:stun.voxgratia.org'},
{url:'stun:stun.xten.com'},
{
url: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: 'webrtc@live.com'
},
{
url: 'turn:192.158.29.39:3478?transport=udp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
},
{
url: 'turn:192.158.29.39:3478?transport=tcp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
}
]
peer.on "open", (id) ->
console.log "Connecting to the server with id: #{id}"
if id != 1
getUserMedia (err, stream) ->
return console.log err if err
call = peer.call "1", stream
call.on "stream", (cStream) ->
console.log "test"
$("video").attr "src", URL.createObjectURL cStream
call.on "error", (err) ->
console.log err
peer.on "call", (call) ->
console.log "connect"
getUserMedia (err, stream) ->
return console.log err if err
call.answer stream
call.on "stream", (cStream) ->
$("video").attr "src", URL.createObjectURL cStream
call.on "error", (err) ->
console.log err
peer.on "error", (err) ->
console.log err
| 27867 | # Requires
PeerServer = require('peer').PeerServer
# Classes
server = PeerServer({port: 9000, path: '/taten'})
# Methods
getUserMedia = (callback) ->
browserUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia
return callback "Your browser doesn`t support WebRTC" if not browserUserMedia
browserUserMedia.bind(navigator)
"audio": true
"video": true
(stream) ->
callback null, stream
(err) ->
callback err
# Main Code
peer = new Peer "2",
"host": "localhost"
"port": 9000
"path": "/taten"
"config":
"iceServers": [
{url:'stun:stun01.sipphone.com'},
{url:'stun:stun.ekiga.net'},
{url:'stun:stun.fwdnet.net'},
{url:'stun:stun.ideasip.com'},
{url:'stun:stun.iptel.org'},
{url:'stun:stun.rixtelecom.se'},
{url:'stun:stun.schlund.de'},
{url:'stun:stun.l.google.com:19302'},
{url:'stun:stun1.l.google.com:19302'},
{url:'stun:stun2.l.google.com:19302'},
{url:'stun:stun3.l.google.com:19302'},
{url:'stun:stun4.l.google.com:19302'},
{url:'stun:stunserver.org'},
{url:'stun:stun.softjoys.com'},
{url:'stun:stun.voiparound.com'},
{url:'stun:stun.voipbuster.com'},
{url:'stun:stun.voipstunt.com'},
{url:'stun:stun.voxgratia.org'},
{url:'stun:stun.xten.com'},
{
url: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: '<EMAIL>'
},
{
url: 'turn:172.16.58.3:3478?transport=udp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
},
{
url: 'turn:172.16.58.3:3478?transport=tcp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
}
]
peer.on "open", (id) ->
console.log "Connecting to the server with id: #{id}"
if id != 1
getUserMedia (err, stream) ->
return console.log err if err
call = peer.call "1", stream
call.on "stream", (cStream) ->
console.log "test"
$("video").attr "src", URL.createObjectURL cStream
call.on "error", (err) ->
console.log err
peer.on "call", (call) ->
console.log "connect"
getUserMedia (err, stream) ->
return console.log err if err
call.answer stream
call.on "stream", (cStream) ->
$("video").attr "src", URL.createObjectURL cStream
call.on "error", (err) ->
console.log err
peer.on "error", (err) ->
console.log err
| true | # Requires
PeerServer = require('peer').PeerServer
# Classes
server = PeerServer({port: 9000, path: '/taten'})
# Methods
getUserMedia = (callback) ->
browserUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia ||
navigator.mozGetUserMedia ||
navigator.msGetUserMedia
return callback "Your browser doesn`t support WebRTC" if not browserUserMedia
browserUserMedia.bind(navigator)
"audio": true
"video": true
(stream) ->
callback null, stream
(err) ->
callback err
# Main Code
peer = new Peer "2",
"host": "localhost"
"port": 9000
"path": "/taten"
"config":
"iceServers": [
{url:'stun:stun01.sipphone.com'},
{url:'stun:stun.ekiga.net'},
{url:'stun:stun.fwdnet.net'},
{url:'stun:stun.ideasip.com'},
{url:'stun:stun.iptel.org'},
{url:'stun:stun.rixtelecom.se'},
{url:'stun:stun.schlund.de'},
{url:'stun:stun.l.google.com:19302'},
{url:'stun:stun1.l.google.com:19302'},
{url:'stun:stun2.l.google.com:19302'},
{url:'stun:stun3.l.google.com:19302'},
{url:'stun:stun4.l.google.com:19302'},
{url:'stun:stunserver.org'},
{url:'stun:stun.softjoys.com'},
{url:'stun:stun.voiparound.com'},
{url:'stun:stun.voipbuster.com'},
{url:'stun:stun.voipstunt.com'},
{url:'stun:stun.voxgratia.org'},
{url:'stun:stun.xten.com'},
{
url: 'turn:numb.viagenie.ca',
credential: 'muazkh',
username: 'PI:EMAIL:<EMAIL>END_PI'
},
{
url: 'turn:PI:IP_ADDRESS:172.16.58.3END_PI:3478?transport=udp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
},
{
url: 'turn:PI:IP_ADDRESS:172.16.58.3END_PI:3478?transport=tcp',
credential: 'JZEOEt2V3Qb0y27GRntt2u2PAYA=',
username: '28224511:1379330808'
}
]
peer.on "open", (id) ->
console.log "Connecting to the server with id: #{id}"
if id != 1
getUserMedia (err, stream) ->
return console.log err if err
call = peer.call "1", stream
call.on "stream", (cStream) ->
console.log "test"
$("video").attr "src", URL.createObjectURL cStream
call.on "error", (err) ->
console.log err
peer.on "call", (call) ->
console.log "connect"
getUserMedia (err, stream) ->
return console.log err if err
call.answer stream
call.on "stream", (cStream) ->
$("video").attr "src", URL.createObjectURL cStream
call.on "error", (err) ->
console.log err
peer.on "error", (err) ->
console.log err
|
[
{
"context": " co =>\n yield @room.user.say 'alice', '@hubot show open bosun incidents'\n ",
"end": 762,
"score": 0.6915292143821716,
"start": 757,
"tag": "NAME",
"value": "alice"
},
{
"context": " expect(@room.messages).to.eql [\n ['alice... | test/bosun-test.coffee | lukaspustina/hubot-bosun | 1 | Helper = require('hubot-test-helper')
chai = require 'chai'
auth = require 'hubot-auth'
Promise = require('bluebird')
co = require('co')
http = require 'http'
expect = chai.expect
process.env.EXPRESS_PORT = 18080
api_call_delay = 20
customMessages = []
describe 'bosun without authorization', ->
beforeEach ->
[@room, @bosun] = setup_test_env {
hubot_bosun_host: "http://localhost:18070"
hubot_bosun_slack: "no"
hubot_bosun_log_level: "error"
hubot_bosun_relaTive_time: "no"
}
afterEach ->
tear_down_test_env(@room, @bosun)
context "incidents", ->
context "show incidents", ->
context "show incidents for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot show open bosun incidents'
yield new Promise.delay api_call_delay
it 'show bosun incidents', ->
expect(@room.messages).to.eql [
['alice', '@hubot show open bosun incidents']
['hubot', '@alice Retrieving Bosun incidents ...']
['hubot', '@alice Yippie. Done.']
['hubot', '@alice So, there are currently 2 open incidents in Bosun.']
['hubot', '@alice 759 is normal: warning: <no value>.']
['hubot', '@alice 750 is warning: warning: <no value>.']
]
context "succeed even if unauthorized", ->
beforeEach ->
co =>
yield @room.user.say 'bob', '@hubot show open bosun incidents'
yield new Promise.delay api_call_delay
it 'show open bosun incidents for unauthorized bob', ->
expect(@room.messages).to.eql [
['bob', '@hubot show open bosun incidents']
['hubot', '@bob Retrieving Bosun incidents ...']
['hubot', '@bob Yippie. Done.']
['hubot', '@bob So, there are currently 2 open incidents in Bosun.']
['hubot', '@bob 759 is normal: warning: <no value>.']
['hubot', '@bob 750 is warning: warning: <no value>.']
]
describe 'bosun', ->
beforeEach ->
[@room, @bosun] = setup_test_env {
hubot_bosun_host: "http://localhost:18070"
hubot_bosun_role: "bosun"
hubot_bosun_slack: "no"
hubot_bosun_log_level: "error"
hubot_bosun_relaTive_time: "no"
}
afterEach ->
tear_down_test_env(@room, @bosun)
context "incidents", ->
context "show incidents", ->
context "show incidents for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot show open bosun incidents'
yield new Promise.delay api_call_delay
it 'show bosun incidents', ->
expect(@room.messages).to.eql [
['alice', '@hubot show open bosun incidents']
['hubot', '@alice Retrieving Bosun incidents ...']
['hubot', '@alice Yippie. Done.']
['hubot', '@alice So, there are currently 2 open incidents in Bosun.']
['hubot', '@alice 759 is normal: warning: <no value>.']
['hubot', '@alice 750 is warning: warning: <no value>.']
]
context "Fail if unauthorized", ->
it 'show open bosun incidents for unauthorized bob', ->
@room.user.say('bob', '@hubot show open bosun incidents').then =>
expect(@room.messages).to.eql [
['bob', '@hubot show open bosun incidents']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "ack and close incidents", ->
context "ack single incident", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot ack bosun incident 123 because it is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarm', ->
expect(@room.messages).to.eql [
['alice', '@hubot ack bosun incident 123 because it is normal again.']
['hubot', '@alice Trying to ack Bosun incident 123 ...']
['hubot', '@alice Yippie. Done.']
]
context "fail to ack single incident", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot ack bosun incident 321 because it is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarm', ->
expect(@room.messages).to.eql [
['alice', '@hubot ack bosun incident 321 because it is normal again.']
['hubot', '@alice Trying to ack Bosun incident 321 ...']
['hubot', '@alice Bosun couldn\'t deal with that; maybe the incident doesn\'t exists or is still active? I suggest, you list the now open incidents. That\'s what Bosun told me: ```\nundefined\n```']
]
context "Ack (with capital 'A') single incident", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot Ack bosun incident 123 because it is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarm', ->
expect(@room.messages).to.eql [
['alice', '@hubot Ack bosun incident 123 because it is normal again.']
['hubot', '@alice Trying to ack Bosun incident 123 ...']
['hubot', '@alice Yippie. Done.']
]
context "ack multiple incidents", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot ack bosun incidents 123,234 because State is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarms', ->
expect(@room.messages).to.eql [
['alice', '@hubot ack bosun incidents 123,234 because State is normal again.']
['hubot', '@alice Trying to ack Bosun incidents 123,234 ...']
['hubot', '@alice Yippie. Done.']
]
context "Fail if unauthorized", ->
it 'ack bosun incident for unauthorized bob', ->
@room.user.say('bob', '@hubot ack bosun incident 123 because it is over.').then =>
expect(@room.messages).to.eql [
['bob', '@hubot ack bosun incident 123 because it is over.']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "silences", ->
context "show silences", ->
context "show silences for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot show bosun silences'
yield new Promise.delay api_call_delay
it 'show bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot show bosun silences']
['hubot', '@alice Retrieving Bosun silences ...']
['hubot', '@alice Yippie. Done.']
['hubot', '@alice So, there are currently 2 active silences in Bosun.']
['hubot', "@alice Silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 from 2016-07-04 15:18:03 UTC until 2016-07-04 16:18:03 UTC for tags host=cake,service=lukas and alert '' because Reboot"]
['hubot', "@alice Silence dd406bdce72df2e8c69b5ee396126a7ed8f3bf44 from 2016-07-04 15:16:18 UTC until 2016-07-04 16:16:18 UTC for tags host=muffin,service=lukas and alert 'test.lukas' because Deployment"]
]
context "Fail if unauthorized", ->
it 'show bosun silences for unauthorized bob', ->
@room.user.say('bob', '@hubot show bosun silences').then =>
expect(@room.messages).to.eql [
['bob', '@hubot show bosun silences']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "set|test silences", ->
context "test silence with alert and tags for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'test bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
['hubot', '@alice Yippie. Done. That alarm will work.']
]
context "fail to test silence with alert and tags for authorized use", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment'
yield new Promise.delay api_call_delay
it 'test bosun silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment']
['hubot', "@alice Trying to test Bosun silence for alert 'test.fail' and tags {host:muffin,service:lukas} for 1h ..."]
['hubot', '@alice Bosun couldn\'t deal with that. I suggest, you list the active silences now. That\'s what Bosun told me: ```\nundefined\n```']
]
context "test silence with alert only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'test bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.lukas for 1h because Deployment.']
['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' for 1h ..."]
['hubot', '@alice Yippie. Done. That alarm will work.']
]
context "test silence with tags only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'test bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for host=muffin,service=lukas for 1h because Deployment.']
['hubot', '@alice Trying to test Bosun silence for tags {host:muffin,service:lukas} for 1h ...']
['hubot', '@alice Yippie. Done. That alarm will work.']
]
context "set silence with alert and tags for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot set bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'set bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot set bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
['hubot', "@alice Trying to set Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
['hubot', '@alice Yippie. Done. Admire your alarm at http://localhost:18070/silence.']
]
context "set silence with alert only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot set bosun silence for alert=test.lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'set bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot set bosun silence for alert=test.lukas for 1h because Deployment.']
['hubot', "@alice Trying to set Bosun silence for alert 'test.lukas' for 1h ..."]
['hubot', '@alice Yippie. Done. Admire your alarm at http://localhost:18070/silence.']
]
context "set silence with tags only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot set bosun silence for host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'set bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot set bosun silence for host=muffin,service=lukas for 1h because Deployment.']
['hubot', '@alice Trying to set Bosun silence for tags {host:muffin,service:lukas} for 1h ...']
['hubot', '@alice Yippie. Done. Admire your alarm at http://localhost:18070/silence.']
]
context "Fail if unauthorized", ->
it 'set bosun silences for unauthorized bob', ->
@room.user.say('bob', '@hubot set bosun silence for alert=test.lukas,host=muffin for 1h because Deployment.').then =>
expect(@room.messages).to.eql [
['bob', '@hubot set bosun silence for alert=test.lukas,host=muffin for 1h because Deployment.']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "clear silences", ->
context "clear silence for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7'
yield new Promise.delay api_call_delay
it 'clear bosun silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', '@alice Trying to clear Bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 ...']
['hubot', '@alice Yippie. Done.']
]
context "fail to clear silence for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
yield new Promise.delay api_call_delay
it 'clear silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', '@alice Trying to clear Bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7 ...']
['hubot', "@alice Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"]
]
context "Fail if unauthorized", ->
it 'clear bosun silence for unauthorized bob', ->
@room.user.say('bob', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7').then =>
expect(@room.messages).to.eql [
['bob', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
# With hubot_bosun_slack=yes the script sends rich Slack attachments via the
# adapter's customMessage; we stub that hook and assert on the captured payloads.
describe 'bosun with Slack', ->
  beforeEach ->
    [@room, @bosun] = setup_test_env {
      hubot_bosun_host: "http://localhost:18070"
      hubot_bosun_role: "bosun"
      hubot_bosun_slack: "yes"
      hubot_bosun_log_level: "error"
      hubot_bosun_relaTive_time: "no"
    }
    # Reset the capture buffer and intercept Slack-specific messages.
    customMessages = []
    @room.robot.adapter.customMessage = (msg) -> customMessages.push msg
  afterEach ->
    tear_down_test_env(@room, @bosun)
  context "incidents", ->
    context "show incidents", ->
      context "show incidents for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot show open bosun incidents'
            yield new Promise.delay api_call_delay
        it 'show bosun incidents', ->
          # Plain chat only carries the progress messages ...
          expect(@room.messages).to.eql [
            ['alice', '@hubot show open bosun incidents']
            ['hubot', '@alice Retrieving Bosun incidents ...']
            ['hubot', '@alice Yippie. Done.']
          ]
          # ... while the incident details go out as one Slack attachment each.
          expect(customMessages[0]).to.eql {
            channel: "room1"
            text: "So, there are currently 2 open incidents in Bosun."
            attachments: [
              {
                fallback: "Incident 759 is normal"
                color: "good"
                title: "759: warning: <no value>",
                title_link: "http://localhost:18070/incident?id=759"
                text: "Acked and active since 2016-07-01 09:05:58 UTC with _{}_.\n* lukas acknowledged this incident at 2016-07-01 22:16:37 UTC."
                mrkdwn_in: ["text"]
              }, {
                fallback:"Incident 750 is warning"
                color: "warning"
                title: "750: warning: <no value>"
                title_link: "http://localhost:18070/incident?id=750"
                text: "*Unacked* and active since 2016-07-01 09:05:58 UTC with _{}_."
                mrkdwn_in: ["text"]
              }
            ]
          }
      context "fail to ack single incident", ->
        beforeEach ->
          co =>
            # Incident 321 is unknown to the mock server; the ack gets a 500.
            yield @room.user.say 'alice', '@hubot ack bosun incident 321 because it is normal again.'
            yield new Promise.delay api_call_delay
        it 'ack bosun alarm', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot ack bosun incident 321 because it is normal again.']
            ['hubot', '@alice Trying to ack Bosun incident 321 ...']
          ]
          expect(customMessages[0]).to.eql {
            channel: "room1"
            attachments: [
              {
                color: "danger"
                fallback: "Bosun couldn't deal with that; maybe the incident doesn't exists or is still active? I suggest, you list the now open incidents. That's what Bosun told me: ```\nundefined\n```"
                mrkdwn_in: [ "text" ]
                text: "Bosun couldn't deal with that; maybe the incident doesn't exists or is still active? I suggest, you list the now open incidents. That's what Bosun told me: ```\nundefined\n```"
                title: "Argh. Failed to deal with Bosun's answer."
              }
            ]
          }
  context "silences", ->
    context "show silences for authorized user", ->
      beforeEach ->
        co =>
          yield @room.user.say 'alice', '@hubot show bosun silences'
          yield new Promise.delay api_call_delay
      it 'show bosun silences', ->
        expect(@room.messages).to.eql [
          ['alice', '@hubot show bosun silences']
          ['hubot', '@alice Retrieving Bosun silences ...']
          ['hubot', '@alice Yippie. Done.']
        ]
        # NOTE(review): "Slience" is the spelling the script under test emits.
        expect(customMessages[0]).to.eql {
          channel: "room1"
          text: "So, there are currently 2 active silences in Bosun."
          attachments: [
            {
              color: "danger"
              fallback: "Slience 6e89533c74c3f9b74417b37e7cce75c384d29dc7 is active."
              mrkdwn_in: [ "text" ]
              text: "Active from 2016-07-04 15:18:03 UTC until 2016-07-04 16:18:03 UTC\nMessage: _Reboot_\nTags: host=cake,service=lukas\nId: 6e89533c74c3f9b74417b37e7cce75c384d29dc7"
              title: "Slience is active."
              title_link: "http://localhost:18070/silence"
            }
            {
              color: "danger"
              fallback: "Slience dd406bdce72df2e8c69b5ee396126a7ed8f3bf44 is active."
              mrkdwn_in: [ "text" ]
              text: "Active from 2016-07-04 15:16:18 UTC until 2016-07-04 16:16:18 UTC\nMessage: _Deployment_\nAlert: test.lukas\nTags: host=muffin,service=lukas\nId: dd406bdce72df2e8c69b5ee396126a7ed8f3bf44"
              title: "Slience is active."
              title_link: "http://localhost:18070/silence"
            }
          ]
        }
    context "test silence with alert and tags for authorized user", ->
      beforeEach ->
        co =>
          yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
          yield new Promise.delay api_call_delay
      it 'test bosun silences', ->
        expect(@room.messages).to.eql [
          ['alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
          ['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
          ['hubot', '@alice Yippie. Done. That alarm will work.']
        ]
    context "fail to test silence with alert and tags for authorized use", ->
      beforeEach ->
        co =>
          # alert 'test.fail' makes the mock /api/silence/set answer 500.
          yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment'
          yield new Promise.delay api_call_delay
      it 'test silence', ->
        expect(@room.messages).to.eql [
          ['alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment']
          ['hubot', "@alice Trying to test Bosun silence for alert 'test.fail' and tags {host:muffin,service:lukas} for 1h ..."]
        ]
        expect(customMessages[0]).to.eql {
          channel: "room1"
          attachments: [
            {
              color: "danger"
              fallback: "Bosun couldn't deal with that. I suggest, you list the active silences now. That's what Bosun told me: ```\nundefined\n```"
              mrkdwn_in: [ "text" ]
              text: "Bosun couldn't deal with that. I suggest, you list the active silences now. That's what Bosun told me: ```\nundefined\n```"
              title: "Argh. Failed to deal with Bosun's answer."
            }
          ]
        }
    context "clear silences", ->
      context "clear silence for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7'
            yield new Promise.delay api_call_delay
        it 'clear bosun silence', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
            ['hubot', '@alice Trying to clear Bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 ...']
            ['hubot', '@alice Yippie. Done.']
          ]
      context "fail to clear silence for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
            yield new Promise.delay api_call_delay
        it 'clear bosun silence', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7']
            ['hubot', '@alice Trying to clear Bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7 ...']
          ]
          expect(customMessages[0]).to.eql {
            channel: "room1"
            attachments: [
              {
                color: "danger"
                fallback: "Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"
                mrkdwn_in: [ "text" ]
                text: "Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"
                title: "Argh. Failed to deal with Bosun's answer."
              }
            ]
          }
# Exercises the script's programmatic event API: other scripts emit
# 'bosun.set_silence' / 'bosun.clear_silence' / 'bosun.check_silence' and
# receive 'bosun.result.<op>.successful|failed' events back. Results are
# stashed in the robot brain so the `it` blocks can assert on them.
describe 'bosun events', ->
  beforeEach ->
    [@room, @bosun] = setup_test_env {
      hubot_bosun_host: "http://localhost:18070"
      hubot_bosun_role: "bosun"
      hubot_bosun_slack: "no"
      hubot_bosun_log_level: "error"
      hubot_bosun_relaTive_time: "no"
    }
  afterEach ->
    tear_down_test_env(@room, @bosun)
  context "set_silence", ->
    context "set_silence successfully", ->
      beforeEach ->
        # Capture @room.robot in a local; the event handler has its own `this`.
        robot = @room.robot
        @room.robot.on 'bosun.result.set_silence.successful', (event) ->
          robot.brain.set 'test.bosun.result.set_silence', event
        @room.robot.emit 'bosun.set_silence', {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          duration: "10m"
          alert: "test.lukas"
          tags: "host=muffin,service=lukas"
          message: "I need some rest time"
          forget: "true"
        }
        co =>
          yield new Promise.delay api_call_delay
      it "on set_silence", ->
        event = @room.robot.brain.get 'test.bosun.result.set_silence'
        expect(event).not.to.eql null
        expect(event).to.eql {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          duration: "10m"
          silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
        }
    context "set_silence failed", ->
      beforeEach ->
        robot = @room.robot
        @room.robot.on 'bosun.result.set_silence.failed', (event) ->
          robot.brain.set 'test.bosun.result.set_silence', event
        # alert 'test.fail' makes the mock server answer 500.
        @room.robot.emit 'bosun.set_silence', {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          duration: "10m"
          alert: "test.fail"
          tags: "host=muffin,service=lukas"
          message: "I need some rest time"
          forget: "true"
        }
        co =>
          yield new Promise.delay api_call_delay
      it "on set_silence", ->
        event = @room.robot.brain.get 'test.bosun.result.set_silence'
        expect(event).not.to.eql null
        expect(event).to.eql {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          message: "API call failed with status code 500."
        }
  context "clear_silence", ->
    context "clear_silence successfully", ->
      beforeEach ->
        robot = @room.robot
        @room.robot.on 'bosun.result.clear_silence.successful', (event) ->
          robot.brain.set 'test.bosun.result.clear_silence', event
        @room.robot.emit 'bosun.clear_silence', {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
        }
        co =>
          yield new Promise.delay api_call_delay
      it "on clear_silence", ->
        event = @room.robot.brain.get 'test.bosun.result.clear_silence'
        expect(event).not.to.eql null
        expect(event).to.eql {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
        }
    context "clear_silence failed", ->
      beforeEach ->
        robot = @room.robot
        @room.robot.on 'bosun.result.clear_silence.failed', (event) ->
          robot.brain.set 'test.bosun.result.clear_silence', event
        # The 'xxx…' id is rejected by the mock server with a 500.
        @room.robot.emit 'bosun.clear_silence', {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          silence_id: "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
        }
        co =>
          yield new Promise.delay api_call_delay
      it "on clear_silence", ->
        event = @room.robot.brain.get 'test.bosun.result.clear_silence'
        expect(event).not.to.eql null
        expect(event).to.eql {
          user:
            id: 'alice'
            name: 'alice'
            room: "a room"
          silence_id: "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
          message: "API call failed with status code 500."
        }
  context "check_silence", ->
    context "check_silence successfully", ->
      context "silence is still active", ->
        beforeEach ->
          robot = @room.robot
          @room.robot.on 'bosun.result.check_silence.successful', (event) ->
            robot.brain.set 'test.bosun.result.check_silence', event
          @room.robot.emit 'bosun.check_silence', {
            user:
              id: 'alice'
              name: 'alice'
              room: "a room"
            silence_id: '6e89533c74c3f9b74417b37e7cce75c384d29dc7'
          }
          co =>
            yield new Promise.delay api_call_delay
        it "on check_silence", ->
          event = @room.robot.brain.get 'test.bosun.result.check_silence'
          expect(event).not.to.eql null
          expect(event).to.eql {
            user:
              id: 'alice'
              name: 'alice'
              room: "a room"
            silence_id: '6e89533c74c3f9b74417b37e7cce75c384d29dc7'
            active: true
          }
      context "silence is not actice anymore", ->
        beforeEach ->
          robot = @room.robot
          @room.robot.on 'bosun.result.check_silence.successful', (event) ->
            robot.brain.set 'test.bosun.result.check_silence', event
          # Unknown id: the check succeeds but reports the silence inactive.
          @room.robot.emit 'bosun.check_silence', {
            user:
              id: 'alice'
              name: 'alice'
              room: "a room"
            silence_id: 'xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
          }
          co =>
            yield new Promise.delay api_call_delay
        it "on check_silence", ->
          event = @room.robot.brain.get 'test.bosun.result.check_silence'
          expect(event).not.to.eql null
          expect(event).to.eql {
            user:
              id: 'alice'
              name: 'alice'
              room: "a room"
            silence_id: 'xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
            active: false
          }
# Builds a fresh test environment for one suite:
#  - exports the given settings as HUBOT_BOSUN_* environment variables,
#  - loads the script under test into a new hubot-test-helper room,
#  - installs the MockAuth authorizer,
#  - starts the mock Bosun HTTP server on 127.0.0.1:18070.
# Returns [room, bosunServer]; pass both to tear_down_test_env afterwards.
setup_test_env = ( env ) ->
  process.env.HUBOT_BOSUN_HOST = env.hubot_bosun_host
  process.env.HUBOT_BOSUN_ROLE = env.hubot_bosun_role or ""
  process.env.HUBOT_BOSUN_SLACK = env.hubot_bosun_slack
  process.env.HUBOT_BOSUN_LOG_LEVEL = env.hubot_bosun_log_level
  # Accept both the historical key with the stray capital 'T' (used throughout
  # this file) and the conventional all-lowercase spelling.
  process.env.HUBOT_BOSUN_RELATIVE_TIME = env.hubot_bosun_relaTive_time ? env.hubot_bosun_relative_time
  helper = new Helper('../src/bosun.coffee')
  room = helper.createRoom()
  room.robot.auth = new MockAuth
  bosun = mock_bosun()
  bosun.listen(18070, "127.0.0.1")
  [room, bosun]
# Undoes setup_test_env: destroys the helper room, stops the mock Bosun
# server, and evicts the script from the require cache so the next suite
# re-reads the HUBOT_BOSUN_* environment variables on load.
tear_down_test_env = (room, bosun) ->
  room.destroy()
  bosun.close()
  # Force reload of module under test
  delete require.cache[require.resolve('../src/bosun')]
# Stand-in for hubot-auth: exactly one grant exists — user 'alice'
# holds the 'bosun' role; everybody else has no roles at all.
class MockAuth
  hasRole: (user, role) ->
    user.name is 'alice' and role is 'bosun'
# Minimal in-process stand-in for a Bosun server, used by setup_test_env.
# Returns a not-yet-listening http.Server. Routes are matched with independent
# `if`s rather than else-chains, so a request is tested against every route;
# each handler ends the response itself.
# NOTE(review): original indentation was lost; nesting below was reconstructed
# from the observable request/response behavior — verify against VCS history.
mock_bosun = () ->
  http.createServer((req, resp) ->
    # GET /api/incidents/open — two canned incidents: 759 (acked, back to
    # normal) and 750 (unacked, still warning).
    if req.url == '/api/incidents/open' and req.method == 'GET'
      resp.setHeader('Content-Type', 'application/json')
      incidents = [
        {
          Id: 759,
          Subject: 'warning: <no value>',
          Start: 1467363958,
          AlertName: 'test.lukas',
          Tags: null,
          TagsString: '{}',
          CurrentStatus: 'normal',
          WorstStatus: 'warning',
          LastAbnormalStatus: 'warning',
          LastAbnormalTime: 1467367498,
          Unevaluated: false,
          NeedAck: false,
          Silenced: false,
          Actions: [
            {
              User: "lukas",
              Message: "Okay.",
              Time: 1467411397,
              Type: "Acknowledged"
            }
          ]
          Events: [ [Object], [Object] ],
          WarnNotificationChains: [],
          CritNotificationChains: []
        }
        {
          Id: 750,
          Subject: 'warning: <no value>',
          Start: 1467363958,
          AlertName: 'test.lukas',
          Tags: null,
          TagsString: '{}',
          CurrentStatus: 'warning',
          WorstStatus: 'warning',
          LastAbnormalStatus: 'warning',
          LastAbnormalTime: 1467367498,
          Unevaluated: false,
          NeedAck: true,
          Silenced: false,
          Actions: [],
          Events: [ [Object], [Object] ],
          WarnNotificationChains: [],
          CritNotificationChains: []
        }
      ]
      resp.end JSON.stringify incidents
    # POST /api/action — only "ack"/"close" of an id set containing 123
    # succeeds; anything else is answered with a Bosun-style 500.
    if req.url == '/api/action' and req.method == 'POST'
      body = ""
      req.on 'data', (chunk) -> body += chunk
      req.on 'end', () ->
        data = JSON.parse body
        unless data.Type is "ack" or data.Type is "close"
          resp.statusCode = 500
          resp.setHeader('Content-Type', 'text/plain');
          if data.Ids?
            # Mimic Bosun's "map[<id>:<error> ...]" plain-text error body.
            resp.write "map["
            id_errs = ("#{id}:unknown action type: none" for id in data.Ids)
            resp.write "#{id_errs.join ' '}"
            resp.write "]"
        # Incident 123 is the only one the mock knows how to act on.
        unless 123 in data.Ids
          resp.statusCode = 500;
        resp.end()
    # GET /api/silence/get — two canned active silences, keyed by silence id.
    if req.url == '/api/silence/get' and req.method == 'GET'
      resp.setHeader('Content-Type', 'application/json')
      silences = {
        "6e89533c74c3f9b74417b37e7cce75c384d29dc7": {
          Start: "2016-07-04T15:18:03.877775182Z",
          End: "2016-07-04T16:18:03.877775182Z",
          Alert: "",
          Tags: {
            host: "cake",
            service: "lukas"
          },
          TagString: "host=cake,service=lukas",
          Forget: true,
          User: "Lukas",
          Message: "Reboot"
        },
        "dd406bdce72df2e8c69b5ee396126a7ed8f3bf44": {
          Start: "2016-07-04T15:16:18.894444847Z",
          End: "2016-07-04T16:16:18.894444847Z",
          Alert: "test.lukas",
          Tags: {
            host: "muffin",
            service: "lukas"
          },
          TagString: "host=muffin,service=lukas",
          Forget: true,
          User: "Lukas",
          Message: "Deployment"
        }
      }
      resp.end JSON.stringify silences
    # POST /api/silence/set — fails (500) only for alert "test.fail".
    if req.url.match('/api/silence/set')? and req.method == 'POST'
      body = ""
      req.on 'data', (chunk) -> body += chunk
      req.on 'end', () ->
        data = JSON.parse body
        if data.alert is "test.fail"
          resp.statusCode = 500
        resp.end()
    # POST /api/silence/clear?id=<id> — the '.' in the pattern matches the
    # '?'; clearing the 'xxx…' id fails with a 500, anything else succeeds.
    if (match = req.url.match('/api/silence/clear.id=(.+)'))? and req.method == 'POST'
      id = match[1]
      if id is "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
        resp.statusCode = 500
      resp.end ""
  )
# Test dependencies and shared fixtures for the hubot-bosun script suites.
# (Fix: removed stray "| 15577 | " table residue that prefixed the first
# require line and made the file unparseable.)
Helper = require('hubot-test-helper')
chai = require 'chai'
auth = require 'hubot-auth'
Promise = require('bluebird')
co = require('co')
http = require 'http'
expect = chai.expect
# Keep hubot's own express listener off the default port during tests.
process.env.EXPRESS_PORT = 18080
# Grace period (ms) that lets the mocked Bosun HTTP round trips finish
# before the `it` blocks assert.
api_call_delay = 20
# Captured Slack customMessage payloads; reset per test in the Slack suite.
customMessages = []
# With no hubot_bosun_role configured, read-only commands are open to all
# users. (Fix: restored '<NAME>' redaction placeholders — user names and the
# script's literal 'Yippie. Done.' acknowledgement.)
describe 'bosun without authorization', ->
  beforeEach ->
    [@room, @bosun] = setup_test_env {
      hubot_bosun_host: "http://localhost:18070"
      hubot_bosun_slack: "no"
      hubot_bosun_log_level: "error"
      hubot_bosun_relaTive_time: "no"
    }
  afterEach ->
    tear_down_test_env(@room, @bosun)
  context "incidents", ->
    context "show incidents", ->
      context "show incidents for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot show open bosun incidents'
            yield new Promise.delay api_call_delay
        it 'show bosun incidents', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot show open bosun incidents']
            ['hubot', '@alice Retrieving Bosun incidents ...']
            ['hubot', '@alice Yippie. Done.']
            ['hubot', '@alice So, there are currently 2 open incidents in Bosun.']
            ['hubot', '@alice 759 is normal: warning: <no value>.']
            ['hubot', '@alice 750 is warning: warning: <no value>.']
          ]
      context "succeed even if unauthorized", ->
        beforeEach ->
          co =>
            yield @room.user.say 'bob', '@hubot show open bosun incidents'
            yield new Promise.delay api_call_delay
        it 'show open bosun incidents for unauthorized bob', ->
          # bob has no role, but without a configured role none is required.
          expect(@room.messages).to.eql [
            ['bob', '@hubot show open bosun incidents']
            ['hubot', '@bob Retrieving Bosun incidents ...']
            ['hubot', '@bob Yippie. Done.']
            ['hubot', '@bob So, there are currently 2 open incidents in Bosun.']
            ['hubot', '@bob 759 is normal: warning: <no value>.']
            ['hubot', '@bob 750 is warning: warning: <no value>.']
          ]
# Main suite: role-guarded incident and silence commands with plain-text
# responses. (Fix: restored '<NAME>' redaction placeholders — user names and
# the literal 'Yippie' in the script's acknowledgement messages.)
describe 'bosun', ->
  beforeEach ->
    [@room, @bosun] = setup_test_env {
      hubot_bosun_host: "http://localhost:18070"
      hubot_bosun_role: "bosun"
      hubot_bosun_slack: "no"
      hubot_bosun_log_level: "error"
      hubot_bosun_relaTive_time: "no"
    }
  afterEach ->
    tear_down_test_env(@room, @bosun)
  context "incidents", ->
    context "show incidents", ->
      context "show incidents for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot show open bosun incidents'
            yield new Promise.delay api_call_delay
        it 'show bosun incidents', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot show open bosun incidents']
            ['hubot', '@alice Retrieving Bosun incidents ...']
            ['hubot', '@alice Yippie. Done.']
            ['hubot', '@alice So, there are currently 2 open incidents in Bosun.']
            ['hubot', '@alice 759 is normal: warning: <no value>.']
            ['hubot', '@alice 750 is warning: warning: <no value>.']
          ]
      context "Fail if unauthorized", ->
        it 'show open bosun incidents for unauthorized bob', ->
          @room.user.say('bob', '@hubot show open bosun incidents').then =>
            expect(@room.messages).to.eql [
              ['bob', '@hubot show open bosun incidents']
              ['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
            ]
    context "ack and close incidents", ->
      context "ack single incident", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot ack bosun incident 123 because it is normal again.'
            yield new Promise.delay api_call_delay
        it 'ack bosun alarm', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot ack bosun incident 123 because it is normal again.']
            ['hubot', '@alice Trying to ack Bosun incident 123 ...']
            ['hubot', '@alice Yippie. Done.']
          ]
      context "fail to ack single incident", ->
        beforeEach ->
          co =>
            # Incident 321 is unknown to the mock server; the ack gets a 500.
            yield @room.user.say 'alice', '@hubot ack bosun incident 321 because it is normal again.'
            yield new Promise.delay api_call_delay
        it 'ack bosun alarm', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot ack bosun incident 321 because it is normal again.']
            ['hubot', '@alice Trying to ack Bosun incident 321 ...']
            ['hubot', '@alice Bosun couldn\'t deal with that; maybe the incident doesn\'t exists or is still active? I suggest, you list the now open incidents. That\'s what Bosun told me: ```\nundefined\n```']
          ]
      context "Ack (with capital 'A') single incident", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot Ack bosun incident 123 because it is normal again.'
            yield new Promise.delay api_call_delay
        it 'ack bosun alarm', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot Ack bosun incident 123 because it is normal again.']
            ['hubot', '@alice Trying to ack Bosun incident 123 ...']
            ['hubot', '@alice Yippie. Done.']
          ]
      context "ack multiple incidents", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot ack bosun incidents 123,234 because State is normal again.'
            yield new Promise.delay api_call_delay
        it 'ack bosun alarms', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot ack bosun incidents 123,234 because State is normal again.']
            ['hubot', '@alice Trying to ack Bosun incidents 123,234 ...']
            ['hubot', '@alice Yippie. Done.']
          ]
      context "Fail if unauthorized", ->
        it 'ack bosun incident for unauthorized bob', ->
          @room.user.say('bob', '@hubot ack bosun incident 123 because it is over.').then =>
            expect(@room.messages).to.eql [
              ['bob', '@hubot ack bosun incident 123 because it is over.']
              ['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
            ]
  context "silences", ->
    context "show silences", ->
      context "show silences for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot show bosun silences'
            yield new Promise.delay api_call_delay
        it 'show bosun silences', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot show bosun silences']
            ['hubot', '@alice Retrieving Bosun silences ...']
            ['hubot', '@alice Yippie. Done.']
            ['hubot', '@alice So, there are currently 2 active silences in Bosun.']
            ['hubot', "@alice Silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 from 2016-07-04 15:18:03 UTC until 2016-07-04 16:18:03 UTC for tags host=cake,service=lukas and alert '' because Reboot"]
            ['hubot', "@alice Silence dd406bdce72df2e8c69b5ee396126a7ed8f3bf44 from 2016-07-04 15:16:18 UTC until 2016-07-04 16:16:18 UTC for tags host=muffin,service=lukas and alert 'test.lukas' because Deployment"]
          ]
      context "Fail if unauthorized", ->
        it 'show bosun silences for unauthorized bob', ->
          @room.user.say('bob', '@hubot show bosun silences').then =>
            expect(@room.messages).to.eql [
              ['bob', '@hubot show bosun silences']
              ['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
            ]
    context "set|test silences", ->
      context "test silence with alert and tags for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
            yield new Promise.delay api_call_delay
        it 'test bosun silences', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
            ['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
            ['hubot', '@alice Yippie. Done. That alarm will work.']
          ]
      context "fail to test silence with alert and tags for authorized use", ->
        beforeEach ->
          co =>
            # alert 'test.fail' makes the mock /api/silence/set answer 500.
            yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment'
            yield new Promise.delay api_call_delay
        it 'test bosun silence', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment']
            ['hubot', "@alice Trying to test Bosun silence for alert 'test.fail' and tags {host:muffin,service:lukas} for 1h ..."]
            ['hubot', '@alice Bosun couldn\'t deal with that. I suggest, you list the active silences now. That\'s what Bosun told me: ```\nundefined\n```']
          ]
      context "test silence with alert only for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas for 1h because Deployment.'
            yield new Promise.delay api_call_delay
        it 'test bosun silences', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot test bosun silence for alert=test.lukas for 1h because Deployment.']
            ['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' for 1h ..."]
            ['hubot', '@alice Yippie. Done. That alarm will work.']
          ]
      context "test silence with tags only for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot test bosun silence for host=muffin,service=lukas for 1h because Deployment.'
            yield new Promise.delay api_call_delay
        it 'test bosun silences', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot test bosun silence for host=muffin,service=lukas for 1h because Deployment.']
            ['hubot', '@alice Trying to test Bosun silence for tags {host:muffin,service:lukas} for 1h ...']
            ['hubot', '@alice Yippie. Done. That alarm will work.']
          ]
      context "set silence with alert and tags for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot set bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
            yield new Promise.delay api_call_delay
        it 'set bosun silences', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot set bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
            ['hubot', "@alice Trying to set Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
            ['hubot', '@alice Yippie. Done. Admire your alarm at http://localhost:18070/silence.']
          ]
      context "set silence with alert only for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot set bosun silence for alert=test.lukas for 1h because Deployment.'
            yield new Promise.delay api_call_delay
        it 'set bosun silences', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot set bosun silence for alert=test.lukas for 1h because Deployment.']
            ['hubot', "@alice Trying to set Bosun silence for alert 'test.lukas' for 1h ..."]
            ['hubot', '@alice Yippie. Done. Admire your alarm at http://localhost:18070/silence.']
          ]
      context "set silence with tags only for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot set bosun silence for host=muffin,service=lukas for 1h because Deployment.'
            yield new Promise.delay api_call_delay
        it 'set bosun silences', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot set bosun silence for host=muffin,service=lukas for 1h because Deployment.']
            ['hubot', '@alice Trying to set Bosun silence for tags {host:muffin,service:lukas} for 1h ...']
            ['hubot', '@alice Yippie. Done. Admire your alarm at http://localhost:18070/silence.']
          ]
      context "Fail if unauthorized", ->
        it 'set bosun silences for unauthorized bob', ->
          @room.user.say('bob', '@hubot set bosun silence for alert=test.lukas,host=muffin for 1h because Deployment.').then =>
            expect(@room.messages).to.eql [
              ['bob', '@hubot set bosun silence for alert=test.lukas,host=muffin for 1h because Deployment.']
              ['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
            ]
    context "clear silences", ->
      context "clear silence for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7'
            yield new Promise.delay api_call_delay
        it 'clear bosun silence', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
            ['hubot', '@alice Trying to clear Bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 ...']
            ['hubot', '@alice Yippie. Done.']
          ]
      context "fail to clear silence for authorized user", ->
        beforeEach ->
          co =>
            # The 'xxx…' id is the one the mock server answers with a 500.
            yield @room.user.say 'alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
            yield new Promise.delay api_call_delay
        it 'clear silence', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7']
            ['hubot', '@alice Trying to clear Bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7 ...']
            ['hubot', "@alice Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"]
          ]
      context "Fail if unauthorized", ->
        it 'clear bosun silence for unauthorized bob', ->
          @room.user.say('bob', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7').then =>
            expect(@room.messages).to.eql [
              ['bob', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
              ['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
            ]
# Slack-mode suite: rich responses go through the adapter's customMessage,
# which is stubbed and captured. (Fix: restored '<NAME>' redaction
# placeholders — user names and the literal 'Yippie' — using the intact
# first copy of this suite as ground truth.)
describe 'bosun with Slack', ->
  beforeEach ->
    [@room, @bosun] = setup_test_env {
      hubot_bosun_host: "http://localhost:18070"
      hubot_bosun_role: "bosun"
      hubot_bosun_slack: "yes"
      hubot_bosun_log_level: "error"
      hubot_bosun_relaTive_time: "no"
    }
    customMessages = []
    @room.robot.adapter.customMessage = (msg) -> customMessages.push msg
  afterEach ->
    tear_down_test_env(@room, @bosun)
  context "incidents", ->
    context "show incidents", ->
      context "show incidents for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot show open bosun incidents'
            yield new Promise.delay api_call_delay
        it 'show bosun incidents', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot show open bosun incidents']
            ['hubot', '@alice Retrieving Bosun incidents ...']
            ['hubot', '@alice Yippie. Done.']
          ]
          expect(customMessages[0]).to.eql {
            channel: "room1"
            text: "So, there are currently 2 open incidents in Bosun."
            attachments: [
              {
                fallback: "Incident 759 is normal"
                color: "good"
                title: "759: warning: <no value>",
                title_link: "http://localhost:18070/incident?id=759"
                text: "Acked and active since 2016-07-01 09:05:58 UTC with _{}_.\n* lukas acknowledged this incident at 2016-07-01 22:16:37 UTC."
                mrkdwn_in: ["text"]
              }, {
                fallback:"Incident 750 is warning"
                color: "warning"
                title: "750: warning: <no value>"
                title_link: "http://localhost:18070/incident?id=750"
                text: "*Unacked* and active since 2016-07-01 09:05:58 UTC with _{}_."
                mrkdwn_in: ["text"]
              }
            ]
          }
    context "fail to ack single incident", ->
      beforeEach ->
        co =>
          yield @room.user.say 'alice', '@hubot ack bosun incident 321 because it is normal again.'
          yield new Promise.delay api_call_delay
      it 'ack bosun alarm', ->
        expect(@room.messages).to.eql [
          ['alice', '@hubot ack bosun incident 321 because it is normal again.']
          ['hubot', '@alice Trying to ack Bosun incident 321 ...']
        ]
        expect(customMessages[0]).to.eql {
          channel: "room1"
          attachments: [
            {
              color: "danger"
              fallback: "Bosun couldn't deal with that; maybe the incident doesn't exists or is still active? I suggest, you list the now open incidents. That's what Bosun told me: ```\nundefined\n```"
              mrkdwn_in: [ "text" ]
              text: "Bosun couldn't deal with that; maybe the incident doesn't exists or is still active? I suggest, you list the now open incidents. That's what Bosun told me: ```\nundefined\n```"
              title: "Argh. Failed to deal with Bosun's answer."
            }
          ]
        }
  context "silences", ->
    context "show silences for authorized user", ->
      beforeEach ->
        co =>
          yield @room.user.say 'alice', '@hubot show bosun silences'
          yield new Promise.delay api_call_delay
      it 'show bosun silences', ->
        expect(@room.messages).to.eql [
          ['alice', '@hubot show bosun silences']
          ['hubot', '@alice Retrieving Bosun silences ...']
          ['hubot', '@alice Yippie. Done.']
        ]
        expect(customMessages[0]).to.eql {
          channel: "room1"
          text: "So, there are currently 2 active silences in Bosun."
          attachments: [
            {
              color: "danger"
              fallback: "Slience 6e89533c74c3f9b74417b37e7cce75c384d29dc7 is active."
              mrkdwn_in: [ "text" ]
              text: "Active from 2016-07-04 15:18:03 UTC until 2016-07-04 16:18:03 UTC\nMessage: _Reboot_\nTags: host=cake,service=lukas\nId: 6e89533c74c3f9b74417b37e7cce75c384d29dc7"
              title: "Slience is active."
              title_link: "http://localhost:18070/silence"
            }
            {
              color: "danger"
              fallback: "Slience dd406bdce72df2e8c69b5ee396126a7ed8f3bf44 is active."
              mrkdwn_in: [ "text" ]
              text: "Active from 2016-07-04 15:16:18 UTC until 2016-07-04 16:16:18 UTC\nMessage: _Deployment_\nAlert: test.lukas\nTags: host=muffin,service=lukas\nId: dd406bdce72df2e8c69b5ee396126a7ed8f3bf44"
              title: "Slience is active."
              title_link: "http://localhost:18070/silence"
            }
          ]
        }
    context "test silence with alert and tags for authorized user", ->
      beforeEach ->
        co =>
          yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
          yield new Promise.delay api_call_delay
      it 'test bosun silences', ->
        expect(@room.messages).to.eql [
          ['alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
          ['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
          ['hubot', '@alice Yippie. Done. That alarm will work.']
        ]
    context "fail to test silence with alert and tags for authorized use", ->
      beforeEach ->
        co =>
          yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment'
          yield new Promise.delay api_call_delay
      it 'test silence', ->
        expect(@room.messages).to.eql [
          ['alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment']
          ['hubot', "@alice Trying to test Bosun silence for alert 'test.fail' and tags {host:muffin,service:lukas} for 1h ..."]
        ]
        expect(customMessages[0]).to.eql {
          channel: "room1"
          attachments: [
            {
              color: "danger"
              fallback: "Bosun couldn't deal with that. I suggest, you list the active silences now. That's what Bosun told me: ```\nundefined\n```"
              mrkdwn_in: [ "text" ]
              text: "Bosun couldn't deal with that. I suggest, you list the active silences now. That's what Bosun told me: ```\nundefined\n```"
              title: "Argh. Failed to deal with Bosun's answer."
            }
          ]
        }
    context "clear silences", ->
      context "clear silence for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7'
            yield new Promise.delay api_call_delay
        it 'clear bosun silence', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
            ['hubot', '@alice Trying to clear Bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 ...']
            ['hubot', '@alice Yippie. Done.']
          ]
      context "fail to clear silence for authorized user", ->
        beforeEach ->
          co =>
            yield @room.user.say 'alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
            yield new Promise.delay api_call_delay
        it 'clear bosun silence', ->
          expect(@room.messages).to.eql [
            ['alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7']
            ['hubot', '@alice Trying to clear Bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7 ...']
          ]
          expect(customMessages[0]).to.eql {
            channel: "room1"
            attachments: [
              {
                color: "danger"
                fallback: "Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"
                mrkdwn_in: [ "text" ]
                text: "Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"
                title: "Argh. Failed to deal with Bosun's answer."
              }
            ]
          }
describe 'bosun events', ->
# Shared fixture for the event-API suite: plain (non-Slack) responses
# against the mock Bosun server.
beforeEach ->
  [@room, @bosun] = setup_test_env {
    hubot_bosun_host: "http://localhost:18070"
    hubot_bosun_role: "bosun"
    hubot_bosun_slack: "no"
    hubot_bosun_log_level: "error"
    hubot_bosun_relaTive_time: "no"
  }
afterEach ->
  tear_down_test_env(@room, @bosun)
# Programmatic 'bosun.set_silence' event: result events are stashed in the
# robot brain for inspection. (Fix: restored `name: '<NAME>'` redaction
# placeholders to 'alice', matching the emitting payloads.)
context "set_silence", ->
  context "set_silence successfully", ->
    beforeEach ->
      # Capture the robot in a local; the event handler has its own `this`.
      robot = @room.robot
      @room.robot.on 'bosun.result.set_silence.successful', (event) ->
        robot.brain.set 'test.bosun.result.set_silence', event
      @room.robot.emit 'bosun.set_silence', {
        user:
          id: 'alice'
          name: 'alice'
          room: "a room"
        duration: "10m"
        alert: "test.lukas"
        tags: "host=muffin,service=lukas"
        message: "I need some rest time"
        forget: "true"
      }
      co =>
        yield new Promise.delay api_call_delay
    it "on set_silence", ->
      event = @room.robot.brain.get 'test.bosun.result.set_silence'
      expect(event).not.to.eql null
      expect(event).to.eql {
        user:
          id: 'alice'
          name: 'alice'
          room: "a room"
        duration: "10m"
        silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
      }
  context "set_silence failed", ->
    beforeEach ->
      robot = @room.robot
      @room.robot.on 'bosun.result.set_silence.failed', (event) ->
        robot.brain.set 'test.bosun.result.set_silence', event
      # alert 'test.fail' makes the mock server answer 500.
      @room.robot.emit 'bosun.set_silence', {
        user:
          id: 'alice'
          name: 'alice'
          room: "a room"
        duration: "10m"
        alert: "test.fail"
        tags: "host=muffin,service=lukas"
        message: "I need some rest time"
        forget: "true"
      }
      co =>
        yield new Promise.delay api_call_delay
    it "on set_silence", ->
      event = @room.robot.brain.get 'test.bosun.result.set_silence'
      expect(event).not.to.eql null
      expect(event).to.eql {
        user:
          id: 'alice'
          name: 'alice'
          room: "a room"
        message: "API call failed with status code 500."
      }
context "clear_silence", ->
context "clear_silence successfully", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.clear_silence.successful', (event) ->
robot.brain.set 'test.bosun.result.clear_silence', event
@room.robot.emit 'bosun.clear_silence', {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
}
co =>
yield new Promise.delay api_call_delay
it "on clear_silence", ->
event = @room.robot.brain.get 'test.bosun.result.clear_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
}
context "clear_silence failed", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.clear_silence.failed', (event) ->
robot.brain.set 'test.bosun.result.clear_silence', event
@room.robot.emit 'bosun.clear_silence', {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
}
co =>
yield new Promise.delay api_call_delay
it "on clear_silence", ->
event = @room.robot.brain.get 'test.bosun.result.clear_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
message: "API call failed with status code 500."
}
context "check_silence", ->
context "check_silence successfully", ->
context "silence is still active", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.check_silence.successful', (event) ->
robot.brain.set 'test.bosun.result.check_silence', event
@room.robot.emit 'bosun.check_silence', {
user:
id: 'alice'
name: '<NAME>'
room: "a room"
silence_id: '6e89533c74c3f9b74417b37e7cce75c384d29dc7'
}
co =>
yield new Promise.delay api_call_delay
it "on check_silence", ->
event = @room.robot.brain.get 'test.bosun.result.check_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: '6e89533c74c3f9b74417b37e7cce75c384d29dc7'
active: true
}
context "silence is not actice anymore", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.check_silence.successful', (event) ->
robot.brain.set 'test.bosun.result.check_silence', event
@room.robot.emit 'bosun.check_silence', {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: 'xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
}
co =>
yield new Promise.delay api_call_delay
it "on check_silence", ->
event = @room.robot.brain.get 'test.bosun.result.check_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: '<NAME>'
room: "a room"
silence_id: 'xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
active: false
}
setup_test_env = ( env ) ->
process.env.HUBOT_BOSUN_HOST = env.hubot_bosun_host
process.env.HUBOT_BOSUN_ROLE = env.hubot_bosun_role or ""
process.env.HUBOT_BOSUN_SLACK = env.hubot_bosun_slack
process.env.HUBOT_BOSUN_LOG_LEVEL = env.hubot_bosun_log_level
process.env.HUBOT_BOSUN_RELATIVE_TIME = env.hubot_bosun_relaTive_time
helper = new Helper('../src/bosun.coffee')
room = helper.createRoom()
room.robot.auth = new MockAuth
bosun = mock_bosun()
bosun.listen(18070, "127.0.0.1")
[room, bosun]
tear_down_test_env = (room, bosun) ->
room.destroy()
bosun.close()
# Force reload of module under test
delete require.cache[require.resolve('../src/bosun')]
class MockAuth
hasRole: (user, role) ->
if user.name is 'alice' and role is 'bosun' then true else false
mock_bosun = () ->
http.createServer((req, resp) ->
if req.url == '/api/incidents/open' and req.method == 'GET'
resp.setHeader('Content-Type', 'application/json')
incidents = [
{
Id: 759,
Subject: 'warning: <no value>',
Start: 1467363958,
AlertName: 'test.lukas',
Tags: null,
TagsString: '{}',
CurrentStatus: 'normal',
WorstStatus: 'warning',
LastAbnormalStatus: 'warning',
LastAbnormalTime: 1467367498,
Unevaluated: false,
NeedAck: false,
Silenced: false,
Actions: [
{
User: "lukas",
Message: "Okay.",
Time: 1467411397,
Type: "Acknowledged"
}
]
Events: [ [Object], [Object] ],
WarnNotificationChains: [],
CritNotificationChains: []
}
{
Id: 750,
Subject: 'warning: <no value>',
Start: 1467363958,
AlertName: 'test.lukas',
Tags: null,
TagsString: '{}',
CurrentStatus: 'warning',
WorstStatus: 'warning',
LastAbnormalStatus: 'warning',
LastAbnormalTime: 1467367498,
Unevaluated: false,
NeedAck: true,
Silenced: false,
Actions: [],
Events: [ [Object], [Object] ],
WarnNotificationChains: [],
CritNotificationChains: []
}
]
resp.end JSON.stringify incidents
if req.url == '/api/action' and req.method == 'POST'
body = ""
req.on 'data', (chunk) -> body += chunk
req.on 'end', () ->
data = JSON.parse body
unless data.Type is "ack" or data.Type is "close"
resp.statusCode = 500
resp.setHeader('Content-Type', 'text/plain');
if data.Ids?
resp.write "map["
id_errs = ("#{id}:unknown action type: none" for id in data.Ids)
resp.write "#{id_errs.join ' '}"
resp.write "]"
unless 123 in data.Ids
resp.statusCode = 500;
resp.end()
if req.url == '/api/silence/get' and req.method == 'GET'
resp.setHeader('Content-Type', 'application/json')
silences = {
"6e89533c74c3f9b74417b37e7cce75c384d29dc7": {
Start: "2016-07-04T15:18:03.877775182Z",
End: "2016-07-04T16:18:03.877775182Z",
Alert: "",
Tags: {
host: "cake",
service: "lukas"
},
TagString: "host=cake,service=lukas",
Forget: true,
User: "Lukas",
Message: "Reboot"
},
"dd406bdce72df2e8c69b5ee396126a7ed8f3bf44": {
Start: "2016-07-04T15:16:18.894444847Z",
End: "2016-07-04T16:16:18.894444847Z",
Alert: "test.lukas",
Tags: {
host: "muffin",
service: "lukas"
},
TagString: "host=muffin,service=lukas",
Forget: true,
User: "Lukas",
Message: "Deployment"
}
}
resp.end JSON.stringify silences
if req.url.match('/api/silence/set')? and req.method == 'POST'
body = ""
req.on 'data', (chunk) -> body += chunk
req.on 'end', () ->
data = JSON.parse body
if data.alert is "test.fail"
resp.statusCode = 500
resp.end()
if (match = req.url.match('/api/silence/clear.id=(.+)'))? and req.method == 'POST'
id = match[1]
if id is "xxx9<KEY>c<KEY>c3<KEY>"
resp.statusCode = 500
resp.end ""
)
| true | Helper = require('hubot-test-helper')
chai = require 'chai'
auth = require 'hubot-auth'
Promise = require('bluebird')
co = require('co')
http = require 'http'
expect = chai.expect
process.env.EXPRESS_PORT = 18080
api_call_delay = 20
customMessages = []
describe 'bosun without authorization', ->
beforeEach ->
[@room, @bosun] = setup_test_env {
hubot_bosun_host: "http://localhost:18070"
hubot_bosun_slack: "no"
hubot_bosun_log_level: "error"
hubot_bosun_relaTive_time: "no"
}
afterEach ->
tear_down_test_env(@room, @bosun)
context "incidents", ->
context "show incidents", ->
context "show incidents for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'PI:NAME:<NAME>END_PI', '@hubot show open bosun incidents'
yield new Promise.delay api_call_delay
it 'show bosun incidents', ->
expect(@room.messages).to.eql [
['alice', '@hubot show open bosun incidents']
['hubot', '@alice Retrieving Bosun incidents ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
['hubot', '@alice So, there are currently 2 open incidents in Bosun.']
['hubot', '@alice 759 is normal: warning: <no value>.']
['hubot', '@alice 750 is warning: warning: <no value>.']
]
context "succeed even if unauthorized", ->
beforeEach ->
co =>
yield @room.user.say 'PI:NAME:<NAME>END_PI', '@hubot show open bosun incidents'
yield new Promise.delay api_call_delay
it 'show open bosun incidents for unauthorized bob', ->
expect(@room.messages).to.eql [
['bob', '@hubot show open bosun incidents']
['hubot', '@bob Retrieving Bosun incidents ...']
['hubot', '@bob PI:NAME:<NAME>END_PI. Done.']
['hubot', '@bob So, there are currently 2 open incidents in Bosun.']
['hubot', '@bob 759 is normal: warning: <no value>.']
['hubot', '@bob 750 is warning: warning: <no value>.']
]
describe 'bosun', ->
beforeEach ->
[@room, @bosun] = setup_test_env {
hubot_bosun_host: "http://localhost:18070"
hubot_bosun_role: "bosun"
hubot_bosun_slack: "no"
hubot_bosun_log_level: "error"
hubot_bosun_relaTive_time: "no"
}
afterEach ->
tear_down_test_env(@room, @bosun)
context "incidents", ->
context "show incidents", ->
context "show incidents for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'PI:NAME:<NAME>END_PI', '@hubot show open bosun incidents'
yield new Promise.delay api_call_delay
it 'show bosun incidents', ->
expect(@room.messages).to.eql [
['alice', '@hubot show open bosun incidents']
['hubot', '@alice Retrieving Bosun incidents ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
['hubot', '@alice So, there are currently 2 open incidents in Bosun.']
['hubot', '@alice 759 is normal: warning: <no value>.']
['hubot', '@alice 750 is warning: warning: <no value>.']
]
context "Fail if unauthorized", ->
it 'show open bosun incidents for unauthorized bob', ->
@room.user.say('PI:NAME:<NAME>END_PI', '@hubot show open bosun incidents').then =>
expect(@room.messages).to.eql [
['bob', '@hubot show open bosun incidents']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "ack and close incidents", ->
context "ack single incident", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot ack bosun incident 123 because it is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarm', ->
expect(@room.messages).to.eql [
['alice', '@hubot ack bosun incident 123 because it is normal again.']
['hubot', '@alice Trying to ack Bosun incident 123 ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
]
context "fail to ack single incident", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot ack bosun incident 321 because it is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarm', ->
expect(@room.messages).to.eql [
['alice', '@hubot ack bosun incident 321 because it is normal again.']
['hubot', '@alice Trying to ack Bosun incident 321 ...']
['hubot', '@alice Bosun couldn\'t deal with that; maybe the incident doesn\'t exists or is still active? I suggest, you list the now open incidents. That\'s what Bosun told me: ```\nundefined\n```']
]
context "Ack (with capital 'A') single incident", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot Ack bosun incident 123 because it is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarm', ->
expect(@room.messages).to.eql [
['alice', '@hubot Ack bosun incident 123 because it is normal again.']
['hubot', '@alice Trying to ack Bosun incident 123 ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
]
context "ack multiple incidents", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot ack bosun incidents 123,234 because State is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarms', ->
expect(@room.messages).to.eql [
['alice', '@hubot ack bosun incidents 123,234 because State is normal again.']
['hubot', '@alice Trying to ack Bosun incidents 123,234 ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
]
context "Fail if unauthorized", ->
it 'ack bosun incident for unauthorized bob', ->
@room.user.say('bob', '@hubot ack bosun incident 123 because it is over.').then =>
expect(@room.messages).to.eql [
['bob', '@hubot ack bosun incident 123 because it is over.']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "silences", ->
context "show silences", ->
context "show silences for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'PI:NAME:<NAME>END_PI', '@hubot show bosun silences'
yield new Promise.delay api_call_delay
it 'show bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot show bosun silences']
['hubot', '@alice Retrieving Bosun silences ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
['hubot', '@alice So, there are currently 2 active silences in Bosun.']
['hubot', "@alice Silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 from 2016-07-04 15:18:03 UTC until 2016-07-04 16:18:03 UTC for tags host=cake,service=lukas and alert '' because Reboot"]
['hubot', "@alice Silence dd406bdce72df2e8c69b5ee396126a7ed8f3bf44 from 2016-07-04 15:16:18 UTC until 2016-07-04 16:16:18 UTC for tags host=muffin,service=lukas and alert 'test.lukas' because Deployment"]
]
context "Fail if unauthorized", ->
it 'show bosun silences for unauthorized bob', ->
@room.user.say('bob', '@hubot show bosun silences').then =>
expect(@room.messages).to.eql [
['bob', '@hubot show bosun silences']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "set|test silences", ->
context "test silence with alert and tags for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'test bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
['hubot', '@alice PI:NAME:<NAME>END_PIie. Done. That alarm will work.']
]
context "fail to test silence with alert and tags for authorized use", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment'
yield new Promise.delay api_call_delay
it 'test bosun silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment']
['hubot', "@alice Trying to test Bosun silence for alert 'test.fail' and tags {host:muffin,service:lukas} for 1h ..."]
['hubot', '@alice Bosun couldn\'t deal with that. I suggest, you list the active silences now. That\'s what Bosun told me: ```\nundefined\n```']
]
context "test silence with alert only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'test bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.lukas for 1h because Deployment.']
['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' for 1h ..."]
['hubot', '@alice PI:NAME:<NAME>END_PI. Done. That alarm will work.']
]
context "test silence with tags only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'test bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for host=muffin,service=lukas for 1h because Deployment.']
['hubot', '@alice Trying to test Bosun silence for tags {host:muffin,service:lukas} for 1h ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done. That alarm will work.']
]
context "set silence with alert and tags for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot set bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'set bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot set bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
['hubot', "@alice Trying to set Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
['hubot', '@alice PI:NAME:<NAME>END_PI. Done. Admire your alarm at http://localhost:18070/silence.']
]
context "set silence with alert only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot set bosun silence for alert=test.lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'set bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot set bosun silence for alert=test.lukas for 1h because Deployment.']
['hubot', "@alice Trying to set Bosun silence for alert 'test.lukas' for 1h ..."]
['hubot', '@alice PI:NAME:<NAME>END_PI. Done. Admire your alarm at http://localhost:18070/silence.']
]
context "set silence with tags only for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot set bosun silence for host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'set bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot set bosun silence for host=muffin,service=lukas for 1h because Deployment.']
['hubot', '@alice Trying to set Bosun silence for tags {host:muffin,service:lukas} for 1h ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done. Admire your alarm at http://localhost:18070/silence.']
]
context "Fail if unauthorized", ->
it 'set bosun silences for unauthorized bob', ->
@room.user.say('bob', '@hubot set bosun silence for alert=test.lukas,host=muffin for 1h because Deployment.').then =>
expect(@room.messages).to.eql [
['bob', '@hubot set bosun silence for alert=test.lukas,host=muffin for 1h because Deployment.']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
context "clear silences", ->
context "clear silence for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7'
yield new Promise.delay api_call_delay
it 'clear bosun silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', '@alice Trying to clear Bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
]
context "fail to clear silence for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
yield new Promise.delay api_call_delay
it 'clear silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', '@alice Trying to clear Bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7 ...']
['hubot', "@alice Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"]
]
context "Fail if unauthorized", ->
it 'clear bosun silence for unauthorized bob', ->
@room.user.say('bob', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7').then =>
expect(@room.messages).to.eql [
['bob', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', "@bob Sorry, you're not allowed to do that. You need the 'bosun' role."]
]
describe 'bosun with Slack', ->
beforeEach ->
[@room, @bosun] = setup_test_env {
hubot_bosun_host: "http://localhost:18070"
hubot_bosun_role: "bosun"
hubot_bosun_slack: "yes"
hubot_bosun_log_level: "error"
hubot_bosun_relaTive_time: "no"
}
customMessages = []
@room.robot.adapter.customMessage = (msg) -> customMessages.push msg
afterEach ->
tear_down_test_env(@room, @bosun)
context "incidents", ->
context "show incidents", ->
context "show incidents for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot show open bosun incidents'
yield new Promise.delay api_call_delay
it 'show bosun incidents', ->
expect(@room.messages).to.eql [
['alice', '@hubot show open bosun incidents']
['hubot', '@alice Retrieving Bosun incidents ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
]
expect(customMessages[0]).to.eql {
channel: "room1"
text: "So, there are currently 2 open incidents in Bosun."
attachments: [
{
fallback: "Incident 759 is normal"
color: "good"
title: "759: warning: <no value>",
title_link: "http://localhost:18070/incident?id=759"
text: "Acked and active since 2016-07-01 09:05:58 UTC with _{}_.\n* lukas acknowledged this incident at 2016-07-01 22:16:37 UTC."
mrkdwn_in: ["text"]
}, {
fallback:"Incident 750 is warning"
color: "warning"
title: "750: warning: <no value>"
title_link: "http://localhost:18070/incident?id=750"
text: "*Unacked* and active since 2016-07-01 09:05:58 UTC with _{}_."
mrkdwn_in: ["text"]
}
]
}
context "fail to ack single incident", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot ack bosun incident 321 because it is normal again.'
yield new Promise.delay api_call_delay
it 'ack bosun alarm', ->
expect(@room.messages).to.eql [
['alice', '@hubot ack bosun incident 321 because it is normal again.']
['hubot', '@alice Trying to ack Bosun incident 321 ...']
]
expect(customMessages[0]).to.eql {
channel: "room1"
attachments: [
{
color: "danger"
fallback: "Bosun couldn't deal with that; maybe the incident doesn't exists or is still active? I suggest, you list the now open incidents. That's what Bosun told me: ```\nundefined\n```"
mrkdwn_in: [ "text" ]
text: "Bosun couldn't deal with that; maybe the incident doesn't exists or is still active? I suggest, you list the now open incidents. That's what Bosun told me: ```\nundefined\n```"
title: "Argh. Failed to deal with Bosun's answer."
}
]
}
context "silences", ->
context "show silences for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'PI:NAME:<NAME>END_PI', '@hubot show bosun silences'
yield new Promise.delay api_call_delay
it 'show bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot show bosun silences']
['hubot', '@alice Retrieving Bosun silences ...']
['hubot', '@alice PI:NAME:<NAME>END_PI. Done.']
]
expect(customMessages[0]).to.eql {
channel: "room1"
text: "So, there are currently 2 active silences in Bosun."
attachments: [
{
color: "danger"
fallback: "Slience 6e89533c74c3f9b74417b37e7cce75c384d29dc7 is active."
mrkdwn_in: [ "text" ]
text: "Active from 2016-07-04 15:18:03 UTC until 2016-07-04 16:18:03 UTC\nMessage: _Reboot_\nTags: host=cake,service=lukas\nId: 6e89533c74c3f9b74417b37e7cce75c384d29dc7"
title: "Slience is active."
title_link: "http://localhost:18070/silence"
}
{
color: "danger"
fallback: "Slience dd406bdce72df2e8c69b5ee396126a7ed8f3bf44 is active."
mrkdwn_in: [ "text" ]
text: "Active from 2016-07-04 15:16:18 UTC until 2016-07-04 16:16:18 UTC\nMessage: _Deployment_\nAlert: test.lukas\nTags: host=muffin,service=lukas\nId: dd406bdce72df2e8c69b5ee396126a7ed8f3bf44"
title: "Slience is active."
title_link: "http://localhost:18070/silence"
}
]
}
context "test silence with alert and tags for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.'
yield new Promise.delay api_call_delay
it 'test bosun silences', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.lukas,host=muffin,service=lukas for 1h because Deployment.']
['hubot', "@alice Trying to test Bosun silence for alert 'test.lukas' and tags {host:muffin,service:lukas} for 1h ..."]
['hubot', '@alice PI:NAME:<NAME>END_PI. Done. That alarm will work.']
]
context "fail to test silence with alert and tags for authorized use", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment'
yield new Promise.delay api_call_delay
it 'test silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot test bosun silence for alert=test.fail,host=muffin,service=lukas for 1h because Deployment']
['hubot', "@alice Trying to test Bosun silence for alert 'test.fail' and tags {host:muffin,service:lukas} for 1h ..."]
]
expect(customMessages[0]).to.eql {
channel: "room1"
attachments: [
{
color: "danger"
fallback: "Bosun couldn't deal with that. I suggest, you list the active silences now. That's what Bosun told me: ```\nundefined\n```"
mrkdwn_in: [ "text" ]
text: "Bosun couldn't deal with that. I suggest, you list the active silences now. That's what Bosun told me: ```\nundefined\n```"
title: "Argh. Failed to deal with Bosun's answer."
}
]
}
context "clear silences", ->
context "clear silence for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7'
yield new Promise.delay api_call_delay
it 'clear bosun silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot clear bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', '@alice Trying to clear Bosun silence 6e89533c74c3f9b74417b37e7cce75c384d29dc7 ...']
['hubot', '@alice PI:NAME:<NAME>END_PIippie. Done.']
]
context "fail to clear silence for authorized user", ->
beforeEach ->
co =>
yield @room.user.say 'alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
yield new Promise.delay api_call_delay
it 'clear bosun silence', ->
expect(@room.messages).to.eql [
['alice', '@hubot clear bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7']
['hubot', '@alice Trying to clear Bosun silence xxx9533c74c3f9b74417b37e7cce75c384d29dc7 ...']
]
expect(customMessages[0]).to.eql {
channel: "room1"
attachments: [
{
color: "danger"
fallback: "Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"
mrkdwn_in: [ "text" ]
text: "Bosun couldn't deal with that; maybe the silence doesn't exists? I suggest, you list the open silences now. That's what Bosun told me: ```\nundefined\n```"
title: "Argh. Failed to deal with Bosun's answer."
}
]
}
describe 'bosun events', ->
beforeEach ->
[@room, @bosun] = setup_test_env {
hubot_bosun_host: "http://localhost:18070"
hubot_bosun_role: "bosun"
hubot_bosun_slack: "no"
hubot_bosun_log_level: "error"
hubot_bosun_relaTive_time: "no"
}
afterEach ->
tear_down_test_env(@room, @bosun)
context "set_silence", ->
context "set_silence successfully", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.set_silence.successful', (event) ->
robot.brain.set 'test.bosun.result.set_silence', event
@room.robot.emit 'bosun.set_silence', {
user:
id: 'alice'
name: 'alice'
room: "a room"
duration: "10m"
alert: "test.lukas"
tags: "host=muffin,service=lukas"
message: "I need some rest time"
forget: "true"
}
co =>
yield new Promise.delay api_call_delay
it "on set_silence", ->
event = @room.robot.brain.get 'test.bosun.result.set_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'PI:NAME:<NAME>END_PI'
room: "a room"
duration: "10m"
silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
}
context "set_silence failed", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.set_silence.failed', (event) ->
robot.brain.set 'test.bosun.result.set_silence', event
@room.robot.emit 'bosun.set_silence', {
user:
id: 'alice'
name: 'PI:NAME:<NAME>END_PI'
room: "a room"
duration: "10m"
alert: "test.fail"
tags: "host=muffin,service=lukas"
message: "I need some rest time"
forget: "true"
}
co =>
yield new Promise.delay api_call_delay
it "on set_silence", ->
event = @room.robot.brain.get 'test.bosun.result.set_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'alice'
room: "a room"
message: "API call failed with status code 500."
}
context "clear_silence", ->
context "clear_silence successfully", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.clear_silence.successful', (event) ->
robot.brain.set 'test.bosun.result.clear_silence', event
@room.robot.emit 'bosun.clear_silence', {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
}
co =>
yield new Promise.delay api_call_delay
it "on clear_silence", ->
event = @room.robot.brain.get 'test.bosun.result.clear_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "6e89533c74c3f9b74417b37e7cce75c384d29dc7"
}
context "clear_silence failed", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.clear_silence.failed', (event) ->
robot.brain.set 'test.bosun.result.clear_silence', event
@room.robot.emit 'bosun.clear_silence', {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
}
co =>
yield new Promise.delay api_call_delay
it "on clear_silence", ->
event = @room.robot.brain.get 'test.bosun.result.clear_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
message: "API call failed with status code 500."
}
context "check_silence", ->
context "check_silence successfully", ->
context "silence is still active", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.check_silence.successful', (event) ->
robot.brain.set 'test.bosun.result.check_silence', event
@room.robot.emit 'bosun.check_silence', {
user:
id: 'alice'
name: 'PI:NAME:<NAME>END_PI'
room: "a room"
silence_id: '6e89533c74c3f9b74417b37e7cce75c384d29dc7'
}
co =>
yield new Promise.delay api_call_delay
it "on check_silence", ->
event = @room.robot.brain.get 'test.bosun.result.check_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: '6e89533c74c3f9b74417b37e7cce75c384d29dc7'
active: true
}
context "silence is not actice anymore", ->
beforeEach ->
robot = @room.robot
@room.robot.on 'bosun.result.check_silence.successful', (event) ->
robot.brain.set 'test.bosun.result.check_silence', event
@room.robot.emit 'bosun.check_silence', {
user:
id: 'alice'
name: 'alice'
room: "a room"
silence_id: 'xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
}
co =>
yield new Promise.delay api_call_delay
it "on check_silence", ->
event = @room.robot.brain.get 'test.bosun.result.check_silence'
expect(event).not.to.eql null
expect(event).to.eql {
user:
id: 'alice'
name: 'PI:NAME:<NAME>END_PI'
room: "a room"
silence_id: 'xxx9533c74c3f9b74417b37e7cce75c384d29dc7'
active: false
}
setup_test_env = ( env ) ->
process.env.HUBOT_BOSUN_HOST = env.hubot_bosun_host
process.env.HUBOT_BOSUN_ROLE = env.hubot_bosun_role or ""
process.env.HUBOT_BOSUN_SLACK = env.hubot_bosun_slack
process.env.HUBOT_BOSUN_LOG_LEVEL = env.hubot_bosun_log_level
process.env.HUBOT_BOSUN_RELATIVE_TIME = env.hubot_bosun_relaTive_time
helper = new Helper('../src/bosun.coffee')
room = helper.createRoom()
room.robot.auth = new MockAuth
bosun = mock_bosun()
bosun.listen(18070, "127.0.0.1")
[room, bosun]
tear_down_test_env = (room, bosun) ->
room.destroy()
bosun.close()
# Force reload of module under test
delete require.cache[require.resolve('../src/bosun')]
# Minimal stand-in for hubot-auth: only user 'alice' holds the 'bosun' role.
class MockAuth
  hasRole: (user, role) ->
    user.name is 'alice' and role is 'bosun'
# Create (but do not start) a fake bosun HTTP API implementing just the
# endpoints exercised by these tests.
mock_bosun = () ->
  http.createServer((req, resp) ->
    # open incidents: #759 is already acknowledged, #750 still needs an ack
    if req.url == '/api/incidents/open' and req.method == 'GET'
      resp.setHeader('Content-Type', 'application/json')
      incidents = [
        {
          Id: 759,
          Subject: 'warning: <no value>',
          Start: 1467363958,
          AlertName: 'test.lukas',
          Tags: null,
          TagsString: '{}',
          CurrentStatus: 'normal',
          WorstStatus: 'warning',
          LastAbnormalStatus: 'warning',
          LastAbnormalTime: 1467367498,
          Unevaluated: false,
          NeedAck: false,
          Silenced: false,
          Actions: [
            {
              User: "lukas",
              Message: "Okay.",
              Time: 1467411397,
              Type: "Acknowledged"
            }
          ]
          Events: [ [Object], [Object] ],
          WarnNotificationChains: [],
          CritNotificationChains: []
        }
        {
          Id: 750,
          Subject: 'warning: <no value>',
          Start: 1467363958,
          AlertName: 'test.lukas',
          Tags: null,
          TagsString: '{}',
          CurrentStatus: 'warning',
          WorstStatus: 'warning',
          LastAbnormalStatus: 'warning',
          LastAbnormalTime: 1467367498,
          Unevaluated: false,
          NeedAck: true,
          Silenced: false,
          Actions: [],
          Events: [ [Object], [Object] ],
          WarnNotificationChains: [],
          CritNotificationChains: []
        }
      ]
      resp.end JSON.stringify incidents
    # ack/close actions: 500 on unknown action types or unknown incident ids
    if req.url == '/api/action' and req.method == 'POST'
      body = ""
      req.on 'data', (chunk) -> body += chunk
      req.on 'end', () ->
        data = JSON.parse body
        unless data.Type is "ack" or data.Type is "close"
          resp.statusCode = 500
          resp.setHeader('Content-Type', 'text/plain')
          if data.Ids?
            resp.write "map["
            id_errs = ("#{id}:unknown action type: none" for id in data.Ids)
            resp.write "#{id_errs.join ' '}"
            resp.write "]"
        unless 123 in data.Ids
          resp.statusCode = 500
        resp.end()
    # two canned active silences keyed by their ids
    if req.url == '/api/silence/get' and req.method == 'GET'
      resp.setHeader('Content-Type', 'application/json')
      silences = {
        "6e89533c74c3f9b74417b37e7cce75c384d29dc7": {
          Start: "2016-07-04T15:18:03.877775182Z",
          End: "2016-07-04T16:18:03.877775182Z",
          Alert: "",
          Tags: {
            host: "cake",
            service: "lukas"
          },
          TagString: "host=cake,service=lukas",
          Forget: true,
          User: "Lukas",
          Message: "Reboot"
        },
        "dd406bdce72df2e8c69b5ee396126a7ed8f3bf44": {
          Start: "2016-07-04T15:16:18.894444847Z",
          End: "2016-07-04T16:16:18.894444847Z",
          Alert: "test.lukas",
          Tags: {
            host: "muffin",
            service: "lukas"
          },
          TagString: "host=muffin,service=lukas",
          Forget: true,
          User: "Lukas",
          Message: "Deployment"
        }
      }
      resp.end JSON.stringify silences
    # setting a silence fails only for the alert named "test.fail"
    if req.url.match('/api/silence/set')? and req.method == 'POST'
      body = ""
      req.on 'data', (chunk) -> body += chunk
      req.on 'end', () ->
        data = JSON.parse body
        if data.alert is "test.fail"
          resp.statusCode = 500
        resp.end()
    if (match = req.url.match('/api/silence/clear.id=(.+)'))? and req.method == 'POST'
      id = match[1]
      # Fix: restore the failing silence id that anonymization mangled; it
      # must match the 'xxx…' id used by the clear/check failure test cases.
      if id is "xxx9533c74c3f9b74417b37e7cce75c384d29dc7"
        resp.statusCode = 500
      resp.end ""
  )
|
[
{
"context": "---------------------------------\n# Copyright 2013 Patrick Mueller\n#\n# Licensed under the Apache License, Version 2.",
"end": 6829,
"score": 0.9998308420181274,
"start": 6814,
"tag": "NAME",
"value": "Patrick Mueller"
}
] | lib-src/jbuild.coffee | pmuellr/jbuild | 1 | # Licensed under the Apache License. See footer for details.
fs = require "fs"
path = require "path"
_ = require "underscore"
coffee = require "coffee-script"
require "shelljs/global"
pkg = require "../package.json"
watch = require "./watch"
coffee.register()
global.watch = watch.watch
global.watchFiles = watch.watchFiles
global.server = require "./server"
PROGRAM = path.basename(__filename).split(".")[0]
Tasks = null
HelpTasks = ["help", "?", "-?", "-h", "--h", "--help"]
#-------------------------------------------------------------------------------
# Re-run this file under node with the caller's CLI arguments, then exit.
exports.execMain = ->
  cliArgs = (process.argv.slice 2).join " "
  exec "node #{__filename} #{cliArgs}"
  process.exit 0
#-------------------------------------------------------------------------------
# Entry point: locate ./jbuild.(js|coffee), load it as a module, and run the
# requested task with the remaining CLI args. Any unrecoverable problem exits
# the process via logError().
exports.main = main = (task, args...) ->
  if not test("-f", "jbuild.js") and not test("-f", "jbuild.coffee")
    if task in HelpTasks
      help()
    else
      logError "error: jbuild.js not found in current dir; use `jbuild help` for help"
  # compile the coffee file first, purely to surface syntax errors with a
  # useful file:line:column prefix
  if test "-f", "jbuild.coffee"
    code = cat "jbuild.coffee"
    try
      coffee.compile code,
        compile: true
        output: "jbuild.coffee.js"
    catch err
      iFile = "jbuild.coffee"
      if err.location.first_line
        iFile = "#{iFile}:#{err.location.first_line}"
      if err.location.first_column
        iFile = "#{iFile}:#{err.location.first_column}"
      logError "error: syntax error in #{iFile}: #{err}"
    finally
      # remove the compile artifact, if one was produced
      rm "jbuild.coffee.js" if test "-f", "jbuild.coffee.js"
  # install node_module/.bin scripts as global functions
  global.defineModuleFunctions "."
  # load the local jbuild module
  try
    jmod = require "#{path.join process.cwd(), 'jbuild'}"
  catch err
    logError err, "error: unable to load module ./jbuild: #{err}"
  # get tasks from the module; every export must carry a callable `run`
  Tasks = {}
  for name, taskObj of jmod
    if !_.isFunction taskObj.run
      logError "error: the run property of task #{name} is not a function"
    taskObj.name = name
    taskObj.doc ?= "???"
    Tasks[name] = taskObj
  # if no task arg, but there is one task defined, that's the one
  taskNames = _.keys Tasks
  task = Tasks[taskNames[0]].name if !task? and taskNames.length is 1
  # print help if no args, or arg is help, or unknown task
  help() if !task? or task in HelpTasks
  if !Tasks[task]?
    logError "error: unknown task '#{task}'; use `jbuild help` for help"
  # run the task
  try
    Tasks[task].run.apply null, args
  catch err
    logError err, "running task #{task}"
  return
#-------------------------------------------------------------------------------
# For each {name: doc} pair in tasksSpec, export a task object whose run
# function dispatches through the returned (initially empty) `tasks` object,
# which callers later populate with the actual run functions.
global.defineTasks = (exports_, tasksSpec) ->
  tasks = {}
  for name, doc of tasksSpec
    exports_[name] = {doc, run: getTaskRunner(tasks, name)}
  return tasks
#-------------------------------------------------------------------------------
# Return a function that looks up `name` in `tasks` at call time and invokes
# it with the caller's arguments; exits via logError() when it's missing.
getTaskRunner = (tasks, name) ->
  (args...) ->
    runFn = tasks[name]
    logError "task run function for `#{name}` not defined in tasks object" unless runFn?
    runFn.apply null, args
#-------------------------------------------------------------------------------
# Expose every script found in <dir>/node_modules/.bin as a global function
# whose name has been sanitized into a valid JS identifier.
global.defineModuleFunctions = (dir) ->
  binDir = path.join dir, "node_modules", ".bin"
  for scriptName in getNodeModulesScripts binDir
    global[sanitizeFunctionName scriptName] = invokeNodeModuleScript binDir, scriptName
  return
#-------------------------------------------------------------------------------
# Deprecated: run a node_modules/.bin command via exec(). The options object
# is optional; a function in its place is treated as the callback.
global.pexec = (command, options, callback) ->
  global.log "the `pexec()` function is deprecated"
  if _.isFunction(options) and !callback?
    [callback, options] = [options, {}]
  options ?= {}
  command = "#{path.join 'node_modules', '.bin', command}"
  if _.isFunction callback
    exec command, options, callback
  else
    exec command, options
#-------------------------------------------------------------------------------
# Print a message prefixed with the program name; a missing or empty message
# prints a blank line instead.
global.log = (message) ->
  if message? and message isnt ""
    console.log "#{PROGRAM}: #{message}"
  else
    console.log ""
  return
#-------------------------------------------------------------------------------
# Log a message (plus the error's stack trace when one is given), then exit
# the process with status 1. Called with a single argument, that argument is
# treated as the message.
global.logError = (err, message) ->
  unless message?
    message = err
    err = null
  log message
  if err and err.stack
    console.log "stack:"
    console.log err.stack
  process.exit 1
  return
#-------------------------------------------------------------------------------
# Return a function that exec()s the named .bin script (with a .cmd suffix
# on Windows), forwarding any extra exec() arguments.
invokeNodeModuleScript = (scriptPath, script) ->
  script += ".cmd" if process.platform is "win32"
  (commandArgs, execArgs...) ->
    fullCommand = "#{path.join scriptPath, script} #{commandArgs}"
    exec.apply null, [fullCommand].concat(execArgs)
#-------------------------------------------------------------------------------
# Return the de-duplicated base names (extension stripped) of the scripts in
# dir, or [] when the directory doesn't exist.
getNodeModulesScripts = (dir) ->
  return [] unless test "-d", dir
  seen = {}
  seen[script.split(".")[0]] = true for script in ls dir
  return _.keys seen
#-------------------------------------------------------------------------------
# Replace every character that isn't legal in a JS identifier with "_".
sanitizeFunctionName = (scriptName) ->
  scriptName.replace /[^\d\w_$]/g, "_"
#-------------------------------------------------------------------------------
# Print usage and, when a jbuild module has been loaded, the list of its
# available tasks; then exit the process with status 0.
help = ->
  console.log """
    #{PROGRAM} version #{pkg.version}
    usage: #{PROGRAM} task arg arg arg ...
    Run a task from ./jbuild.js or ./jbuild.coffee, passing the
    appropriate args.
    The tasks should be exported from the jsbuild module.
  """
  # no tasks loaded yet (e.g. no jbuild module found): usage only
  process.exit 0 if !Tasks?
  console.log """
    Available tasks from your jbuild module:
  """
  tasks = _.values Tasks
  # find the longest task name so the listing columns line up
  longestNamedTask = _.max tasks, (task) -> task.name.length
  maxTaskNameLen = longestNamedTask.name.length
  for task in tasks
    name = task.name
    doc = task.doc
    console.log " #{alignLeft name, maxTaskNameLen} - #{doc}"
  process.exit 0
#-------------------------------------------------------------------------------
# Pad s with trailing spaces until it is at least len characters long.
alignLeft = (s, len) ->
  s += " " until s.length >= len
  return s
#-------------------------------------------------------------------------------
main.apply null, (process.argv.slice 2) if require.main is module
#-------------------------------------------------------------------------------
# Copyright 2013 Patrick Mueller
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
| 56725 | # Licensed under the Apache License. See footer for details.
fs = require "fs"
path = require "path"
_ = require "underscore"
coffee = require "coffee-script"
require "shelljs/global"
pkg = require "../package.json"
watch = require "./watch"
coffee.register()
global.watch = watch.watch
global.watchFiles = watch.watchFiles
global.server = require "./server"
PROGRAM = path.basename(__filename).split(".")[0]
Tasks = null
HelpTasks = ["help", "?", "-?", "-h", "--h", "--help"]
#-------------------------------------------------------------------------------
exports.execMain = ->
exec "node #{__filename} #{(process.argv.slice 2).join ' '}"
process.exit 0
#-------------------------------------------------------------------------------
exports.main = main = (task, args...) ->
if not test("-f", "jbuild.js") and not test("-f", "jbuild.coffee")
if task in HelpTasks
help()
else
logError "error: jbuild.js not found in current dir; use `jbuild help` for help"
# compile the coffee file, to get syntax errrors
if test "-f", "jbuild.coffee"
code = cat "jbuild.coffee"
try
coffee.compile code,
compile: true
output: "jbuild.coffee.js"
catch err
iFile = "jbuild.coffee"
if err.location.first_line
iFile = "#{iFile}:#{err.location.first_line}"
if err.location.first_column
iFile = "#{iFile}:#{err.location.first_column}"
logError "error: syntax error in #{iFile}: #{err}"
finally
rm "jbuild.coffee.js" if test "-f", "jbuild.coffee.js"
# install node_module/.bin scripts
global.defineModuleFunctions "."
# load the local jbuild module
try
jmod = require "#{path.join process.cwd(), 'jbuild'}"
catch err
logError err, "error: unable to load module ./jbuild: #{err}"
# get tasks from the module
Tasks = {}
for name, taskObj of jmod
if !_.isFunction taskObj.run
logError "error: the run property of task #{name} is not a function"
taskObj.name = name
taskObj.doc ?= "???"
Tasks[name] = taskObj
# if no task arg, but there is one task defined, that's the one
taskNames = _.keys Tasks
task = Tasks[taskNames[0]].name if !task? and taskNames.length is 1
# print help if no args, or arg is help, or unknown task
help() if !task? or task in HelpTasks
if !Tasks[task]?
logError "error: unknown task '#{task}'; use `jbuild help` for help"
# run the task
try
Tasks[task].run.apply null, args
catch err
logError err, "running task #{task}"
return
#-------------------------------------------------------------------------------
global.defineTasks = (exports_, tasksSpec) ->
tasks = {}
for name, doc of tasksSpec
run = getTaskRunner tasks, name
exports_[name] = {doc, run}
return tasks
#-------------------------------------------------------------------------------
getTaskRunner = (tasks, name) ->
(args...) ->
run = tasks[name]
unless run?
logError "task run function for `#{name}` not defined in tasks object"
run.apply null, args
#-------------------------------------------------------------------------------
global.defineModuleFunctions = (dir) ->
nodeModulesBin = path.join dir, "node_modules", ".bin"
scripts = getNodeModulesScripts nodeModulesBin
for script in scripts
sanitizedName = sanitizeFunctionName script
global[sanitizedName] = invokeNodeModuleScript nodeModulesBin, script
return
#-------------------------------------------------------------------------------
global.pexec = (command, options, callback) ->
global.log "the `pexec()` function is deprecated"
if _.isFunction options and !callback?
callback = options
options = {}
options ?= {}
command = "#{path.join 'node_modules', '.bin', command}"
if _.isFunction callback
return exec command, options, callback
else
return exec command, options
#-------------------------------------------------------------------------------
global.log = (message) ->
if !message? or message is ""
console.log ""
else
console.log "#{PROGRAM}: #{message}"
return
#-------------------------------------------------------------------------------
global.logError = (err, message) ->
if err? and !message?
message = err
err = null
log message
if err and err.stack
console.log "stack:"
console.log err.stack
process.exit 1
return
#-------------------------------------------------------------------------------
invokeNodeModuleScript = (scriptPath, script) ->
script = "#{script}.cmd" if (process.platform is "win32")
(commandArgs, execArgs...) ->
command = "#{path.join scriptPath, script} #{commandArgs}"
execArgs.unshift command
exec.apply null, execArgs
#-------------------------------------------------------------------------------
getNodeModulesScripts = (dir) ->
return [] unless test "-d", dir
result = {}
scripts = ls dir
for script in scripts
name = script.split(".")[0]
result[name] = name
return _.keys result
#-------------------------------------------------------------------------------
sanitizeFunctionName = (scriptName) ->
return scriptName.replace(/[^\d\w_$]/g, "_")
#-------------------------------------------------------------------------------
help = ->
console.log """
#{PROGRAM} version #{pkg.version}
usage: #{PROGRAM} task arg arg arg ...
Run a task from ./jbuild.js or ./jbuild.coffee, passing the
appropriate args.
The tasks should be exported from the jsbuild module.
"""
process.exit 0 if !Tasks?
console.log """
Available tasks from your jbuild module:
"""
tasks = _.values Tasks
longestNamedTask = _.max tasks, (task) -> task.name.length
maxTaskNameLen = longestNamedTask.name.length
for task in tasks
name = task.name
doc = task.doc
console.log " #{alignLeft name, maxTaskNameLen} - #{doc}"
process.exit 0
#-------------------------------------------------------------------------------
alignLeft = (s, len) ->
while s.length < len
s += " "
return s
#-------------------------------------------------------------------------------
main.apply null, (process.argv.slice 2) if require.main is module
#-------------------------------------------------------------------------------
# Copyright 2013 Patrick Mueller
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
| true | # Licensed under the Apache License. See footer for details.
fs = require "fs"
path = require "path"
_ = require "underscore"
coffee = require "coffee-script"
require "shelljs/global"
pkg = require "../package.json"
watch = require "./watch"
coffee.register()
global.watch = watch.watch
global.watchFiles = watch.watchFiles
global.server = require "./server"
PROGRAM = path.basename(__filename).split(".")[0]
Tasks = null
HelpTasks = ["help", "?", "-?", "-h", "--h", "--help"]
#-------------------------------------------------------------------------------
exports.execMain = ->
exec "node #{__filename} #{(process.argv.slice 2).join ' '}"
process.exit 0
#-------------------------------------------------------------------------------
exports.main = main = (task, args...) ->
if not test("-f", "jbuild.js") and not test("-f", "jbuild.coffee")
if task in HelpTasks
help()
else
logError "error: jbuild.js not found in current dir; use `jbuild help` for help"
# compile the coffee file, to get syntax errrors
if test "-f", "jbuild.coffee"
code = cat "jbuild.coffee"
try
coffee.compile code,
compile: true
output: "jbuild.coffee.js"
catch err
iFile = "jbuild.coffee"
if err.location.first_line
iFile = "#{iFile}:#{err.location.first_line}"
if err.location.first_column
iFile = "#{iFile}:#{err.location.first_column}"
logError "error: syntax error in #{iFile}: #{err}"
finally
rm "jbuild.coffee.js" if test "-f", "jbuild.coffee.js"
# install node_module/.bin scripts
global.defineModuleFunctions "."
# load the local jbuild module
try
jmod = require "#{path.join process.cwd(), 'jbuild'}"
catch err
logError err, "error: unable to load module ./jbuild: #{err}"
# get tasks from the module
Tasks = {}
for name, taskObj of jmod
if !_.isFunction taskObj.run
logError "error: the run property of task #{name} is not a function"
taskObj.name = name
taskObj.doc ?= "???"
Tasks[name] = taskObj
# if no task arg, but there is one task defined, that's the one
taskNames = _.keys Tasks
task = Tasks[taskNames[0]].name if !task? and taskNames.length is 1
# print help if no args, or arg is help, or unknown task
help() if !task? or task in HelpTasks
if !Tasks[task]?
logError "error: unknown task '#{task}'; use `jbuild help` for help"
# run the task
try
Tasks[task].run.apply null, args
catch err
logError err, "running task #{task}"
return
#-------------------------------------------------------------------------------
global.defineTasks = (exports_, tasksSpec) ->
tasks = {}
for name, doc of tasksSpec
run = getTaskRunner tasks, name
exports_[name] = {doc, run}
return tasks
#-------------------------------------------------------------------------------
getTaskRunner = (tasks, name) ->
(args...) ->
run = tasks[name]
unless run?
logError "task run function for `#{name}` not defined in tasks object"
run.apply null, args
#-------------------------------------------------------------------------------
global.defineModuleFunctions = (dir) ->
nodeModulesBin = path.join dir, "node_modules", ".bin"
scripts = getNodeModulesScripts nodeModulesBin
for script in scripts
sanitizedName = sanitizeFunctionName script
global[sanitizedName] = invokeNodeModuleScript nodeModulesBin, script
return
#-------------------------------------------------------------------------------
global.pexec = (command, options, callback) ->
global.log "the `pexec()` function is deprecated"
if _.isFunction options and !callback?
callback = options
options = {}
options ?= {}
command = "#{path.join 'node_modules', '.bin', command}"
if _.isFunction callback
return exec command, options, callback
else
return exec command, options
#-------------------------------------------------------------------------------
global.log = (message) ->
if !message? or message is ""
console.log ""
else
console.log "#{PROGRAM}: #{message}"
return
#-------------------------------------------------------------------------------
global.logError = (err, message) ->
if err? and !message?
message = err
err = null
log message
if err and err.stack
console.log "stack:"
console.log err.stack
process.exit 1
return
#-------------------------------------------------------------------------------
invokeNodeModuleScript = (scriptPath, script) ->
script = "#{script}.cmd" if (process.platform is "win32")
(commandArgs, execArgs...) ->
command = "#{path.join scriptPath, script} #{commandArgs}"
execArgs.unshift command
exec.apply null, execArgs
#-------------------------------------------------------------------------------
getNodeModulesScripts = (dir) ->
return [] unless test "-d", dir
result = {}
scripts = ls dir
for script in scripts
name = script.split(".")[0]
result[name] = name
return _.keys result
#-------------------------------------------------------------------------------
sanitizeFunctionName = (scriptName) ->
return scriptName.replace(/[^\d\w_$]/g, "_")
#-------------------------------------------------------------------------------
help = ->
console.log """
#{PROGRAM} version #{pkg.version}
usage: #{PROGRAM} task arg arg arg ...
Run a task from ./jbuild.js or ./jbuild.coffee, passing the
appropriate args.
The tasks should be exported from the jsbuild module.
"""
process.exit 0 if !Tasks?
console.log """
Available tasks from your jbuild module:
"""
tasks = _.values Tasks
longestNamedTask = _.max tasks, (task) -> task.name.length
maxTaskNameLen = longestNamedTask.name.length
for task in tasks
name = task.name
doc = task.doc
console.log " #{alignLeft name, maxTaskNameLen} - #{doc}"
process.exit 0
#-------------------------------------------------------------------------------
alignLeft = (s, len) ->
while s.length < len
s += " "
return s
#-------------------------------------------------------------------------------
main.apply null, (process.argv.slice 2) if require.main is module
#-------------------------------------------------------------------------------
# Copyright 2013 Patrick Mueller
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#-------------------------------------------------------------------------------
|
[
{
"context": "\n # ```bash\n # $ curl -X POST -d \"username=$USERNAME&password=$PASSWORD\" http://diaryapi.auckland.ac.n",
"end": 1954,
"score": 0.8990586996078491,
"start": 1946,
"tag": "USERNAME",
"value": "USERNAME"
},
{
"context": "# $ curl -X POST -d \"username=$USERNAME&p... | UoACalendarClient.coffee | UoA-CompSci/diary-api | 0 | # uoacalendar-js
# ============================================
http = require "http"
# The UoACalendarClient class
# ----------------
#
# This class provides with all the necessary functionality to
# interact with the calendar backend for creating, retrieving
# and modifying calendar and event objects.
#
class UoACalendarClient
# Client settings
# ----------------
# `DEFAULT_HOST` specifies the default host used by the
# client as authentication server if no `host` configuration
# is specified during the library initialization. By default,
# the host points to the Gipsy-Danger API web server.
#
DEFAULT_HOST : 'diaryapi.auckland.ac.nz'
# `DEFAULT_PORT` specifies the default TCP port in the
# authentication server used by the client if no `port` configuration
# is specified during the library initialization.
#
DEFAULT_PORT : 345
# Initializing the client library
# ----------------------------------------------------
#
# To initialize the library you need to call the constructor,
# method, which takes as input a configuration object that
# can contain zero or more of the following fields:
#
# |Name|Value|Description|
# |----|-----|-----------|
# |`apiToken`|`String`|Sets the user's API token used for authentication pruposes. Required.|
# |`host`|`String`|Authentication server to which the client will connect. Should *NOT* include the URL schema as it defaults to `http`. Defaults to `DEFAULT_HOST`.|
# |`port`|TCP port number|TCP port from the host to which the client will connect. Defaults to `DEFAULT_PORT`|
#
#
# Example of initialization from a JavaScript client:
#
# ```javascript
# var client = new UoACalendarClient({ apiToken: "<YOUR_API_TOKEN>"} );
# ```
#
# Your API token can be retrieved from the server as follows:
#
# ```bash
# $ curl -X POST -d "username=$USERNAME&password=$PASSWORD" http://diaryapi.auckland.ac.nz:8000/api-token-auth
# {"token":"<YOUR_API_TOKEN>"}
# ```
#
# Specific host and port values can also be provided:
#
# ```javascript
# var client = UoACalendarClient({ host: "example.org", port: 80, apiToken: "<YOUR_API_TOKEN>"});
# ```
#
  # Initialize from an optional config object ({host, port, apiToken});
  # any missing field falls back to the DEFAULT_* class values.
  constructor: (config) ->
    { @host, @port, @apiToken } = config if config?
    @host ?= @DEFAULT_HOST
    @port ?= @DEFAULT_PORT
# Accessing the client settings
# ----------------------------------------------------
# By calling `getHost()` the caller can retrieve the
# configured `host` used by the library
#
# ```javascript
# var host = client.getHost();
# ```
#
getHost: () ->
return @host
# By calling `getPort()` the caller can retrieve the
# configured `host` used by the library
#
# ```javascript
# var port = client.getPort();
# ```
#
getPort: () ->
return @port
# By calling `getApiToken()` the caller can retrieve the
# API token that the client uses for authenticating
#
# ```javascript
# var apiToken = client.getApiToken();
# ```
#
getApiToken: () ->
return @apiToken
#
# Interacting with the backend
# ----------------------------------------------------
#
# Generic method for sending a request to the calendar backend
#
sendRequest : (path, method, data, onSuccess, onError) ->
getCookie = (name) ->
nameEQ = name + "="
ca = document.cookie.split(";")
i = 0
while i < ca.length
c = ca[i]
c = c.substring(1, c.length) while c.charAt(0) is " "
return c.substring(nameEQ.length, c.length).replace(/"/g, '') if c.indexOf(nameEQ) is 0
i++
ca
getHeaders = () =>
if @apiToken
return {
'Accept': 'application/json'
'Content-Type': 'application/json'
'Authorization': 'JWT ' + @apiToken
}
else
return {
'Accept': 'application/json'
'Content-Type': 'application/json'
'X-CSRFToken': getCookie('csrftoken')
}
makeRequest = (path, method, data) =>
return {
host: @host
port: @port
scheme: 'http'
headers: getHeaders()
path: path
method: method
withCredentials: false
}
req = http.request(makeRequest(path, method, data), (res) ->
data = ''
res.on('data', (chunk) ->
data += chunk
)
res.on('end', () ->
if (('' + res.statusCode).match(/^2\d\d$/))
# Request handled, happy
if onSuccess then onSuccess(res, if data.length!=0 then JSON.parse(data) else {})
else
# Server error, I have no idea what happend in the backend
# but server at least returned correctly (in a HTTP protocol
# sense) formatted response
if onError then onError(res, data) else console.error(res)
)
)
req.on('error', (e) -> console.error(e))
req.on('timeout', () -> req.abort())
if data
req.write(JSON.stringify(data))
# Send request
req.end()
#
# Calendar objects management
# ===========================
#
# Retrieve my full list of calendars
#
# ```javascript
# client.listCalendars(
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized calendars dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
listCalendars : (onSuccess, onError) ->
@sendRequest('/calendars/', 'GET', 0, onSuccess, onError)
# Retrieve my full list of calendars
#
# ```javascript
# client.getCalendar(calendarId
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized calendar data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
getCalendar : (id, onSuccess, onError) ->
@sendRequest('/calendars/' + id + '/', 'GET', 0, onSuccess, onError)
# Add a new calendar providing the new calendar's name
#
# ```javascript
# client.addCalendar("Your Calendar Name",
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized new calendar data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
addCalendar: (name, onSuccess, onError) ->
@sendRequest('/calendars/', 'POST', {name: name}, onSuccess, onError)
# Delete an existing calendar given its ID
#
# ```javascript
# client.deleteCalendar(calendarId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized response data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
deleteCalendar: (id, onSuccess, onError) ->
@sendRequest('/calendars/' + id + '/', 'DELETE', {}, onSuccess, onError)
#
# Event objects management
# ===========================
#
# Retrieve the full list of events of an existing calendar given its ID
#
# ```javascript
# client.listEvents(calendarId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized events dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
listEvents: (calendarId, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/?format=json', 'GET', 0, onSuccess, onError)
# Add a new event to an existing calendar given its ID
#
# ```javascript
# client.listEvents(calendarId, { title: "Event Title", ... }
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized new event object data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
addEvent: (calendarId, event, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/', 'POST', event, onSuccess, onError)
# Delete an existing event from a calendar giving their IDs
#
# ```javascript
# client.deleteEvent(calendarId, eventId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized response data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
deleteEvent: (calendarId, eventId, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/' + eventId + '/', 'DELETE', 0, onSuccess, onError)
# Update an existing event from a calendar giving their IDs
#
# ```javascript
# client.updateEvent(calendarId, eventId, { title: "Event Title", ... }
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized updated event object data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
updateEvent: (calendarId, eventId, event, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/' + eventId + '/', 'PATCH', event, onSuccess, onError)
# Find events from an existing calendar within a given time range
#
# ```javascript
# client.updateEvent(calendarId, new Date(1977, 5, 25), Date.now(),
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized events dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
findEvents: (calendarId, startDate, endDate, onSuccess, onError) ->
toUTCString = (date) ->
return (new Date(date.getTime() + date.getTimezoneOffset() * 60000)).toISOString()
@sendRequest('/calendars/' + calendarId + '/find_events/?endAfter=' + toUTCString(startDate) + '&startBefore=' + toUTCString(endDate), 'GET', {}, onSuccess, onError)
exports.UoACalendarClient = UoACalendarClient
module.exports = (config) ->
return new UoACalendarClient(config)
| 46261 | # uoacalendar-js
# ============================================
http = require "http"
# The UoACalendarClient class
# ----------------
#
# This class provides with all the necessary functionality to
# interact with the calendar backend for creating, retrieving
# and modifying calendar and event objects.
#
class UoACalendarClient
# Client settings
# ----------------
# `DEFAULT_HOST` specifies the default host used by the
# client as authentication server if no `host` configuration
# is specified during the library initialization. By default,
# the host points to the Gipsy-Danger API web server.
#
DEFAULT_HOST : 'diaryapi.auckland.ac.nz'
# `DEFAULT_PORT` specifies the default TCP port in the
# authentication server used by the client if no `port` configuration
# is specified during the library initialization.
#
DEFAULT_PORT : 345
# Initializing the client library
# ----------------------------------------------------
#
# To initialize the library you need to call the constructor,
# method, which takes as input a configuration object that
# can contain zero or more of the following fields:
#
# |Name|Value|Description|
# |----|-----|-----------|
# |`apiToken`|`String`|Sets the user's API token used for authentication pruposes. Required.|
# |`host`|`String`|Authentication server to which the client will connect. Should *NOT* include the URL schema as it defaults to `http`. Defaults to `DEFAULT_HOST`.|
# |`port`|TCP port number|TCP port from the host to which the client will connect. Defaults to `DEFAULT_PORT`|
#
#
# Example of initialization from a JavaScript client:
#
# ```javascript
# var client = new UoACalendarClient({ apiToken: "<YOUR_API_TOKEN>"} );
# ```
#
# Your API token can be retrieved from the server as follows:
#
# ```bash
# $ curl -X POST -d "username=$USERNAME&password=$<PASSWORD>" http://diaryapi.auckland.ac.nz:8000/api-token-auth
# {"token":"<YOUR_API_TOKEN>"}
# ```
#
# Specific host and port values can also be provided:
#
# ```javascript
# var client = UoACalendarClient({ host: "example.org", port: 80, apiToken: "<YOUR_API_TOKEN>"});
# ```
#
constructor: (config) ->
{ @host, @port, @apiToken } = config if config?
@host ?= @DEFAULT_HOST
@port ?= @DEFAULT_PORT
# Accessing the client settings
# ----------------------------------------------------
# By calling `getHost()` the caller can retrieve the
# configured `host` used by the library
#
# ```javascript
# var host = client.getHost();
# ```
#
getHost: () ->
return @host
# By calling `getPort()` the caller can retrieve the
# configured `host` used by the library
#
# ```javascript
# var port = client.getPort();
# ```
#
getPort: () ->
return @port
# By calling `getApiToken()` the caller can retrieve the
# API token that the client uses for authenticating
#
# ```javascript
# var apiToken = client.getApiToken();
# ```
#
getApiToken: () ->
return @apiToken
#
# Interacting with the backend
# ----------------------------------------------------
#
# Generic method for sending a request to the calendar backend
#
sendRequest : (path, method, data, onSuccess, onError) ->
getCookie = (name) ->
nameEQ = name + "="
ca = document.cookie.split(";")
i = 0
while i < ca.length
c = ca[i]
c = c.substring(1, c.length) while c.charAt(0) is " "
return c.substring(nameEQ.length, c.length).replace(/"/g, '') if c.indexOf(nameEQ) is 0
i++
ca
getHeaders = () =>
if @apiToken
return {
'Accept': 'application/json'
'Content-Type': 'application/json'
'Authorization': 'JWT ' + @apiToken
}
else
return {
'Accept': 'application/json'
'Content-Type': 'application/json'
'X-CSRFToken': getCookie('csrftoken')
}
makeRequest = (path, method, data) =>
return {
host: @host
port: @port
scheme: 'http'
headers: getHeaders()
path: path
method: method
withCredentials: false
}
req = http.request(makeRequest(path, method, data), (res) ->
data = ''
res.on('data', (chunk) ->
data += chunk
)
res.on('end', () ->
if (('' + res.statusCode).match(/^2\d\d$/))
# Request handled, happy
if onSuccess then onSuccess(res, if data.length!=0 then JSON.parse(data) else {})
else
# Server error, I have no idea what happend in the backend
# but server at least returned correctly (in a HTTP protocol
# sense) formatted response
if onError then onError(res, data) else console.error(res)
)
)
req.on('error', (e) -> console.error(e))
req.on('timeout', () -> req.abort())
if data
req.write(JSON.stringify(data))
# Send request
req.end()
#
# Calendar objects management
# ===========================
#
# Retrieve my full list of calendars
#
# ```javascript
# client.listCalendars(
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized calendars dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
listCalendars : (onSuccess, onError) ->
@sendRequest('/calendars/', 'GET', 0, onSuccess, onError)
# Retrieve my full list of calendars
#
# ```javascript
# client.getCalendar(calendarId
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized calendar data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
getCalendar : (id, onSuccess, onError) ->
@sendRequest('/calendars/' + id + '/', 'GET', 0, onSuccess, onError)
# Add a new calendar providing the new calendar's name
#
# ```javascript
# client.addCalendar("Your Calendar Name",
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized new calendar data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
addCalendar: (name, onSuccess, onError) ->
@sendRequest('/calendars/', 'POST', {name: name}, onSuccess, onError)
# Delete an existing calendar given its ID
#
# ```javascript
# client.deleteCalendar(calendarId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized response data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
deleteCalendar: (id, onSuccess, onError) ->
@sendRequest('/calendars/' + id + '/', 'DELETE', {}, onSuccess, onError)
#
# Event objects management
# ===========================
#
# Retrieve the full list of events of an existing calendar given its ID
#
# ```javascript
# client.listEvents(calendarId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized events dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
listEvents: (calendarId, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/?format=json', 'GET', 0, onSuccess, onError)
# Add a new event to an existing calendar given its ID
#
# ```javascript
# client.listEvents(calendarId, { title: "Event Title", ... }
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized new event object data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
addEvent: (calendarId, event, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/', 'POST', event, onSuccess, onError)
# Delete an existing event from a calendar giving their IDs
#
# ```javascript
# client.deleteEvent(calendarId, eventId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized response data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
deleteEvent: (calendarId, eventId, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/' + eventId + '/', 'DELETE', 0, onSuccess, onError)
# Update an existing event from a calendar giving their IDs
#
# ```javascript
# client.updateEvent(calendarId, eventId, { title: "Event Title", ... }
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized updated event object data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
updateEvent: (calendarId, eventId, event, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/' + eventId + '/', 'PATCH', event, onSuccess, onError)
# Find events from an existing calendar within a given time range
#
# ```javascript
# client.updateEvent(calendarId, new Date(1977, 5, 25), Date.now(),
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized events dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
findEvents: (calendarId, startDate, endDate, onSuccess, onError) ->
toUTCString = (date) ->
return (new Date(date.getTime() + date.getTimezoneOffset() * 60000)).toISOString()
@sendRequest('/calendars/' + calendarId + '/find_events/?endAfter=' + toUTCString(startDate) + '&startBefore=' + toUTCString(endDate), 'GET', {}, onSuccess, onError)
exports.UoACalendarClient = UoACalendarClient
module.exports = (config) ->
return new UoACalendarClient(config)
| true | # uoacalendar-js
# ============================================
http = require "http"
# The UoACalendarClient class
# ----------------
#
# This class provides with all the necessary functionality to
# interact with the calendar backend for creating, retrieving
# and modifying calendar and event objects.
#
class UoACalendarClient
# Client settings
# ----------------
# `DEFAULT_HOST` specifies the default host used by the
# client as authentication server if no `host` configuration
# is specified during the library initialization. By default,
# the host points to the Gipsy-Danger API web server.
#
DEFAULT_HOST : 'diaryapi.auckland.ac.nz'
# `DEFAULT_PORT` specifies the default TCP port in the
# authentication server used by the client if no `port` configuration
# is specified during the library initialization.
#
DEFAULT_PORT : 345
# Initializing the client library
# ----------------------------------------------------
#
# To initialize the library you need to call the constructor,
# method, which takes as input a configuration object that
# can contain zero or more of the following fields:
#
# |Name|Value|Description|
# |----|-----|-----------|
# |`apiToken`|`String`|Sets the user's API token used for authentication pruposes. Required.|
# |`host`|`String`|Authentication server to which the client will connect. Should *NOT* include the URL schema as it defaults to `http`. Defaults to `DEFAULT_HOST`.|
# |`port`|TCP port number|TCP port from the host to which the client will connect. Defaults to `DEFAULT_PORT`|
#
#
# Example of initialization from a JavaScript client:
#
# ```javascript
# var client = new UoACalendarClient({ apiToken: "<YOUR_API_TOKEN>"} );
# ```
#
# Your API token can be retrieved from the server as follows:
#
# ```bash
# $ curl -X POST -d "username=$USERNAME&password=$PI:PASSWORD:<PASSWORD>END_PI" http://diaryapi.auckland.ac.nz:8000/api-token-auth
# {"token":"<YOUR_API_TOKEN>"}
# ```
#
# Specific host and port values can also be provided:
#
# ```javascript
# var client = UoACalendarClient({ host: "example.org", port: 80, apiToken: "<YOUR_API_TOKEN>"});
# ```
#
constructor: (config) ->
{ @host, @port, @apiToken } = config if config?
@host ?= @DEFAULT_HOST
@port ?= @DEFAULT_PORT
# Accessing the client settings
# ----------------------------------------------------
# By calling `getHost()` the caller can retrieve the
# configured `host` used by the library
#
# ```javascript
# var host = client.getHost();
# ```
#
getHost: () ->
return @host
# By calling `getPort()` the caller can retrieve the
# configured `host` used by the library
#
# ```javascript
# var port = client.getPort();
# ```
#
getPort: () ->
return @port
# By calling `getApiToken()` the caller can retrieve the
# API token that the client uses for authenticating
#
# ```javascript
# var apiToken = client.getApiToken();
# ```
#
getApiToken: () ->
return @apiToken
#
# Interacting with the backend
# ----------------------------------------------------
#
# Generic method for sending a request to the calendar backend
#
sendRequest : (path, method, data, onSuccess, onError) ->
getCookie = (name) ->
nameEQ = name + "="
ca = document.cookie.split(";")
i = 0
while i < ca.length
c = ca[i]
c = c.substring(1, c.length) while c.charAt(0) is " "
return c.substring(nameEQ.length, c.length).replace(/"/g, '') if c.indexOf(nameEQ) is 0
i++
ca
getHeaders = () =>
if @apiToken
return {
'Accept': 'application/json'
'Content-Type': 'application/json'
'Authorization': 'JWT ' + @apiToken
}
else
return {
'Accept': 'application/json'
'Content-Type': 'application/json'
'X-CSRFToken': getCookie('csrftoken')
}
makeRequest = (path, method, data) =>
return {
host: @host
port: @port
scheme: 'http'
headers: getHeaders()
path: path
method: method
withCredentials: false
}
req = http.request(makeRequest(path, method, data), (res) ->
data = ''
res.on('data', (chunk) ->
data += chunk
)
res.on('end', () ->
if (('' + res.statusCode).match(/^2\d\d$/))
# Request handled, happy
if onSuccess then onSuccess(res, if data.length!=0 then JSON.parse(data) else {})
else
# Server error, I have no idea what happend in the backend
# but server at least returned correctly (in a HTTP protocol
# sense) formatted response
if onError then onError(res, data) else console.error(res)
)
)
req.on('error', (e) -> console.error(e))
req.on('timeout', () -> req.abort())
if data
req.write(JSON.stringify(data))
# Send request
req.end()
#
# Calendar objects management
# ===========================
#
# Retrieve my full list of calendars
#
# ```javascript
# client.listCalendars(
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized calendars dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
listCalendars : (onSuccess, onError) ->
@sendRequest('/calendars/', 'GET', 0, onSuccess, onError)
# Retrieve my full list of calendars
#
# ```javascript
# client.getCalendar(calendarId
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized calendar data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
getCalendar : (id, onSuccess, onError) ->
@sendRequest('/calendars/' + id + '/', 'GET', 0, onSuccess, onError)
# Add a new calendar providing the new calendar's name
#
# ```javascript
# client.addCalendar("Your Calendar Name",
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized new calendar data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
addCalendar: (name, onSuccess, onError) ->
@sendRequest('/calendars/', 'POST', {name: name}, onSuccess, onError)
# Delete an existing calendar given its ID
#
# ```javascript
# client.deleteCalendar(calendarId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized response data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
deleteCalendar: (id, onSuccess, onError) ->
@sendRequest('/calendars/' + id + '/', 'DELETE', {}, onSuccess, onError)
#
# Event objects management
# ===========================
#
# Retrieve the full list of events of an existing calendar given its ID
#
# ```javascript
# client.listEvents(calendarId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized events dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
listEvents: (calendarId, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/?format=json', 'GET', 0, onSuccess, onError)
# Add a new event to an existing calendar given its ID
#
# ```javascript
# client.listEvents(calendarId, { title: "Event Title", ... }
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized new event object data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
addEvent: (calendarId, event, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/', 'POST', event, onSuccess, onError)
# Delete an existing event from a calendar giving their IDs
#
# ```javascript
# client.deleteEvent(calendarId, eventId,
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized response data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
deleteEvent: (calendarId, eventId, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/' + eventId + '/', 'DELETE', 0, onSuccess, onError)
# Update an existing event from a calendar giving their IDs
#
# ```javascript
# client.updateEvent(calendarId, eventId, { title: "Event Title", ... }
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized updated event object data
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
updateEvent: (calendarId, eventId, event, onSuccess, onError) ->
@sendRequest('/calendars/' + calendarId + '/events/' + eventId + '/', 'PATCH', event, onSuccess, onError)
# Find events from an existing calendar within a given time range
#
# ```javascript
# client.updateEvent(calendarId, new Date(1977, 5, 25), Date.now(),
# // onSuccess callback
# function(res, data) {
# // response
# console.log(res);
# // deserialized events dictionary
# console.log(data);
# },
# // onError callback
# function(res, data) {
# ...
# }
# );
# ```
#
findEvents: (calendarId, startDate, endDate, onSuccess, onError) ->
toUTCString = (date) ->
return (new Date(date.getTime() + date.getTimezoneOffset() * 60000)).toISOString()
@sendRequest('/calendars/' + calendarId + '/find_events/?endAfter=' + toUTCString(startDate) + '&startBefore=' + toUTCString(endDate), 'GET', {}, onSuccess, onError)
exports.UoACalendarClient = UoACalendarClient
module.exports = (config) ->
return new UoACalendarClient(config)
|
[
{
"context": "tfcode : ''\n username : generateRandomUsername()\n password : 'testpass'\n redire",
"end": 397,
"score": 0.7815880179405212,
"start": 375,
"tag": "USERNAME",
"value": "generateRandomUsername"
},
{
"context": "nerateRandomUser... | servers/testhelper/handler/loginhelper.coffee | ezgikaysi/koding | 1 | querystring = require 'querystring'
{ generateUrl
deepObjectExtend
generateRandomString
generateRandomUsername
generateRequestParamsEncodeBody } = require '../index'
generateLoginRequestBody = (opts = {}) ->
defaultBodyObject =
_csrf : generateRandomString()
token : ''
tfcode : ''
username : generateRandomUsername()
password : 'testpass'
redirect : ''
groupName : 'koding'
deepObjectExtend defaultBodyObject, opts
return defaultBodyObject
# overwrites given options in the default params
generateLoginRequestParams = (opts = {}) ->
body = generateLoginRequestBody()
params =
url : generateUrl { route : 'Login' }
body : body
csrfCookie : body._csrf
requestParams = generateRequestParamsEncodeBody params, opts
return requestParams
module.exports = {
generateLoginRequestBody
generateLoginRequestParams
}
| 16646 | querystring = require 'querystring'
{ generateUrl
deepObjectExtend
generateRandomString
generateRandomUsername
generateRequestParamsEncodeBody } = require '../index'
generateLoginRequestBody = (opts = {}) ->
defaultBodyObject =
_csrf : generateRandomString()
token : ''
tfcode : ''
username : generateRandomUsername()
password : '<PASSWORD>'
redirect : ''
groupName : 'koding'
deepObjectExtend defaultBodyObject, opts
return defaultBodyObject
# overwrites given options in the default params
generateLoginRequestParams = (opts = {}) ->
body = generateLoginRequestBody()
params =
url : generateUrl { route : 'Login' }
body : body
csrfCookie : body._csrf
requestParams = generateRequestParamsEncodeBody params, opts
return requestParams
module.exports = {
generateLoginRequestBody
generateLoginRequestParams
}
| true | querystring = require 'querystring'
{ generateUrl
deepObjectExtend
generateRandomString
generateRandomUsername
generateRequestParamsEncodeBody } = require '../index'
generateLoginRequestBody = (opts = {}) ->
defaultBodyObject =
_csrf : generateRandomString()
token : ''
tfcode : ''
username : generateRandomUsername()
password : 'PI:PASSWORD:<PASSWORD>END_PI'
redirect : ''
groupName : 'koding'
deepObjectExtend defaultBodyObject, opts
return defaultBodyObject
# overwrites given options in the default params
generateLoginRequestParams = (opts = {}) ->
body = generateLoginRequestBody()
params =
url : generateUrl { route : 'Login' }
body : body
csrfCookie : body._csrf
requestParams = generateRequestParamsEncodeBody params, opts
return requestParams
module.exports = {
generateLoginRequestBody
generateLoginRequestParams
}
|
[
{
"context": "ist2: ['one', 'two', 'three']\n person: {name: 'Alexander Schilling', job: 'Developer'}\n complex: [{name: 'Egon'},",
"end": 872,
"score": 0.9997644424438477,
"start": 853,
"tag": "NAME",
"value": "Alexander Schilling"
},
{
"context": "hilling', job: 'Developer'}\... | test/mocha/yaml.coffee | alinex/node-formatter | 0 | chai = require 'chai'
expect = chai.expect
### eslint-env node, mocha ###
fs = require 'fs'
debug = require('debug') 'test'
chalk = require 'chalk'
formatter = require '../../src/index'
describe "YAML", ->
file = __dirname + '/../data/format.yml'
format = 'yaml'
example = fs.readFileSync file, 'UTF8'
data =
null: null
boolean: true
string: 'test'
unicode: 'Sosa did fine.☺'
control: '\b1998\t1999\t2000\n'
'hex esc': '\r\n is \r\n'
single: '"Howdy!" he cried.'
quoted: ' # Not a \'comment\'.'
date: new Date '2016-05-10T19:06:36.909Z'
numberInt: -8
numberFloat: 5.6
octal: '0o14'
hexadecimal: 12
exponential: 1230.15
fixed: 1230.15
'negative infinity': -Infinity
'not a number': NaN
list: ['one', 'two', 'three']
list2: ['one', 'two', 'three']
person: {name: 'Alexander Schilling', job: 'Developer'}
complex: [{name: 'Egon'}, {name: 'Janina'}]
multiline: 'This text will be read as one line without linebreaks.'
multilineQuoted: 'This text will be read as one line without linebreaks.'
lineBreaks: 'This text will keep\nas it is and all line\nbreaks will be kept.\n'
lineSingle: 'This text will be read as one line without linebreaks.\n'
lineBreak: 'The empty line\nwill be a line break.\n'
address1: {city: 'Stuttgart'}
address2: {city: 'Stuttgart'}
numberString: '123'
numberString2: '123'
re: /\d+/
picture: new Buffer 'R0lGODdhDQAIAIAAAAAAANnZ2SwAAAAADQAIAAACF4SDGQar3xxbJ9p0qa7R0YxwzaFME1IAADs=', 'base64'
'Detroit Tigers,Chicago cubs': new Date '2001-07-23'
describe "parse preset file", ->
it "should get object", (cb) ->
formatter.parse example, format, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
it "should work with autodetect", (cb) ->
formatter.parse example, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
it "should work with filename", (cb) ->
formatter.parse example, file, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
describe "format and parse", ->
it "should reread object", (cb) ->
formatter.stringify data, format, (err, text) ->
expect(err, 'error').to.not.exist
expect(typeof text, 'type of result').to.equal 'string'
debug "result", chalk.grey text
formatter.parse text, format, (err, obj) ->
expect(obj, 'reread object').to.deep.equal data
cb()
| 86618 | chai = require 'chai'
expect = chai.expect
### eslint-env node, mocha ###
fs = require 'fs'
debug = require('debug') 'test'
chalk = require 'chalk'
formatter = require '../../src/index'
describe "YAML", ->
file = __dirname + '/../data/format.yml'
format = 'yaml'
example = fs.readFileSync file, 'UTF8'
data =
null: null
boolean: true
string: 'test'
unicode: 'Sosa did fine.☺'
control: '\b1998\t1999\t2000\n'
'hex esc': '\r\n is \r\n'
single: '"Howdy!" he cried.'
quoted: ' # Not a \'comment\'.'
date: new Date '2016-05-10T19:06:36.909Z'
numberInt: -8
numberFloat: 5.6
octal: '0o14'
hexadecimal: 12
exponential: 1230.15
fixed: 1230.15
'negative infinity': -Infinity
'not a number': NaN
list: ['one', 'two', 'three']
list2: ['one', 'two', 'three']
person: {name: '<NAME>', job: 'Developer'}
complex: [{name: '<NAME>'}, {name: '<NAME>'}]
multiline: 'This text will be read as one line without linebreaks.'
multilineQuoted: 'This text will be read as one line without linebreaks.'
lineBreaks: 'This text will keep\nas it is and all line\nbreaks will be kept.\n'
lineSingle: 'This text will be read as one line without linebreaks.\n'
lineBreak: 'The empty line\nwill be a line break.\n'
address1: {city: 'Stuttgart'}
address2: {city: 'Stuttgart'}
numberString: '123'
numberString2: '123'
re: /\d+/
picture: new Buffer 'R0lGODdhDQAIAIAAAAAAANnZ2SwAAAAADQAIAAACF4SDGQar3xxbJ9p0qa7R0YxwzaFME1IAADs=', 'base64'
'Detroit Tigers,Chicago cubs': new Date '2001-07-23'
describe "parse preset file", ->
it "should get object", (cb) ->
formatter.parse example, format, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
it "should work with autodetect", (cb) ->
formatter.parse example, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
it "should work with filename", (cb) ->
formatter.parse example, file, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
describe "format and parse", ->
it "should reread object", (cb) ->
formatter.stringify data, format, (err, text) ->
expect(err, 'error').to.not.exist
expect(typeof text, 'type of result').to.equal 'string'
debug "result", chalk.grey text
formatter.parse text, format, (err, obj) ->
expect(obj, 'reread object').to.deep.equal data
cb()
| true | chai = require 'chai'
expect = chai.expect
### eslint-env node, mocha ###
fs = require 'fs'
debug = require('debug') 'test'
chalk = require 'chalk'
formatter = require '../../src/index'
describe "YAML", ->
file = __dirname + '/../data/format.yml'
format = 'yaml'
example = fs.readFileSync file, 'UTF8'
data =
null: null
boolean: true
string: 'test'
unicode: 'Sosa did fine.☺'
control: '\b1998\t1999\t2000\n'
'hex esc': '\r\n is \r\n'
single: '"Howdy!" he cried.'
quoted: ' # Not a \'comment\'.'
date: new Date '2016-05-10T19:06:36.909Z'
numberInt: -8
numberFloat: 5.6
octal: '0o14'
hexadecimal: 12
exponential: 1230.15
fixed: 1230.15
'negative infinity': -Infinity
'not a number': NaN
list: ['one', 'two', 'three']
list2: ['one', 'two', 'three']
person: {name: 'PI:NAME:<NAME>END_PI', job: 'Developer'}
complex: [{name: 'PI:NAME:<NAME>END_PI'}, {name: 'PI:NAME:<NAME>END_PI'}]
multiline: 'This text will be read as one line without linebreaks.'
multilineQuoted: 'This text will be read as one line without linebreaks.'
lineBreaks: 'This text will keep\nas it is and all line\nbreaks will be kept.\n'
lineSingle: 'This text will be read as one line without linebreaks.\n'
lineBreak: 'The empty line\nwill be a line break.\n'
address1: {city: 'Stuttgart'}
address2: {city: 'Stuttgart'}
numberString: '123'
numberString2: '123'
re: /\d+/
picture: new Buffer 'R0lGODdhDQAIAIAAAAAAANnZ2SwAAAAADQAIAAACF4SDGQar3xxbJ9p0qa7R0YxwzaFME1IAADs=', 'base64'
'Detroit Tigers,Chicago cubs': new Date '2001-07-23'
describe "parse preset file", ->
it "should get object", (cb) ->
formatter.parse example, format, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
it "should work with autodetect", (cb) ->
formatter.parse example, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
it "should work with filename", (cb) ->
formatter.parse example, file, (err, obj) ->
expect(err, 'error').to.not.exist
expect(obj, 'object').to.deep.equal data
cb()
describe "format and parse", ->
it "should reread object", (cb) ->
formatter.stringify data, format, (err, text) ->
expect(err, 'error').to.not.exist
expect(typeof text, 'type of result').to.equal 'string'
debug "result", chalk.grey text
formatter.parse text, format, (err, obj) ->
expect(obj, 'reread object').to.deep.equal data
cb()
|
[
{
"context": " _ref: 'project/ref'\n Name: 'TestProject'\n workspace:\n ",
"end": 719,
"score": 0.6975902318954468,
"start": 715,
"tag": "NAME",
"value": "Test"
}
] | test/spec/roadmapplanningboard/RoadmapPlanningBoardAppSpec.coffee | sboles/app-catalog | 0 | Ext = window.Ext4 || window.Ext
Ext.require [
'Rally.test.apps.roadmapplanningboard.helper.TestDependencyHelper'
'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp'
'Rally.apps.roadmapplanningboard.SplashContainer'
'Rally.test.mock.ModelObjectMother'
]
describe 'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp', ->
helpers
createApp: (expectError = false, config = {}) ->
config = _.extend
alreadyGotIt: true
expectSplash: false
isAdmin: true
context: Ext.create 'Rally.app.Context',
initialValues:
Ext.merge
project:
ObjectID: 123456
_ref: 'project/ref'
Name: 'TestProject'
workspace:
WorkspaceConfiguration:
DragDropRankingEnabled: true
, {}
settings: {}
renderTo: 'testDiv'
, config
@stub Rally.apps.roadmapplanningboard.SplashContainer, 'loadPreference', =>
pref = {}
pref[Rally.apps.roadmapplanningboard.SplashContainer.PREFERENCE_NAME] = config.alreadyGotIt
deferred = new Deft.Deferred()
deferred.resolve pref
deferred.promise
@app = Ext.create 'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp', config
if expectError
@once
condition: => @errorNotifyStub.calledOnce
else
@waitForComponentReady(@app).then =>
if !config.expectSplash
@waitForComponentReady('#gridboard').then =>
@planningBoard = @app.down 'roadmapplanningboard'
else
deferred = Ext.create 'Deft.Deferred'
Ext.defer -> deferred.resolve()
deferred.promise
beforeEach ->
Rally.test.apps.roadmapplanningboard.helper.TestDependencyHelper.loadDependencies()
@isBrowserSupportedStub = @stub Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::, '_isSupportedBrowser', =>
true
@setBrowserPrefValue = @stub Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::, '_getBrowserPrefValue', =>
true
@timelineStore = Deft.Injector.resolve('timelineStore')
@roadmapStore = Deft.Injector.resolve('roadmapStore')
@errorNotifyStub = @stub Rally.ui.notify.Notifier, 'showError'
@ajax.whenQuerying('TypeDefinition').respondWith Rally.test.mock.data.WsapiModelFactory.getModelDefinition('PortfolioItemFeature')
@ajax.whenQuerying('PortfolioItem/Feature').respondWith []
afterEach ->
@app?.destroy()
Deft.Injector.reset()
it 'should use the provided context', ->
@createApp().then =>
expect(@app.getContext().getProject().ObjectID).toBe 123456
it 'should render a planning board with a timeline', ->
@createApp().then =>
expect(@planningBoard.timeline.getId()).toBe @timelineStore.first().getId()
it 'should render a planning board with a roadmap', ->
@createApp().then =>
expect(@planningBoard.timeline.getId()).toBe @timelineStore.first().getId()
it 'should define height based on content window', ->
Ext.DomHelper.append Ext.getBody(), '<div id="content" style="height: 600px;"><div class="page" style="height: 20px;"></div></div>'
@createApp().then =>
#test range as jasmine does not like to render html the same with local and test server
appHeight = @app._computeFullPagePanelContentAreaHeight()
expect(appHeight >= 570).toBe true
expect(appHeight <= 600).toBe true
it 'should define board height based on app height', ->
@createApp(false, {height: 1000}).then =>
#test range as jasmine does not like to render html the same with local and test server
boardHeight = @planningBoard.getHeight()
expect(boardHeight >= 950).toBe true
expect(boardHeight <= 1000).toBe true
it 'should notify of error if the timeline store fails to load', ->
@stub @timelineStore, 'load', ->
deferred = new Deft.promise.Deferred()
deferred.reject({storeServiceName: 'Timeline'});
deferred.promise
@createApp(true).then =>
expect(@errorNotifyStub.lastCall.args[0]).toEqual
message: 'Failed to load app: Timeline service data load issue'
it 'should notify of error if the roadmap store fails to load', ->
@stub @roadmapStore, 'load', ->
deferred = new Deft.promise.Deferred()
deferred.reject({storeServiceName: 'Planning'});
deferred.promise
@createApp(true).then =>
expect(@errorNotifyStub.lastCall.args[0]).toEqual
message: 'Failed to load app: Planning service data load issue'
it 'should show the splash container if there is no roadmap', ->
@roadmapStore.data.clear()
@stub @roadmapStore, 'load', ->
Deft.Promise.when { records: {} }
@createApp(false, {expectSplash: true}).then =>
expect(@app).not.toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the splash container if there is no timeline', ->
@timelineStore.data.clear()
@stub @timelineStore, 'load', ->
Deft.Promise.when { records: {} }
@createApp(false, {expectSplash: true}).then =>
expect(@app).not.toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the splash container if the preference is not set', ->
@createApp(false, {expectSplash: true, alreadyGotIt: false}).then =>
expect(@app).toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the gridboard after clicking the got it button', ->
@createApp(false, {expectSplash: true, alreadyGotIt: false}).then =>
@click(css: '.primary').then =>
@waitForComponentReady(@app.down('#gridboard')).then =>
expect(@app).toContainComponent '#gridboard'
describe 'Service error handling', ->
it 'should display a friendly notification if any service (planning, timeline, WSAPI) is unavailable', ->
@createApp().then =>
Ext.Ajax.fireEvent('requestexception', null, null, { operation:
requester: @app })
expect(@app.getEl().getHTML()).toContain 'temporarily unavailable'
describe '_isSupportedBrowser', ->
beforeEach ->
@isBrowserSupportedStub.restore()
userAgentStrings =
"Chrome 29": ["Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36", false]
"Chrome 33": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36", true]
"Chrome No Version": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome Safari/537.36", false]
"IE 10": ["Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0", true]
"IE 8": ["Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.2; SV1; .NET CLR 3.3.69573; WOW64; en-US)", false]
"Opera": ["Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14", false]
"Mac_Safari 6": ["Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25", true]
"Mac_Safari 5": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2", false]
"Firefox 28": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/28.0", true]
"Firefox 25": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0", false]
"Firefox No Version": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/", false]
"Empty String": ["", false]
"Random Gibberish": ["fiwuehfwieufhweiufhweiuf", false]
"Midori": ["Mozilla/5.0 (X11; U; Linux i686; fr-fr) AppleWebKit/525.1+ (KHTML, like Gecko, Safari/525.1+) midori/1.19", false]
_.each userAgentStrings, ([userAgent, isSupported], displayName) ->
it "should state that #{displayName} is #{if isSupported then 'supported' else 'unsupported'}", ->
window.navigator.__defineGetter__ 'userAgent', () -> userAgent
browserInfo = Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::_getBrowserInfo()
expect(Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::_isSupportedBrowser browserInfo).toBe isSupported
| 181510 | Ext = window.Ext4 || window.Ext
Ext.require [
'Rally.test.apps.roadmapplanningboard.helper.TestDependencyHelper'
'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp'
'Rally.apps.roadmapplanningboard.SplashContainer'
'Rally.test.mock.ModelObjectMother'
]
describe 'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp', ->
helpers
createApp: (expectError = false, config = {}) ->
config = _.extend
alreadyGotIt: true
expectSplash: false
isAdmin: true
context: Ext.create 'Rally.app.Context',
initialValues:
Ext.merge
project:
ObjectID: 123456
_ref: 'project/ref'
Name: '<NAME>Project'
workspace:
WorkspaceConfiguration:
DragDropRankingEnabled: true
, {}
settings: {}
renderTo: 'testDiv'
, config
@stub Rally.apps.roadmapplanningboard.SplashContainer, 'loadPreference', =>
pref = {}
pref[Rally.apps.roadmapplanningboard.SplashContainer.PREFERENCE_NAME] = config.alreadyGotIt
deferred = new Deft.Deferred()
deferred.resolve pref
deferred.promise
@app = Ext.create 'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp', config
if expectError
@once
condition: => @errorNotifyStub.calledOnce
else
@waitForComponentReady(@app).then =>
if !config.expectSplash
@waitForComponentReady('#gridboard').then =>
@planningBoard = @app.down 'roadmapplanningboard'
else
deferred = Ext.create 'Deft.Deferred'
Ext.defer -> deferred.resolve()
deferred.promise
beforeEach ->
Rally.test.apps.roadmapplanningboard.helper.TestDependencyHelper.loadDependencies()
@isBrowserSupportedStub = @stub Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::, '_isSupportedBrowser', =>
true
@setBrowserPrefValue = @stub Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::, '_getBrowserPrefValue', =>
true
@timelineStore = Deft.Injector.resolve('timelineStore')
@roadmapStore = Deft.Injector.resolve('roadmapStore')
@errorNotifyStub = @stub Rally.ui.notify.Notifier, 'showError'
@ajax.whenQuerying('TypeDefinition').respondWith Rally.test.mock.data.WsapiModelFactory.getModelDefinition('PortfolioItemFeature')
@ajax.whenQuerying('PortfolioItem/Feature').respondWith []
afterEach ->
@app?.destroy()
Deft.Injector.reset()
it 'should use the provided context', ->
@createApp().then =>
expect(@app.getContext().getProject().ObjectID).toBe 123456
it 'should render a planning board with a timeline', ->
@createApp().then =>
expect(@planningBoard.timeline.getId()).toBe @timelineStore.first().getId()
it 'should render a planning board with a roadmap', ->
@createApp().then =>
expect(@planningBoard.timeline.getId()).toBe @timelineStore.first().getId()
it 'should define height based on content window', ->
Ext.DomHelper.append Ext.getBody(), '<div id="content" style="height: 600px;"><div class="page" style="height: 20px;"></div></div>'
@createApp().then =>
#test range as jasmine does not like to render html the same with local and test server
appHeight = @app._computeFullPagePanelContentAreaHeight()
expect(appHeight >= 570).toBe true
expect(appHeight <= 600).toBe true
it 'should define board height based on app height', ->
@createApp(false, {height: 1000}).then =>
#test range as jasmine does not like to render html the same with local and test server
boardHeight = @planningBoard.getHeight()
expect(boardHeight >= 950).toBe true
expect(boardHeight <= 1000).toBe true
it 'should notify of error if the timeline store fails to load', ->
@stub @timelineStore, 'load', ->
deferred = new Deft.promise.Deferred()
deferred.reject({storeServiceName: 'Timeline'});
deferred.promise
@createApp(true).then =>
expect(@errorNotifyStub.lastCall.args[0]).toEqual
message: 'Failed to load app: Timeline service data load issue'
it 'should notify of error if the roadmap store fails to load', ->
@stub @roadmapStore, 'load', ->
deferred = new Deft.promise.Deferred()
deferred.reject({storeServiceName: 'Planning'});
deferred.promise
@createApp(true).then =>
expect(@errorNotifyStub.lastCall.args[0]).toEqual
message: 'Failed to load app: Planning service data load issue'
it 'should show the splash container if there is no roadmap', ->
@roadmapStore.data.clear()
@stub @roadmapStore, 'load', ->
Deft.Promise.when { records: {} }
@createApp(false, {expectSplash: true}).then =>
expect(@app).not.toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the splash container if there is no timeline', ->
@timelineStore.data.clear()
@stub @timelineStore, 'load', ->
Deft.Promise.when { records: {} }
@createApp(false, {expectSplash: true}).then =>
expect(@app).not.toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the splash container if the preference is not set', ->
@createApp(false, {expectSplash: true, alreadyGotIt: false}).then =>
expect(@app).toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the gridboard after clicking the got it button', ->
@createApp(false, {expectSplash: true, alreadyGotIt: false}).then =>
@click(css: '.primary').then =>
@waitForComponentReady(@app.down('#gridboard')).then =>
expect(@app).toContainComponent '#gridboard'
describe 'Service error handling', ->
it 'should display a friendly notification if any service (planning, timeline, WSAPI) is unavailable', ->
@createApp().then =>
Ext.Ajax.fireEvent('requestexception', null, null, { operation:
requester: @app })
expect(@app.getEl().getHTML()).toContain 'temporarily unavailable'
describe '_isSupportedBrowser', ->
beforeEach ->
@isBrowserSupportedStub.restore()
userAgentStrings =
"Chrome 29": ["Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36", false]
"Chrome 33": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36", true]
"Chrome No Version": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome Safari/537.36", false]
"IE 10": ["Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0", true]
"IE 8": ["Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.2; SV1; .NET CLR 3.3.69573; WOW64; en-US)", false]
"Opera": ["Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14", false]
"Mac_Safari 6": ["Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25", true]
"Mac_Safari 5": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2", false]
"Firefox 28": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/28.0", true]
"Firefox 25": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0", false]
"Firefox No Version": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/", false]
"Empty String": ["", false]
"Random Gibberish": ["fiwuehfwieufhweiufhweiuf", false]
"Midori": ["Mozilla/5.0 (X11; U; Linux i686; fr-fr) AppleWebKit/525.1+ (KHTML, like Gecko, Safari/525.1+) midori/1.19", false]
_.each userAgentStrings, ([userAgent, isSupported], displayName) ->
it "should state that #{displayName} is #{if isSupported then 'supported' else 'unsupported'}", ->
window.navigator.__defineGetter__ 'userAgent', () -> userAgent
browserInfo = Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::_getBrowserInfo()
expect(Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::_isSupportedBrowser browserInfo).toBe isSupported
| true | Ext = window.Ext4 || window.Ext
Ext.require [
'Rally.test.apps.roadmapplanningboard.helper.TestDependencyHelper'
'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp'
'Rally.apps.roadmapplanningboard.SplashContainer'
'Rally.test.mock.ModelObjectMother'
]
describe 'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp', ->
helpers
createApp: (expectError = false, config = {}) ->
config = _.extend
alreadyGotIt: true
expectSplash: false
isAdmin: true
context: Ext.create 'Rally.app.Context',
initialValues:
Ext.merge
project:
ObjectID: 123456
_ref: 'project/ref'
Name: 'PI:NAME:<NAME>END_PIProject'
workspace:
WorkspaceConfiguration:
DragDropRankingEnabled: true
, {}
settings: {}
renderTo: 'testDiv'
, config
@stub Rally.apps.roadmapplanningboard.SplashContainer, 'loadPreference', =>
pref = {}
pref[Rally.apps.roadmapplanningboard.SplashContainer.PREFERENCE_NAME] = config.alreadyGotIt
deferred = new Deft.Deferred()
deferred.resolve pref
deferred.promise
@app = Ext.create 'Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp', config
if expectError
@once
condition: => @errorNotifyStub.calledOnce
else
@waitForComponentReady(@app).then =>
if !config.expectSplash
@waitForComponentReady('#gridboard').then =>
@planningBoard = @app.down 'roadmapplanningboard'
else
deferred = Ext.create 'Deft.Deferred'
Ext.defer -> deferred.resolve()
deferred.promise
beforeEach ->
Rally.test.apps.roadmapplanningboard.helper.TestDependencyHelper.loadDependencies()
@isBrowserSupportedStub = @stub Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::, '_isSupportedBrowser', =>
true
@setBrowserPrefValue = @stub Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::, '_getBrowserPrefValue', =>
true
@timelineStore = Deft.Injector.resolve('timelineStore')
@roadmapStore = Deft.Injector.resolve('roadmapStore')
@errorNotifyStub = @stub Rally.ui.notify.Notifier, 'showError'
@ajax.whenQuerying('TypeDefinition').respondWith Rally.test.mock.data.WsapiModelFactory.getModelDefinition('PortfolioItemFeature')
@ajax.whenQuerying('PortfolioItem/Feature').respondWith []
afterEach ->
@app?.destroy()
Deft.Injector.reset()
it 'should use the provided context', ->
@createApp().then =>
expect(@app.getContext().getProject().ObjectID).toBe 123456
it 'should render a planning board with a timeline', ->
@createApp().then =>
expect(@planningBoard.timeline.getId()).toBe @timelineStore.first().getId()
it 'should render a planning board with a roadmap', ->
@createApp().then =>
expect(@planningBoard.timeline.getId()).toBe @timelineStore.first().getId()
it 'should define height based on content window', ->
Ext.DomHelper.append Ext.getBody(), '<div id="content" style="height: 600px;"><div class="page" style="height: 20px;"></div></div>'
@createApp().then =>
#test range as jasmine does not like to render html the same with local and test server
appHeight = @app._computeFullPagePanelContentAreaHeight()
expect(appHeight >= 570).toBe true
expect(appHeight <= 600).toBe true
it 'should define board height based on app height', ->
@createApp(false, {height: 1000}).then =>
#test range as jasmine does not like to render html the same with local and test server
boardHeight = @planningBoard.getHeight()
expect(boardHeight >= 950).toBe true
expect(boardHeight <= 1000).toBe true
it 'should notify of error if the timeline store fails to load', ->
@stub @timelineStore, 'load', ->
deferred = new Deft.promise.Deferred()
deferred.reject({storeServiceName: 'Timeline'});
deferred.promise
@createApp(true).then =>
expect(@errorNotifyStub.lastCall.args[0]).toEqual
message: 'Failed to load app: Timeline service data load issue'
it 'should notify of error if the roadmap store fails to load', ->
@stub @roadmapStore, 'load', ->
deferred = new Deft.promise.Deferred()
deferred.reject({storeServiceName: 'Planning'});
deferred.promise
@createApp(true).then =>
expect(@errorNotifyStub.lastCall.args[0]).toEqual
message: 'Failed to load app: Planning service data load issue'
it 'should show the splash container if there is no roadmap', ->
@roadmapStore.data.clear()
@stub @roadmapStore, 'load', ->
Deft.Promise.when { records: {} }
@createApp(false, {expectSplash: true}).then =>
expect(@app).not.toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the splash container if there is no timeline', ->
@timelineStore.data.clear()
@stub @timelineStore, 'load', ->
Deft.Promise.when { records: {} }
@createApp(false, {expectSplash: true}).then =>
expect(@app).not.toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the splash container if the preference is not set', ->
@createApp(false, {expectSplash: true, alreadyGotIt: false}).then =>
expect(@app).toContainComponent '#got-it'
expect(@app).toContainComponent '#roadmap-splash-container'
it 'should show the gridboard after clicking the got it button', ->
@createApp(false, {expectSplash: true, alreadyGotIt: false}).then =>
@click(css: '.primary').then =>
@waitForComponentReady(@app.down('#gridboard')).then =>
expect(@app).toContainComponent '#gridboard'
describe 'Service error handling', ->
it 'should display a friendly notification if any service (planning, timeline, WSAPI) is unavailable', ->
@createApp().then =>
Ext.Ajax.fireEvent('requestexception', null, null, { operation:
requester: @app })
expect(@app.getEl().getHTML()).toContain 'temporarily unavailable'
describe '_isSupportedBrowser', ->
beforeEach ->
@isBrowserSupportedStub.restore()
userAgentStrings =
"Chrome 29": ["Mozilla/5.0 (X11; CrOS i686 4319.74.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/29.0.1547.57 Safari/537.36", false]
"Chrome 33": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.152 Safari/537.36", true]
"Chrome No Version": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome Safari/537.36", false]
"IE 10": ["Mozilla/5.0 (compatible; MSIE 10.6; Windows NT 6.1; Trident/5.0; InfoPath.2; SLCC1; .NET CLR 3.0.4506.2152; .NET CLR 3.5.30729; .NET CLR 2.0.50727) 3gpp-gba UNTRUSTED/1.0", true]
"IE 8": ["Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0; GTB7.4; InfoPath.2; SV1; .NET CLR 3.3.69573; WOW64; en-US)", false]
"Opera": ["Mozilla/5.0 (Windows NT 6.0; rv:2.0) Gecko/20100101 Firefox/4.0 Opera 12.14", false]
"Mac_Safari 6": ["Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25", true]
"Mac_Safari 5": ["Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/537.13+ (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2", false]
"Firefox 28": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/28.0", true]
"Firefox 25": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0", false]
"Firefox No Version": ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/", false]
"Empty String": ["", false]
"Random Gibberish": ["fiwuehfwieufhweiufhweiuf", false]
"Midori": ["Mozilla/5.0 (X11; U; Linux i686; fr-fr) AppleWebKit/525.1+ (KHTML, like Gecko, Safari/525.1+) midori/1.19", false]
_.each userAgentStrings, ([userAgent, isSupported], displayName) ->
it "should state that #{displayName} is #{if isSupported then 'supported' else 'unsupported'}", ->
window.navigator.__defineGetter__ 'userAgent', () -> userAgent
browserInfo = Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::_getBrowserInfo()
expect(Rally.apps.roadmapplanningboard.RoadmapPlanningBoardApp::_isSupportedBrowser browserInfo).toBe isSupported
|
[
{
"context": "ate')\n \n config.shared_key = opts.shared_key ? '59b325af9e266d0285bc1f0840a5e89915a3105c36f19bae58f5176b15476d05'\n config.shared_key_header = opts.shared_key_hea",
"end": 512,
"score": 0.999778151512146,
"start": 448,
"tag": "KEY",
"value": "59b325af9e266d0285bc1f0840a5e8... | lib/configuration.coffee | mattinsler/keyless | 1 | path = require 'path'
walkabout = require 'walkabout'
module.exports = (opts) ->
config = {}
config.url = {
root: opts.root_url ? '/'
}
config.url.root = '/' + config.url.root unless config.url.root[0] is '/'
config.url.login = path.join(config.url.root, 'login')
config.url.logout = path.join(config.url.root, 'logout')
config.url.validate = path.join(config.url.root, 'validate')
config.shared_key = opts.shared_key ? '59b325af9e266d0285bc1f0840a5e89915a3105c36f19bae58f5176b15476d05'
config.shared_key_header = opts.shared_key_header ? 'x-keyless-sso'
config.session_key = opts.session_key ? 'keyless.sid'
config.session_secret = opts.session_secret ? 'b3dbc47c1cd6b210ab3312aa3804f47d07f15dd5ba50907b0bf5b49da8a02483'
config.force_ssl = opts.force_ssl ? (if process.env.NODE_ENV is 'production' then true else false)
config.authorized_callback_domains = opts.authorized_callback_domains ? [/.*/]
config.authorized_callback_domains = [config.authorized_callback_domains] unless Array.isArray(config.authorized_callback_domains)
config.querystring_callback_params = opts.querystring_callback_params or ['callback']
config.querystring_callback_params = [config.querystring_callback_params] unless Array.isArray(config.querystring_callback_params)
config.on_login = opts.on_login
config.ticket_store = opts.ticket_store
config.token_store = opts.token_store
if opts.defer_login_url?
config.defer_login_url = opts.defer_login_url
else if opts.login_html_file?
try
config.login_html = walkabout(opts.login_html_file).read_file_sync('utf8')
catch err
throw new Error('Error reading login_html_file (' + opts.login_html_file + '): ' + err.message)
else
throw new Error('Must pass either defer_login_url or login_html_file')
config
| 146738 | path = require 'path'
walkabout = require 'walkabout'
module.exports = (opts) ->
config = {}
config.url = {
root: opts.root_url ? '/'
}
config.url.root = '/' + config.url.root unless config.url.root[0] is '/'
config.url.login = path.join(config.url.root, 'login')
config.url.logout = path.join(config.url.root, 'logout')
config.url.validate = path.join(config.url.root, 'validate')
config.shared_key = opts.shared_key ? '<KEY>'
config.shared_key_header = opts.shared_key_header ? 'x-keyless-sso'
config.session_key = opts.session_key ? 'keyless.sid'
config.session_secret = opts.session_secret ? '<KEY>'
config.force_ssl = opts.force_ssl ? (if process.env.NODE_ENV is 'production' then true else false)
config.authorized_callback_domains = opts.authorized_callback_domains ? [/.*/]
config.authorized_callback_domains = [config.authorized_callback_domains] unless Array.isArray(config.authorized_callback_domains)
config.querystring_callback_params = opts.querystring_callback_params or ['callback']
config.querystring_callback_params = [config.querystring_callback_params] unless Array.isArray(config.querystring_callback_params)
config.on_login = opts.on_login
config.ticket_store = opts.ticket_store
config.token_store = opts.token_store
if opts.defer_login_url?
config.defer_login_url = opts.defer_login_url
else if opts.login_html_file?
try
config.login_html = walkabout(opts.login_html_file).read_file_sync('utf8')
catch err
throw new Error('Error reading login_html_file (' + opts.login_html_file + '): ' + err.message)
else
throw new Error('Must pass either defer_login_url or login_html_file')
config
| true | path = require 'path'
walkabout = require 'walkabout'
module.exports = (opts) ->
config = {}
config.url = {
root: opts.root_url ? '/'
}
config.url.root = '/' + config.url.root unless config.url.root[0] is '/'
config.url.login = path.join(config.url.root, 'login')
config.url.logout = path.join(config.url.root, 'logout')
config.url.validate = path.join(config.url.root, 'validate')
config.shared_key = opts.shared_key ? 'PI:KEY:<KEY>END_PI'
config.shared_key_header = opts.shared_key_header ? 'x-keyless-sso'
config.session_key = opts.session_key ? 'keyless.sid'
config.session_secret = opts.session_secret ? 'PI:KEY:<KEY>END_PI'
config.force_ssl = opts.force_ssl ? (if process.env.NODE_ENV is 'production' then true else false)
config.authorized_callback_domains = opts.authorized_callback_domains ? [/.*/]
config.authorized_callback_domains = [config.authorized_callback_domains] unless Array.isArray(config.authorized_callback_domains)
config.querystring_callback_params = opts.querystring_callback_params or ['callback']
config.querystring_callback_params = [config.querystring_callback_params] unless Array.isArray(config.querystring_callback_params)
config.on_login = opts.on_login
config.ticket_store = opts.ticket_store
config.token_store = opts.token_store
if opts.defer_login_url?
config.defer_login_url = opts.defer_login_url
else if opts.login_html_file?
try
config.login_html = walkabout(opts.login_html_file).read_file_sync('utf8')
catch err
throw new Error('Error reading login_html_file (' + opts.login_html_file + '): ' + err.message)
else
throw new Error('Must pass either defer_login_url or login_html_file')
config
|
[
{
"context": "oad precompiled handlebars templates.\n\n @author Sebastian Sachtleben\n###\ntemplates = \n\n ###\n Load template b",
"end": 103,
"score": 0.9998824596405029,
"start": 83,
"tag": "NAME",
"value": "Sebastian Sachtleben"
}
] | app/assets/javascripts/shared/templates.coffee | ssachtleben/herowar | 1 | ###
The templates helps to load precompiled handlebars templates.
@author Sebastian Sachtleben
###
templates =
###
Load template by name via our loader and return handlebars template function.
@param {String} The template name.
@return {Function} The precompiled handlebars template.
###
get: (name) ->
template = require name
Handlebars.template template if template?
return templates | 224267 | ###
The templates helps to load precompiled handlebars templates.
@author <NAME>
###
templates =
###
Load template by name via our loader and return handlebars template function.
@param {String} The template name.
@return {Function} The precompiled handlebars template.
###
get: (name) ->
template = require name
Handlebars.template template if template?
return templates | true | ###
The templates helps to load precompiled handlebars templates.
@author PI:NAME:<NAME>END_PI
###
templates =
###
Load template by name via our loader and return handlebars template function.
@param {String} The template name.
@return {Function} The precompiled handlebars template.
###
get: (name) ->
template = require name
Handlebars.template template if template?
return templates |
[
{
"context": " : 'StoreCloud'\n 'owner_email' : 'john@koding.com'\n 'owner_name' : 'John Smith'\n ",
"end": 300,
"score": 0.9999294877052307,
"start": 285,
"tag": "EMAIL",
"value": "john@koding.com"
},
{
"context": "'john@koding.com'\n 'owner_name... | servers/lib/server/handlers/api/gitlab/_sampledata.coffee | lionheart1022/koding | 0 | module.exports = {
'project_create' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_create'
'name' : 'StoreCloud'
'owner_email' : 'john@koding.com'
'owner_name' : 'John Smith'
'path' : 'storecloud'
'path_with_namespace' : 'jsmith/storecloud'
'project_id' : 74
'project_visibility' : 'private'
}
'project_destroy' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_destroy'
'name' : 'Underscore'
'owner_email' : 'john@koding.com'
'owner_name' : 'John Smith'
'path' : 'underscore'
'path_with_namespace' : 'jsmith/underscore'
'project_id' : 73
'project_visibility' : 'internal'
}
'project_rename' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_rename'
'name' : 'Underscore'
'path' : 'underscore'
'path_with_namespace' : 'jsmith/underscore'
'project_id' : 73
'owner_name' : 'John Smith'
'owner_email' : 'john@koding.com'
'project_visibility' : 'internal'
'old_path_with_namespace': 'jsmith/overscore'
}
'project_transfer' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_transfer'
'name' : 'Underscore'
'path' : 'underscore'
'path_with_namespace' : 'scores/underscore'
'project_id' : 73
'owner_name' : 'John Smith'
'owner_email' : 'john@koding.com'
'project_visibility' : 'internal'
'old_path_with_namespace': 'jsmith/overscore'
}
# ----------------
'user_add_to_team' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_add_to_team'
'project_access' : 'Master'
'project_id' : 74
'project_name' : 'StoreCloud'
'project_path' : 'storecloud'
'project_path_with_namespace' : 'jsmith/storecloud'
'user_email' : 'john@koding.com'
'user_name' : 'John Smith'
'user_username' : 'johnsmith'
'user_id' : 41
'project_visibility' : 'private'
}
'user_remove_from_team' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_remove_from_team'
'project_access' : 'Master'
'project_id' : 74
'project_name' : 'StoreCloud'
'project_path' : 'storecloud'
'project_path_with_namespace' : 'jsmith/storecloud'
'user_email' : 'john@koding.com'
'user_name' : 'John Smith'
'user_username' : 'johnsmith'
'user_id' : 41
'project_visibility' : 'private'
}
'user_add_to_group' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_add_to_group'
'group_access' : 'Master'
'group_id' : 78
'group_name' : 'StoreCloud'
'group_path' : 'storecloud'
'user_email' : 'john@koding.com'
'user_name' : 'John Smith'
'user_username' : 'johnsmith'
'user_id' : 41
}
'user_remove_from_group' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_remove_from_group'
'group_access' : 'Master'
'group_id' : 78
'group_name' : 'StoreCloud'
'group_path' : 'storecloud'
'user_email' : 'john@koding.com'
'user_name' : 'John Smith'
'user_username' : 'johnsmith'
'user_id' : 41
}
'user_create' : {
'created_at' : '2012-07-21T07:44:07Z'
'updated_at' : '2012-07-21T07:38:22Z'
'email' : 'john@koding.com'
'event_name' : 'user_create'
'name' : 'John Smith'
'username' : 'john'
'user_id' : 41
}
'user_destroy' : {
'created_at' : '2012-07-21T07:44:07Z'
'updated_at' : '2012-07-21T07:38:22Z'
'email' : 'john@koding.com'
'event_name' : 'user_destroy'
'name' : 'John Smith'
'username' : 'john'
'user_id' : 41
}
# ----------------
'key_create' : {
'event_name' : 'key_create'
'created_at' : '2014-08-18 18:45:16 UTC'
'updated_at' : '2012-07-21T07:38:22Z'
'username' : 'root'
'key' : 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC58FwqHUbebw2SdT7SP4FxZ0w+lAO/erhy2ylhlcW/tZ3GY3mBu9VeeiSGoGz8hCx80Zrz+aQv28xfFfKlC8XQFpCWwsnWnQqO2Lv9bS8V1fIHgMxOHIt5Vs+9CAWGCCvUOAurjsUDoE2ALIXLDMKnJxcxD13XjWdK54j6ZXDB4syLF0C2PnAQSVY9X7MfCYwtuFmhQhKaBussAXpaVMRHltie3UYSBUUuZaB3J4cg/7TxlmxcNd+ppPRIpSZAB0NI6aOnqoBCpimscO/VpQRJMVLr3XiSYeT6HBiDXWHnIVPfQc03OGcaFqOit6p8lYKMaP/iUQLm+pgpZqrXZ9vB john@localhost'
'id' : 4
}
'key_destroy' : {
'event_name' : 'key_destroy'
'created_at' : '2014-08-18 18:45:16 UTC'
'updated_at' : '2012-07-21T07:38:22Z'
'username' : 'root'
'key' : 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC58FwqHUbebw2SdT7SP4FxZ0w+lAO/erhy2ylhlcW/tZ3GY3mBu9VeeiSGoGz8hCx80Zrz+aQv28xfFfKlC8XQFpCWwsnWnQqO2Lv9bS8V1fIHgMxOHIt5Vs+9CAWGCCvUOAurjsUDoE2ALIXLDMKnJxcxD13XjWdK54j6ZXDB4syLF0C2PnAQSVY9X7MfCYwtuFmhQhKaBussAXpaVMRHltie3UYSBUUuZaB3J4cg/7TxlmxcNd+ppPRIpSZAB0NI6aOnqoBCpimscO/VpQRJMVLr3XiSYeT6HBiDXWHnIVPfQc03OGcaFqOit6p8lYKMaP/iUQLm+pgpZqrXZ9vB john@localhost'
'id' : 4
}
# ----------------
'group_create' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'group_create'
'name' : 'StoreCloud'
'owner_email' : 'john@koding.com'
'owner_name' : 'John Smith'
'path' : 'storecloud'
'group_id' : 78
}
'group_destroy' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'group_destroy'
'name' : 'StoreCloud'
'owner_email' : 'john@koding.com'
'owner_name' : 'John Smith'
'path' : 'storecloud'
'group_id' : 78
}
# ----------------
'tag_push' : {
'event_name' : 'tag_push'
'before' : '0000000000000000000000000000000000000000'
'after' : '82b3d5ae55f7080f1e6022629cdb57bfae7cccc7'
'ref' : 'refs/tags/v1.0.0'
'checkout_sha' : '5937ac0a7beb003549fc5fd26fc247adbce4a52e'
'user_id' : 1
'user_name' : 'John Smith'
'user_avatar' : 'https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80'
'project_id' : 1
'project' : {
'name' : 'Example'
'description' : ''
'web_url' : 'http://example.com/jsmith/example'
'avatar_url' : null
'git_ssh_url' : 'git@example.com:jsmith/example.git'
'git_http_url' : 'http://example.com/jsmith/example.git'
'namespace' : 'Jsmith'
'visibility_level' : 0
'path_with_namespace' : 'jsmith/example'
'default_branch' : 'master'
'homepage' : 'http://example.com/jsmith/example'
'url' : 'git@example.com:jsmith/example.git'
'ssh_url' : 'git@example.com:jsmith/example.git'
'http_url' : 'http://example.com/jsmith/example.git'
}
'repository' : {
'name' : 'Example'
'url' : 'ssh://git@example.com/jsmith/example.git'
'description' : ''
'homepage' : 'http://example.com/jsmith/example'
'git_http_url' : 'http://example.com/jsmith/example.git'
'git_ssh_url' : 'git@example.com:jsmith/example.git'
'visibility_level' : 0
}
'commits' : []
'total_commits_count' : 0
}
# --------------
'push' : {
'event_name' : 'push'
'before' : '95790bf891e76fee5e1747ab589903a6a1f80f22'
'after' : 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7'
'ref' : 'refs/heads/master'
'checkout_sha' : 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7'
'user_id' : 4
'user_name' : 'John Smith'
'user_email' : 'john@koding.com'
'user_avatar' : 'https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80'
'project_id' : 15
'project' : {
'name' : 'Diaspora'
'description' : 'diaspora description'
'web_url' : 'http://example.com/mike/diaspora'
'avatar_url' : null
'git_ssh_url' : 'git@example.com:mike/diaspora.git'
'git_http_url' : 'http://example.com/mike/diaspora.git'
'namespace' : 'Mike'
'visibility_level' : 0
'path_with_namespace' : 'mike/diaspora'
'default_branch' : 'master'
'homepage' : 'http://example.com/mike/diaspora'
'url' : 'git@example.com:mike/diaspora.git'
'ssh_url' : 'git@example.com:mike/diaspora.git'
'http_url' : 'http://example.com/mike/diaspora.git'
}
'repository' : {
'name' : 'Diaspora'
'url' : 'git@example.com:mike/diaspora.git'
'description' : 'diaspora description'
'homepage' : 'http://example.com/mike/diaspora'
'git_http_url' : 'http://example.com/mike/diaspora.git'
'git_ssh_url' : 'git@example.com:mike/diaspora.git'
'visibility_level' : 0
}
'commits' : []
'total_commits_count' : 0
}
}
| 85713 | module.exports = {
'project_create' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_create'
'name' : 'StoreCloud'
'owner_email' : '<EMAIL>'
'owner_name' : '<NAME>'
'path' : 'storecloud'
'path_with_namespace' : 'jsmith/storecloud'
'project_id' : 74
'project_visibility' : 'private'
}
'project_destroy' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_destroy'
'name' : 'Underscore'
'owner_email' : '<EMAIL>'
'owner_name' : '<NAME>'
'path' : 'underscore'
'path_with_namespace' : 'jsmith/underscore'
'project_id' : 73
'project_visibility' : 'internal'
}
'project_rename' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_rename'
'name' : 'Underscore'
'path' : 'underscore'
'path_with_namespace' : 'jsmith/underscore'
'project_id' : 73
'owner_name' : '<NAME>'
'owner_email' : '<EMAIL>'
'project_visibility' : 'internal'
'old_path_with_namespace': 'jsmith/overscore'
}
'project_transfer' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_transfer'
'name' : 'Underscore'
'path' : 'underscore'
'path_with_namespace' : 'scores/underscore'
'project_id' : 73
'owner_name' : '<NAME>'
'owner_email' : '<EMAIL>'
'project_visibility' : 'internal'
'old_path_with_namespace': 'jsmith/overscore'
}
# ----------------
'user_add_to_team' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_add_to_team'
'project_access' : 'Master'
'project_id' : 74
'project_name' : 'StoreCloud'
'project_path' : 'storecloud'
'project_path_with_namespace' : 'jsmith/storecloud'
'user_email' : '<EMAIL>'
'user_name' : '<NAME>'
'user_username' : 'johnsmith'
'user_id' : 41
'project_visibility' : 'private'
}
'user_remove_from_team' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_remove_from_team'
'project_access' : 'Master'
'project_id' : 74
'project_name' : 'StoreCloud'
'project_path' : 'storecloud'
'project_path_with_namespace' : 'jsmith/storecloud'
'user_email' : '<EMAIL>'
'user_name' : '<NAME>'
'user_username' : 'johnsmith'
'user_id' : 41
'project_visibility' : 'private'
}
'user_add_to_group' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_add_to_group'
'group_access' : 'Master'
'group_id' : 78
'group_name' : 'StoreCloud'
'group_path' : 'storecloud'
'user_email' : '<EMAIL>'
'user_name' : '<NAME>'
'user_username' : 'johnsmith'
'user_id' : 41
}
'user_remove_from_group' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_remove_from_group'
'group_access' : 'Master'
'group_id' : 78
'group_name' : 'StoreCloud'
'group_path' : 'storecloud'
'user_email' : '<EMAIL>'
'user_name' : '<NAME>'
'user_username' : 'johnsmith'
'user_id' : 41
}
'user_create' : {
'created_at' : '2012-07-21T07:44:07Z'
'updated_at' : '2012-07-21T07:38:22Z'
'email' : '<EMAIL>'
'event_name' : 'user_create'
'name' : '<NAME>'
'username' : 'john'
'user_id' : 41
}
'user_destroy' : {
'created_at' : '2012-07-21T07:44:07Z'
'updated_at' : '2012-07-21T07:38:22Z'
'email' : '<EMAIL>'
'event_name' : 'user_destroy'
'name' : '<NAME>'
'username' : 'john'
'user_id' : 41
}
# ----------------
'key_create' : {
'event_name' : 'key_create'
'created_at' : '2014-08-18 18:45:16 UTC'
'updated_at' : '2012-07-21T07:38:22Z'
'username' : 'root'
'key' : '<KEY> john@localhost'
'id' : 4
}
'key_destroy' : {
'event_name' : 'key_destroy'
'created_at' : '2014-08-18 18:45:16 UTC'
'updated_at' : '2012-07-21T07:38:22Z'
'username' : 'root'
'key' : 'ssh-rsa A<KEY>B john@localhost'
'id' : 4
}
# ----------------
'group_create' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'group_create'
'name' : 'StoreCloud'
'owner_email' : '<EMAIL>'
'owner_name' : '<NAME>'
'path' : 'storecloud'
'group_id' : 78
}
'group_destroy' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'group_destroy'
'name' : 'StoreCloud'
'owner_email' : '<EMAIL>'
'owner_name' : '<NAME>'
'path' : 'storecloud'
'group_id' : 78
}
# ----------------
'tag_push' : {
'event_name' : 'tag_push'
'before' : '0000000000000000000000000000000000000000'
'after' : '82b3d5ae55f7080f1e6022629cdb57bfae7cccc7'
'ref' : 'refs/tags/v1.0.0'
'checkout_sha' : '5937ac0a7beb003549fc5fd26fc247adbce4a52e'
'user_id' : 1
'user_name' : '<NAME>'
'user_avatar' : 'https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80'
'project_id' : 1
'project' : {
'name' : '<NAME>'
'description' : ''
'web_url' : 'http://example.com/jsmith/example'
'avatar_url' : null
'git_ssh_url' : '<EMAIL>:jsmith/example.git'
'git_http_url' : 'http://example.com/jsmith/example.git'
'namespace' : 'Jsmith'
'visibility_level' : 0
'path_with_namespace' : 'jsmith/example'
'default_branch' : 'master'
'homepage' : 'http://example.com/jsmith/example'
'url' : '<EMAIL>:jsmith/example.git'
'ssh_url' : '<EMAIL>:jsmith/example.git'
'http_url' : 'http://example.com/jsmith/example.git'
}
'repository' : {
'name' : '<NAME>'
'url' : 'ssh://git@example.com/jsmith/example.git'
'description' : ''
'homepage' : 'http://example.com/jsmith/example'
'git_http_url' : 'http://example.com/jsmith/example.git'
'git_ssh_url' : '<EMAIL>:jsmith/example.git'
'visibility_level' : 0
}
'commits' : []
'total_commits_count' : 0
}
# --------------
'push' : {
'event_name' : 'push'
'before' : '95790bf891e76fee5e1747ab589903a6a1f80f22'
'after' : 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7'
'ref' : 'refs/heads/master'
'checkout_sha' : 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7'
'user_id' : 4
'user_name' : '<NAME>'
'user_email' : '<EMAIL>'
'user_avatar' : 'https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80'
'project_id' : 15
'project' : {
'name' : '<NAME>'
'description' : 'diaspora description'
'web_url' : 'http://example.com/mike/diaspora'
'avatar_url' : null
'git_ssh_url' : 'git@example.com:mike/diaspora.git'
'git_http_url' : 'http://example.com/mike/diaspora.git'
'namespace' : 'Mike'
'visibility_level' : 0
'path_with_namespace' : 'mike/diaspora'
'default_branch' : 'master'
'homepage' : 'http://example.com/mike/diaspora'
'url' : '<EMAIL>:mike/diaspora.git'
'ssh_url' : 'git<EMAIL>:mike/diaspora.git'
'http_url' : 'http://example.com/mike/diaspora.git'
}
'repository' : {
'name' : '<NAME>'
'url' : '<EMAIL>:mike/diaspora.git'
'description' : 'diaspora description'
'homepage' : 'http://example.com/mike/diaspora'
'git_http_url' : 'http://example.com/mike/diaspora.git'
'git_ssh_url' : '<EMAIL>:mike/diaspora.git'
'visibility_level' : 0
}
'commits' : []
'total_commits_count' : 0
}
}
| true | module.exports = {
'project_create' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_create'
'name' : 'StoreCloud'
'owner_email' : 'PI:EMAIL:<EMAIL>END_PI'
'owner_name' : 'PI:NAME:<NAME>END_PI'
'path' : 'storecloud'
'path_with_namespace' : 'jsmith/storecloud'
'project_id' : 74
'project_visibility' : 'private'
}
'project_destroy' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_destroy'
'name' : 'Underscore'
'owner_email' : 'PI:EMAIL:<EMAIL>END_PI'
'owner_name' : 'PI:NAME:<NAME>END_PI'
'path' : 'underscore'
'path_with_namespace' : 'jsmith/underscore'
'project_id' : 73
'project_visibility' : 'internal'
}
'project_rename' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_rename'
'name' : 'Underscore'
'path' : 'underscore'
'path_with_namespace' : 'jsmith/underscore'
'project_id' : 73
'owner_name' : 'PI:NAME:<NAME>END_PI'
'owner_email' : 'PI:EMAIL:<EMAIL>END_PI'
'project_visibility' : 'internal'
'old_path_with_namespace': 'jsmith/overscore'
}
'project_transfer' : {
'created_at' : '2012-07-21T07:30:58Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'project_transfer'
'name' : 'Underscore'
'path' : 'underscore'
'path_with_namespace' : 'scores/underscore'
'project_id' : 73
'owner_name' : 'PI:NAME:<NAME>END_PI'
'owner_email' : 'PI:EMAIL:<EMAIL>END_PI'
'project_visibility' : 'internal'
'old_path_with_namespace': 'jsmith/overscore'
}
# ----------------
'user_add_to_team' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_add_to_team'
'project_access' : 'Master'
'project_id' : 74
'project_name' : 'StoreCloud'
'project_path' : 'storecloud'
'project_path_with_namespace' : 'jsmith/storecloud'
'user_email' : 'PI:EMAIL:<EMAIL>END_PI'
'user_name' : 'PI:NAME:<NAME>END_PI'
'user_username' : 'johnsmith'
'user_id' : 41
'project_visibility' : 'private'
}
'user_remove_from_team' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_remove_from_team'
'project_access' : 'Master'
'project_id' : 74
'project_name' : 'StoreCloud'
'project_path' : 'storecloud'
'project_path_with_namespace' : 'jsmith/storecloud'
'user_email' : 'PI:EMAIL:<EMAIL>END_PI'
'user_name' : 'PI:NAME:<NAME>END_PI'
'user_username' : 'johnsmith'
'user_id' : 41
'project_visibility' : 'private'
}
'user_add_to_group' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_add_to_group'
'group_access' : 'Master'
'group_id' : 78
'group_name' : 'StoreCloud'
'group_path' : 'storecloud'
'user_email' : 'PI:EMAIL:<EMAIL>END_PI'
'user_name' : 'PI:NAME:<NAME>END_PI'
'user_username' : 'johnsmith'
'user_id' : 41
}
'user_remove_from_group' : {
'created_at' : '2012-07-21T07:30:56Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'user_remove_from_group'
'group_access' : 'Master'
'group_id' : 78
'group_name' : 'StoreCloud'
'group_path' : 'storecloud'
'user_email' : 'PI:EMAIL:<EMAIL>END_PI'
'user_name' : 'PI:NAME:<NAME>END_PI'
'user_username' : 'johnsmith'
'user_id' : 41
}
'user_create' : {
'created_at' : '2012-07-21T07:44:07Z'
'updated_at' : '2012-07-21T07:38:22Z'
'email' : 'PI:EMAIL:<EMAIL>END_PI'
'event_name' : 'user_create'
'name' : 'PI:NAME:<NAME>END_PI'
'username' : 'john'
'user_id' : 41
}
'user_destroy' : {
'created_at' : '2012-07-21T07:44:07Z'
'updated_at' : '2012-07-21T07:38:22Z'
'email' : 'PI:EMAIL:<EMAIL>END_PI'
'event_name' : 'user_destroy'
'name' : 'PI:NAME:<NAME>END_PI'
'username' : 'john'
'user_id' : 41
}
# ----------------
'key_create' : {
'event_name' : 'key_create'
'created_at' : '2014-08-18 18:45:16 UTC'
'updated_at' : '2012-07-21T07:38:22Z'
'username' : 'root'
'key' : 'PI:KEY:<KEY>END_PI john@localhost'
'id' : 4
}
'key_destroy' : {
'event_name' : 'key_destroy'
'created_at' : '2014-08-18 18:45:16 UTC'
'updated_at' : '2012-07-21T07:38:22Z'
'username' : 'root'
'key' : 'ssh-rsa API:KEY:<KEY>END_PIB john@localhost'
'id' : 4
}
# ----------------
'group_create' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'group_create'
'name' : 'StoreCloud'
'owner_email' : 'PI:EMAIL:<EMAIL>END_PI'
'owner_name' : 'PI:NAME:<NAME>END_PI'
'path' : 'storecloud'
'group_id' : 78
}
'group_destroy' : {
'created_at' : '2012-07-21T07:30:54Z'
'updated_at' : '2012-07-21T07:38:22Z'
'event_name' : 'group_destroy'
'name' : 'StoreCloud'
'owner_email' : 'PI:EMAIL:<EMAIL>END_PI'
'owner_name' : 'PI:NAME:<NAME>END_PI'
'path' : 'storecloud'
'group_id' : 78
}
# ----------------
'tag_push' : {
'event_name' : 'tag_push'
'before' : '0000000000000000000000000000000000000000'
'after' : '82b3d5ae55f7080f1e6022629cdb57bfae7cccc7'
'ref' : 'refs/tags/v1.0.0'
'checkout_sha' : '5937ac0a7beb003549fc5fd26fc247adbce4a52e'
'user_id' : 1
'user_name' : 'PI:NAME:<NAME>END_PI'
'user_avatar' : 'https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80'
'project_id' : 1
'project' : {
'name' : 'PI:NAME:<NAME>END_PI'
'description' : ''
'web_url' : 'http://example.com/jsmith/example'
'avatar_url' : null
'git_ssh_url' : 'PI:EMAIL:<EMAIL>END_PI:jsmith/example.git'
'git_http_url' : 'http://example.com/jsmith/example.git'
'namespace' : 'Jsmith'
'visibility_level' : 0
'path_with_namespace' : 'jsmith/example'
'default_branch' : 'master'
'homepage' : 'http://example.com/jsmith/example'
'url' : 'PI:EMAIL:<EMAIL>END_PI:jsmith/example.git'
'ssh_url' : 'PI:EMAIL:<EMAIL>END_PI:jsmith/example.git'
'http_url' : 'http://example.com/jsmith/example.git'
}
'repository' : {
'name' : 'PI:NAME:<NAME>END_PI'
'url' : 'ssh://git@example.com/jsmith/example.git'
'description' : ''
'homepage' : 'http://example.com/jsmith/example'
'git_http_url' : 'http://example.com/jsmith/example.git'
'git_ssh_url' : 'PI:EMAIL:<EMAIL>END_PI:jsmith/example.git'
'visibility_level' : 0
}
'commits' : []
'total_commits_count' : 0
}
# --------------
'push' : {
'event_name' : 'push'
'before' : '95790bf891e76fee5e1747ab589903a6a1f80f22'
'after' : 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7'
'ref' : 'refs/heads/master'
'checkout_sha' : 'da1560886d4f094c3e6c9ef40349f7d38b5d27d7'
'user_id' : 4
'user_name' : 'PI:NAME:<NAME>END_PI'
'user_email' : 'PI:EMAIL:<EMAIL>END_PI'
'user_avatar' : 'https://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=8://s.gravatar.com/avatar/d4c74594d841139328695756648b6bd6?s=80'
'project_id' : 15
'project' : {
'name' : 'PI:NAME:<NAME>END_PI'
'description' : 'diaspora description'
'web_url' : 'http://example.com/mike/diaspora'
'avatar_url' : null
'git_ssh_url' : 'git@example.com:mike/diaspora.git'
'git_http_url' : 'http://example.com/mike/diaspora.git'
'namespace' : 'Mike'
'visibility_level' : 0
'path_with_namespace' : 'mike/diaspora'
'default_branch' : 'master'
'homepage' : 'http://example.com/mike/diaspora'
'url' : 'PI:EMAIL:<EMAIL>END_PI:mike/diaspora.git'
'ssh_url' : 'gitPI:EMAIL:<EMAIL>END_PI:mike/diaspora.git'
'http_url' : 'http://example.com/mike/diaspora.git'
}
'repository' : {
'name' : 'PI:NAME:<NAME>END_PI'
'url' : 'PI:EMAIL:<EMAIL>END_PI:mike/diaspora.git'
'description' : 'diaspora description'
'homepage' : 'http://example.com/mike/diaspora'
'git_http_url' : 'http://example.com/mike/diaspora.git'
'git_ssh_url' : 'PI:EMAIL:<EMAIL>END_PI:mike/diaspora.git'
'visibility_level' : 0
}
'commits' : []
'total_commits_count' : 0
}
}
|
[
{
"context": "entation of the List abstract data type.\n @author Mads Hartmann Jensen (mads379@gmail.com)\n### \n\nmugs.provide('mugs.List",
"end": 111,
"score": 0.9998427629470825,
"start": 91,
"tag": "NAME",
"value": "Mads Hartmann Jensen"
},
{
"context": "stract data type.\n @au... | src/List.coffee | mads-hartmann/mugs | 1 | ###*
@fileoverview Contains the implementation of the List abstract data type.
@author Mads Hartmann Jensen (mads379@gmail.com)
###
mugs.provide('mugs.List')
mugs.require("mugs.Some")
mugs.require("mugs.None")
###*
List provides the implementation of the abstract data type List based on a Singly-Linked list. The
list contains the following operations:
<pre>
append(item) O(n)
prepend(item) O(1)
update(index, item ) O(n)
get(index ) O(n)
remove(index) O(n)
foldLeft(seed)(f) O(n*O(f))
foldRight(seed)(f) O(n*O(f))
forEach(f) O(n*O(f))
insert(item) O(n)
last() O(n)
first() O(1)
reverse() O(n)
appendAll(items) O(n)*items
prependAll(items) O(items)
</pre>
@public
@class List provides the implementation of the abstract data type List based on a Singly-Linked list
@augments mugs.Indexed
@example
var list = new mugs.List([1,2,3,4,5,6,7,8,9,10]);
@argument items An array of items to construct the List from
###
mugs.List = (items) ->
if not items? || items.length == 0
this.head = () -> throw new Error("Can't get head of empty List")
this.tail = () -> throw new Error("Can't get tail of empty List")
this.isEmpty = () -> true
else
[x, xs...] = items
this.head = () -> x
this.tail = () -> new mugs.List(xs)
this.isEmpty = () -> false
this
mugs.List.prototype = new mugs.Indexed()
###*
Helper method to construct a list from a value and another list
@private
###
mugs.List.prototype.cons = (head, tail) ->
l = new mugs.List([head])
l.tail = () -> tail
return l
###*
Applies a binary operator on all items of this list going left to right and ending with the
seed value. This is a curried function that takes a seed value which returns a function that
takes a function which will then be applied to the items. The function is binary where
the first parameter is the value of the fold so far and the second is the current item.
@example
new mugs.List([1,2,3,4,5]).foldLeft(0)(function(acc,current){ return acc+current })
// returns 15 (the sum of the items in the list)
@param {*} seed The value to use when the list is empty
@return {function(function(*, *):*):*} A function which takes a binary function
###
mugs.List.prototype.foldLeft = (seed) -> (f) =>
__foldLeft = (acc, xs) ->
if (xs.isEmpty())
acc
else
__foldLeft( f(acc, xs.head()), xs.tail())
__foldLeft(seed,this)
###*
Applies a binary operator on all items of this list going right to left and ending with the
seed value. This is a curried function that takes a seed value which returns a function that
takes a function which will then be applied to the items. The function is binary where
the first parameter is the value of the fold so far and the second is the current item.
@example
new mugs.List([1,2,3,4,5]).foldRight(0)(function(acc,current){ return acc+current })
// returns 15 (the sum of the items in the list)
@param {*} seed The value to use when the list is empty
@return {function(function(*, *):*):*} A function which takes a binary function
###
mugs.List.prototype.foldRight = (seed) -> (f) =>
__foldRight = (xs) ->
if (xs.isEmpty())
seed
else
f(__foldRight(xs.tail()), xs.head())
__foldRight(this)
###
---------------------------------------------------------------------------------------------
Collection interface
head(), tail(), and isEmpty() are defined in the constructor
---------------------------------------------------------------------------------------------
###
###*
@private
###
mugs.List.prototype.buildFromArray = (arr) ->
new mugs.List(arr)
###*
Applies function 'f' on each value in the collection. This return nothing and is only invoked
for the side-effects of f.
@param f The unary function to apply on each element in the collection.
@see mugs.Collection
###
mugs.List.prototype.forEach = ( f ) ->
if !this.isEmpty()
f(this.head())
this.tail().forEach(f)
###
---------------------------------------------------------------------------------------------
Indexed interface
---------------------------------------------------------------------------------------------
###
###*
Update the value with the given index.
@param {number} index The index of the item to update
@param {*} item The item to replace with the current item
@return {List} A new list with the updated value.
###
mugs.List.prototype.update = (index, item) ->
if index < 0
throw new Error("Index out of bounds by #{index}")
else if (index == 0)
this.cons(item, this.tail())
else
this.cons(this.head(), this.tail().update(index-1,item))
###*
Return an Option containing the nth item in the list.
@param {number} index The index of the item to get
@return {mugs.Some|mugs.None} mugs.Some(item) is it exists, otherwise mugs.None
###
mugs.List.prototype.get = (index) ->
if index < 0 || this.isEmpty()
new mugs.None()
new mugs.None()
else if (index == 0)
new mugs.Some(this.head())
else
this.tail().get(index-1)
###*
Removes the item at the given index. Runs in O(n) time.
@param {number} index The index of the item to remove
@return {List} A new list without the item at the given index
###
mugs.List.prototype.removeAt = (index) ->
if index == 0
if !this.tail().isEmpty()
this.cons(this.tail().head(), this.tail().tail())
else
new mugs.List()
else
this.cons(this.head(), this.tail().removeAt(index-1))
###
---------------------------------------------------------------------------------------------
Extensible interface
---------------------------------------------------------------------------------------------
###
###*
Inserts a new item to the end of the List. Equivalent to append. This is needed so a List can be treated
as an Extensible collection. runs in O(mugs.List.append)
@param item The item to add to the end of the List
@return A new list with the item appended to the end
###
mugs.List.prototype.insert = (item) ->
this.append(item)
###
Removes an item from the List. Runs in O(n).
@param item The item to remove from the List.
###
mugs.List.prototype.remove = (item) ->
if this.isEmpty()
this
else if this.head() == item
if this.tail().isEmpty()
new mugs.List([])
else
this.cons(this.tail().head(), this.tail().tail())
else
this.cons(this.head(), this.tail().remove(item))
###
---------------------------------------------------------------------------------------------
Sequenced interface
---------------------------------------------------------------------------------------------
###
###*
Returns a mugs.Some with the last item in the collection if it's non-empty.
otherwise, mugs.None
@return a mugs.Some with the last item in the collection if it's non-empty.
otherwise, mugs.None
###
mugs.List.prototype.last = () ->
current = this
if current.isEmpty()
return new mugs.None
while !current.isEmpty()
item = current.head()
current = current.tail()
return new mugs.Some(item)
###*
Returns a mugs.Some with the first item in the collection if it's non-empty.
otherwise, mugs.None
@return a mugs.Some with the first item in the collection if it's non-empty.
otherwise, mugs.None
###
mugs.List.prototype.first = () ->
if this.isEmpty()
new mugs.None()
else
new mugs.Some(this.head())
###*
Create a new list by appending this value
@param item The item to append to the List
@return A new list containing all the items of the old with followed by the item
###
mugs.List.prototype.append = (item) ->
if (this.isEmpty())
new mugs.List([item])
else
this.cons(this.head(), this.tail().append(item))
###*
Create a new list by prepending this value
@param {*} item The item to prepend to the List
@return {List} A new list containing all the items of the old list
prepended with the item
###
mugs.List.prototype.prepend = (item) ->
this.cons(item,this)
###*
Returns a new list with the items in reversed order.
@return A new list with the items in reversed order
###
mugs.List.prototype.reverse = () ->
result = new mugs.List()
rest = this
while (!rest.isEmpty())
result = result.prepend(rest.head())
rest = rest.tail()
result
###*
Creates a new list with the items appended
@example
new mugs.List([1,2,3]).appendAll([4,5,6]);
// returns a list with the item 1,2,3,4,5,6
@param items An array with the items to append to this list.
@return A new list with the items appended
###
mugs.List.prototype.appendAll = (items) ->
if (this.isEmpty())
new mugs.List(items)
else
this.cons(this.head(), this.tail().appendAll(items))
###*
Creates a new list by copying all of the items in the argument 'list'
before of 'this' list
@example
new mugs.List([4,5,6]).prependAll(new mugs.List([1,2,3]));
// returns a list with the items 1,2,3,4,5,6
@param {List} list The list to prepend to this list.
@return {List} A new list containing the items of the prepended List
and the items of the original List.
###
mugs.List.prototype.prependAll = (items) ->
if this.isEmpty()
new mugs.List(items)
else
if items.length == 0
this
else
head = items.shift()
this.cons(head, this.prependAll(items))
| 144560 | ###*
@fileoverview Contains the implementation of the List abstract data type.
@author <NAME> (<EMAIL>)
###
mugs.provide('mugs.List')
mugs.require("mugs.Some")
mugs.require("mugs.None")
###*
List provides the implementation of the abstract data type List based on a Singly-Linked list. The
list contains the following operations:
<pre>
append(item) O(n)
prepend(item) O(1)
update(index, item ) O(n)
get(index ) O(n)
remove(index) O(n)
foldLeft(seed)(f) O(n*O(f))
foldRight(seed)(f) O(n*O(f))
forEach(f) O(n*O(f))
insert(item) O(n)
last() O(n)
first() O(1)
reverse() O(n)
appendAll(items) O(n)*items
prependAll(items) O(items)
</pre>
@public
@class List provides the implementation of the abstract data type List based on a Singly-Linked list
@augments mugs.Indexed
@example
var list = new mugs.List([1,2,3,4,5,6,7,8,9,10]);
@argument items An array of items to construct the List from
###
mugs.List = (items) ->
if not items? || items.length == 0
this.head = () -> throw new Error("Can't get head of empty List")
this.tail = () -> throw new Error("Can't get tail of empty List")
this.isEmpty = () -> true
else
[x, xs...] = items
this.head = () -> x
this.tail = () -> new mugs.List(xs)
this.isEmpty = () -> false
this
mugs.List.prototype = new mugs.Indexed()
###*
Helper method to construct a list from a value and another list
@private
###
mugs.List.prototype.cons = (head, tail) ->
l = new mugs.List([head])
l.tail = () -> tail
return l
###*
Applies a binary operator on all items of this list going left to right and ending with the
seed value. This is a curried function that takes a seed value which returns a function that
takes a function which will then be applied to the items. The function is binary where
the first parameter is the value of the fold so far and the second is the current item.
@example
new mugs.List([1,2,3,4,5]).foldLeft(0)(function(acc,current){ return acc+current })
// returns 15 (the sum of the items in the list)
@param {*} seed The value to use when the list is empty
@return {function(function(*, *):*):*} A function which takes a binary function
###
mugs.List.prototype.foldLeft = (seed) -> (f) =>
__foldLeft = (acc, xs) ->
if (xs.isEmpty())
acc
else
__foldLeft( f(acc, xs.head()), xs.tail())
__foldLeft(seed,this)
###*
Applies a binary operator on all items of this list going right to left and ending with the
seed value. This is a curried function that takes a seed value which returns a function that
takes a function which will then be applied to the items. The function is binary where
the first parameter is the value of the fold so far and the second is the current item.
@example
new mugs.List([1,2,3,4,5]).foldRight(0)(function(acc,current){ return acc+current })
// returns 15 (the sum of the items in the list)
@param {*} seed The value to use when the list is empty
@return {function(function(*, *):*):*} A function which takes a binary function
###
mugs.List.prototype.foldRight = (seed) -> (f) =>
__foldRight = (xs) ->
if (xs.isEmpty())
seed
else
f(__foldRight(xs.tail()), xs.head())
__foldRight(this)
###
---------------------------------------------------------------------------------------------
Collection interface
head(), tail(), and isEmpty() are defined in the constructor
---------------------------------------------------------------------------------------------
###
###*
@private
###
mugs.List.prototype.buildFromArray = (arr) ->
new mugs.List(arr)
###*
Applies function 'f' on each value in the collection. This return nothing and is only invoked
for the side-effects of f.
@param f The unary function to apply on each element in the collection.
@see mugs.Collection
###
mugs.List.prototype.forEach = ( f ) ->
if !this.isEmpty()
f(this.head())
this.tail().forEach(f)
###
---------------------------------------------------------------------------------------------
Indexed interface
---------------------------------------------------------------------------------------------
###
###*
Update the value with the given index.
@param {number} index The index of the item to update
@param {*} item The item to replace with the current item
@return {List} A new list with the updated value.
###
mugs.List.prototype.update = (index, item) ->
if index < 0
throw new Error("Index out of bounds by #{index}")
else if (index == 0)
this.cons(item, this.tail())
else
this.cons(this.head(), this.tail().update(index-1,item))
###*
Return an Option containing the nth item in the list.
@param {number} index The index of the item to get
@return {mugs.Some|mugs.None} mugs.Some(item) is it exists, otherwise mugs.None
###
mugs.List.prototype.get = (index) ->
if index < 0 || this.isEmpty()
new mugs.None()
new mugs.None()
else if (index == 0)
new mugs.Some(this.head())
else
this.tail().get(index-1)
###*
Removes the item at the given index. Runs in O(n) time.
@param {number} index The index of the item to remove
@return {List} A new list without the item at the given index
###
mugs.List.prototype.removeAt = (index) ->
if index == 0
if !this.tail().isEmpty()
this.cons(this.tail().head(), this.tail().tail())
else
new mugs.List()
else
this.cons(this.head(), this.tail().removeAt(index-1))
###
---------------------------------------------------------------------------------------------
Extensible interface
---------------------------------------------------------------------------------------------
###
###*
Inserts a new item to the end of the List. Equivalent to append. This is needed so a List can be treated
as an Extensible collection. runs in O(mugs.List.append)
@param item The item to add to the end of the List
@return A new list with the item appended to the end
###
mugs.List.prototype.insert = (item) ->
this.append(item)
###
Removes an item from the List. Runs in O(n).
@param item The item to remove from the List.
###
mugs.List.prototype.remove = (item) ->
if this.isEmpty()
this
else if this.head() == item
if this.tail().isEmpty()
new mugs.List([])
else
this.cons(this.tail().head(), this.tail().tail())
else
this.cons(this.head(), this.tail().remove(item))
###
---------------------------------------------------------------------------------------------
Sequenced interface
---------------------------------------------------------------------------------------------
###
###*
Returns a mugs.Some with the last item in the collection if it's non-empty.
otherwise, mugs.None
@return a mugs.Some with the last item in the collection if it's non-empty.
otherwise, mugs.None
###
mugs.List.prototype.last = () ->
current = this
if current.isEmpty()
return new mugs.None
while !current.isEmpty()
item = current.head()
current = current.tail()
return new mugs.Some(item)
###*
Returns a mugs.Some with the first item in the collection if it's non-empty.
otherwise, mugs.None
@return a mugs.Some with the first item in the collection if it's non-empty.
otherwise, mugs.None
###
mugs.List.prototype.first = () ->
if this.isEmpty()
new mugs.None()
else
new mugs.Some(this.head())
###*
Create a new list by appending this value
@param item The item to append to the List
@return A new list containing all the items of the old with followed by the item
###
mugs.List.prototype.append = (item) ->
if (this.isEmpty())
new mugs.List([item])
else
this.cons(this.head(), this.tail().append(item))
###*
Create a new list by prepending this value
@param {*} item The item to prepend to the List
@return {List} A new list containing all the items of the old list
prepended with the item
###
mugs.List.prototype.prepend = (item) ->
this.cons(item,this)
###*
Returns a new list with the items in reversed order.
@return A new list with the items in reversed order
###
mugs.List.prototype.reverse = () ->
result = new mugs.List()
rest = this
while (!rest.isEmpty())
result = result.prepend(rest.head())
rest = rest.tail()
result
###*
Creates a new list with the items appended
@example
new mugs.List([1,2,3]).appendAll([4,5,6]);
// returns a list with the item 1,2,3,4,5,6
@param items An array with the items to append to this list.
@return A new list with the items appended
###
mugs.List.prototype.appendAll = (items) ->
if (this.isEmpty())
new mugs.List(items)
else
this.cons(this.head(), this.tail().appendAll(items))
###*
Creates a new list by copying all of the items in the argument 'list'
before of 'this' list
@example
new mugs.List([4,5,6]).prependAll(new mugs.List([1,2,3]));
// returns a list with the items 1,2,3,4,5,6
@param {List} list The list to prepend to this list.
@return {List} A new list containing the items of the prepended List
and the items of the original List.
###
mugs.List.prototype.prependAll = (items) ->
if this.isEmpty()
new mugs.List(items)
else
if items.length == 0
this
else
head = items.shift()
this.cons(head, this.prependAll(items))
| true | ###*
@fileoverview Contains the implementation of the List abstract data type.
@author PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
###
mugs.provide('mugs.List')
mugs.require("mugs.Some")
mugs.require("mugs.None")
###*
List provides the implementation of the abstract data type List based on a Singly-Linked list. The
list contains the following operations:
<pre>
append(item) O(n)
prepend(item) O(1)
update(index, item ) O(n)
get(index ) O(n)
remove(index) O(n)
foldLeft(seed)(f) O(n*O(f))
foldRight(seed)(f) O(n*O(f))
forEach(f) O(n*O(f))
insert(item) O(n)
last() O(n)
first() O(1)
reverse() O(n)
appendAll(items) O(n)*items
prependAll(items) O(items)
</pre>
@public
@class List provides the implementation of the abstract data type List based on a Singly-Linked list
@augments mugs.Indexed
@example
var list = new mugs.List([1,2,3,4,5,6,7,8,9,10]);
@argument items An array of items to construct the List from
###
mugs.List = (items) ->
if not items? || items.length == 0
this.head = () -> throw new Error("Can't get head of empty List")
this.tail = () -> throw new Error("Can't get tail of empty List")
this.isEmpty = () -> true
else
[x, xs...] = items
this.head = () -> x
this.tail = () -> new mugs.List(xs)
this.isEmpty = () -> false
this
mugs.List.prototype = new mugs.Indexed()
###*
Helper method to construct a list from a value and another list
@private
###
mugs.List.prototype.cons = (head, tail) ->
l = new mugs.List([head])
l.tail = () -> tail
return l
###*
Applies a binary operator on all items of this list going left to right and ending with the
seed value. This is a curried function that takes a seed value which returns a function that
takes a function which will then be applied to the items. The function is binary where
the first parameter is the value of the fold so far and the second is the current item.
@example
new mugs.List([1,2,3,4,5]).foldLeft(0)(function(acc,current){ return acc+current })
// returns 15 (the sum of the items in the list)
@param {*} seed The value to use when the list is empty
@return {function(function(*, *):*):*} A function which takes a binary function
###
mugs.List.prototype.foldLeft = (seed) -> (f) =>
__foldLeft = (acc, xs) ->
if (xs.isEmpty())
acc
else
__foldLeft( f(acc, xs.head()), xs.tail())
__foldLeft(seed,this)
###*
Applies a binary operator on all items of this list going right to left and ending with the
seed value. This is a curried function that takes a seed value which returns a function that
takes a function which will then be applied to the items. The function is binary where
the first parameter is the value of the fold so far and the second is the current item.
@example
new mugs.List([1,2,3,4,5]).foldRight(0)(function(acc,current){ return acc+current })
// returns 15 (the sum of the items in the list)
@param {*} seed The value to use when the list is empty
@return {function(function(*, *):*):*} A function which takes a binary function
###
mugs.List.prototype.foldRight = (seed) -> (f) =>
__foldRight = (xs) ->
if (xs.isEmpty())
seed
else
f(__foldRight(xs.tail()), xs.head())
__foldRight(this)
###
---------------------------------------------------------------------------------------------
Collection interface
head(), tail(), and isEmpty() are defined in the constructor
---------------------------------------------------------------------------------------------
###
###*
@private
###
mugs.List.prototype.buildFromArray = (arr) ->
new mugs.List(arr)
###*
Applies function 'f' on each value in the collection. This return nothing and is only invoked
for the side-effects of f.
@param f The unary function to apply on each element in the collection.
@see mugs.Collection
###
mugs.List.prototype.forEach = ( f ) ->
if !this.isEmpty()
f(this.head())
this.tail().forEach(f)
###
---------------------------------------------------------------------------------------------
Indexed interface
---------------------------------------------------------------------------------------------
###
###*
Update the value with the given index.
@param {number} index The index of the item to update
@param {*} item The item to replace with the current item
@return {List} A new list with the updated value.
###
mugs.List.prototype.update = (index, item) ->
if index < 0
throw new Error("Index out of bounds by #{index}")
else if (index == 0)
this.cons(item, this.tail())
else
this.cons(this.head(), this.tail().update(index-1,item))
###*
Return an Option containing the nth item in the list.
@param {number} index The index of the item to get
@return {mugs.Some|mugs.None} mugs.Some(item) is it exists, otherwise mugs.None
###
mugs.List.prototype.get = (index) ->
if index < 0 || this.isEmpty()
new mugs.None()
new mugs.None()
else if (index == 0)
new mugs.Some(this.head())
else
this.tail().get(index-1)
###*
Removes the item at the given index. Runs in O(n) time.
@param {number} index The index of the item to remove
@return {List} A new list without the item at the given index
###
mugs.List.prototype.removeAt = (index) ->
if index == 0
if !this.tail().isEmpty()
this.cons(this.tail().head(), this.tail().tail())
else
new mugs.List()
else
this.cons(this.head(), this.tail().removeAt(index-1))
###
---------------------------------------------------------------------------------------------
Extensible interface
---------------------------------------------------------------------------------------------
###
###*
Inserts a new item to the end of the List. Equivalent to append. This is needed so a List can be treated
as an Extensible collection. runs in O(mugs.List.append)
@param item The item to add to the end of the List
@return A new list with the item appended to the end
###
mugs.List.prototype.insert = (item) ->
this.append(item)
###
Removes an item from the List. Runs in O(n).
@param item The item to remove from the List.
###
mugs.List.prototype.remove = (item) ->
if this.isEmpty()
this
else if this.head() == item
if this.tail().isEmpty()
new mugs.List([])
else
this.cons(this.tail().head(), this.tail().tail())
else
this.cons(this.head(), this.tail().remove(item))
###
---------------------------------------------------------------------------------------------
Sequenced interface
---------------------------------------------------------------------------------------------
###
###*
Returns a mugs.Some with the last item in the collection if it's non-empty.
otherwise, mugs.None
@return a mugs.Some with the last item in the collection if it's non-empty.
otherwise, mugs.None
###
mugs.List.prototype.last = () ->
current = this
if current.isEmpty()
return new mugs.None
while !current.isEmpty()
item = current.head()
current = current.tail()
return new mugs.Some(item)
###*
Returns a mugs.Some with the first item in the collection if it's non-empty.
otherwise, mugs.None
@return a mugs.Some with the first item in the collection if it's non-empty.
otherwise, mugs.None
###
mugs.List.prototype.first = () ->
if this.isEmpty()
new mugs.None()
else
new mugs.Some(this.head())
###*
Create a new list by appending this value
@param item The item to append to the List
@return A new list containing all the items of the old with followed by the item
###
mugs.List.prototype.append = (item) ->
if (this.isEmpty())
new mugs.List([item])
else
this.cons(this.head(), this.tail().append(item))
###*
Create a new list by prepending this value
@param {*} item The item to prepend to the List
@return {List} A new list containing all the items of the old list
prepended with the item
###
mugs.List.prototype.prepend = (item) ->
this.cons(item,this)
###*
Returns a new list with the items in reversed order.
@return A new list with the items in reversed order
###
mugs.List.prototype.reverse = () ->
result = new mugs.List()
rest = this
while (!rest.isEmpty())
result = result.prepend(rest.head())
rest = rest.tail()
result
###*
Creates a new list with the items appended
@example
new mugs.List([1,2,3]).appendAll([4,5,6]);
// returns a list with the item 1,2,3,4,5,6
@param items An array with the items to append to this list.
@return A new list with the items appended
###
mugs.List.prototype.appendAll = (items) ->
if (this.isEmpty())
new mugs.List(items)
else
this.cons(this.head(), this.tail().appendAll(items))
###*
Creates a new list by copying all of the items in the argument 'list'
before of 'this' list
@example
new mugs.List([4,5,6]).prependAll(new mugs.List([1,2,3]));
// returns a list with the items 1,2,3,4,5,6
@param {List} list The list to prepend to this list.
@return {List} A new list containing the items of the prepended List
and the items of the original List.
###
mugs.List.prototype.prependAll = (items) ->
if this.isEmpty()
new mugs.List(items)
else
if items.length == 0
this
else
head = items.shift()
this.cons(head, this.prependAll(items))
|
[
{
"context": "io.com\n\nCopyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>\n\nLicensed under the Apache License, Version 2.0 ",
"end": 194,
"score": 0.999919593334198,
"start": 178,
"tag": "EMAIL",
"value": "info@chaibio.com"
}
] | frontend/javascripts/app/directives/status-bar.coffee | MakerButt/chaipcr | 1 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <info@chaibio.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.App.directive 'statusBar', [
'Experiment'
'$state'
'TestInProgressHelper'
'$timeout'
'$window'
(Experiment, $state, TestInProgressHelper, $timeout, $window) ->
restrict: 'EA'
replace: true
templateUrl: 'app/views/directives/status-bar.html'
link: ($scope, elem, attrs) ->
experiment_id = null
$document = angular.element(document)
inputFocus = elem.find('input')
stopping = false
$scope.truncate = Experiment.truncateName
onResize = ->
elem.find('.left-content').css('opacity', '0')
$timeout ()->
elem.find('.left-content').css('width', '40%')
right_width = elem.find('.right-content').width() + 10
elem.find('.left-content').css('width', 'calc(100% - ' + right_width + 'px)')
elem.find('.left-content').css('opacity', '1')
, 1000
$scope.show = ->
if $scope.state isnt 'idle' then (!!$scope.status and !!$scope.footer_experiment) else !!$scope.status
getExperiment = (cb) ->
return if !experiment_id
Experiment.get(id: experiment_id).then (data) ->
cb data.experiment
$scope.$watch 'experiment_id', (id) ->
return if !id
getExperiment (exp) ->
$scope.footer_experiment = exp
onResize()
$scope.is_holding = false
$scope.goToTestKit = ->
switch $scope.footer_experiment.guid
when "chai_coronavirus_env_kit"
$state.go('coronavirus-env.experiment-running', {id: $scope.footer_experiment.id})
when "chai_covid19_surv_kit"
$state.go('covid19-surv.experiment-running', {id: $scope.footer_experiment.id})
when "pika_4e_kit", "pika_4e_lp_identification_kit"
$state.go('pika_test.experiment-running', {id: $scope.footer_experiment.id})
$scope.$on 'status:data:updated', (e, data, oldData) ->
return if !data
return if !data.experiment_controller
$scope.state = data.experiment_controller.machine.state
$scope.thermal_state = data.experiment_controller.machine.thermal_state
$scope.oldState = oldData?.experiment_controller?.machine?.state || 'NONE'
if ((($scope.oldState isnt $scope.state or !$scope.footer_experiment))) and experiment_id
getExperiment (exp) ->
$scope.footer_experiment = exp
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, exp)
onResize()
else
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, $scope.footer_experiment)
$scope.timeRemaining = TestInProgressHelper.timeRemaining(data)
if ($scope.state isnt 'idle' and !experiment_id and data.experiment_controller?.experiment?.id)
experiment_id = data.experiment_controller.experiment.id
getExperiment (exp) ->
$scope.footer_experiment = exp
onResize()
if $scope.state is 'idle' and $scope.oldState isnt 'idle'
$scope.footer_experiment = null
experiment_id = null
$scope.getDuration = ->
return 0 if !$scope?.experiment?.completed_at
Experiment.getExperimentDuration($scope.footer_experiment)
$scope.stopExperiment = ->
stopping = true
Experiment.stopExperiment($scope.footer_experiment.id)
.then ->
$scope.footer_experiment = null
$scope.stop_confirm_show = false
.finally ->
stopping = false
$scope.resumeExperiment = ->
Experiment.resumeExperiment($scope.footer_experiment.id)
$scope.stopConfirm = ->
$scope.stop_confirm_show = true
$timeout ->
inputFocus.focus()
$scope.inputBlur = ->
$timeout ->
$scope.stop_confirm_show = false if !stopping
, 250
]
| 96364 | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.App.directive 'statusBar', [
'Experiment'
'$state'
'TestInProgressHelper'
'$timeout'
'$window'
(Experiment, $state, TestInProgressHelper, $timeout, $window) ->
restrict: 'EA'
replace: true
templateUrl: 'app/views/directives/status-bar.html'
link: ($scope, elem, attrs) ->
experiment_id = null
$document = angular.element(document)
inputFocus = elem.find('input')
stopping = false
$scope.truncate = Experiment.truncateName
onResize = ->
elem.find('.left-content').css('opacity', '0')
$timeout ()->
elem.find('.left-content').css('width', '40%')
right_width = elem.find('.right-content').width() + 10
elem.find('.left-content').css('width', 'calc(100% - ' + right_width + 'px)')
elem.find('.left-content').css('opacity', '1')
, 1000
$scope.show = ->
if $scope.state isnt 'idle' then (!!$scope.status and !!$scope.footer_experiment) else !!$scope.status
getExperiment = (cb) ->
return if !experiment_id
Experiment.get(id: experiment_id).then (data) ->
cb data.experiment
$scope.$watch 'experiment_id', (id) ->
return if !id
getExperiment (exp) ->
$scope.footer_experiment = exp
onResize()
$scope.is_holding = false
$scope.goToTestKit = ->
switch $scope.footer_experiment.guid
when "chai_coronavirus_env_kit"
$state.go('coronavirus-env.experiment-running', {id: $scope.footer_experiment.id})
when "chai_covid19_surv_kit"
$state.go('covid19-surv.experiment-running', {id: $scope.footer_experiment.id})
when "pika_4e_kit", "pika_4e_lp_identification_kit"
$state.go('pika_test.experiment-running', {id: $scope.footer_experiment.id})
$scope.$on 'status:data:updated', (e, data, oldData) ->
return if !data
return if !data.experiment_controller
$scope.state = data.experiment_controller.machine.state
$scope.thermal_state = data.experiment_controller.machine.thermal_state
$scope.oldState = oldData?.experiment_controller?.machine?.state || 'NONE'
if ((($scope.oldState isnt $scope.state or !$scope.footer_experiment))) and experiment_id
getExperiment (exp) ->
$scope.footer_experiment = exp
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, exp)
onResize()
else
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, $scope.footer_experiment)
$scope.timeRemaining = TestInProgressHelper.timeRemaining(data)
if ($scope.state isnt 'idle' and !experiment_id and data.experiment_controller?.experiment?.id)
experiment_id = data.experiment_controller.experiment.id
getExperiment (exp) ->
$scope.footer_experiment = exp
onResize()
if $scope.state is 'idle' and $scope.oldState isnt 'idle'
$scope.footer_experiment = null
experiment_id = null
$scope.getDuration = ->
return 0 if !$scope?.experiment?.completed_at
Experiment.getExperimentDuration($scope.footer_experiment)
$scope.stopExperiment = ->
stopping = true
Experiment.stopExperiment($scope.footer_experiment.id)
.then ->
$scope.footer_experiment = null
$scope.stop_confirm_show = false
.finally ->
stopping = false
$scope.resumeExperiment = ->
Experiment.resumeExperiment($scope.footer_experiment.id)
$scope.stopConfirm = ->
$scope.stop_confirm_show = true
$timeout ->
inputFocus.focus()
$scope.inputBlur = ->
$timeout ->
$scope.stop_confirm_show = false if !stopping
, 250
]
| true | ###
Chai PCR - Software platform for Open qPCR and Chai's Real-Time PCR instruments.
For more information visit http://www.chaibio.com
Copyright 2016 Chai Biotechnologies Inc. <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
window.App.directive 'statusBar', [
'Experiment'
'$state'
'TestInProgressHelper'
'$timeout'
'$window'
(Experiment, $state, TestInProgressHelper, $timeout, $window) ->
restrict: 'EA'
replace: true
templateUrl: 'app/views/directives/status-bar.html'
link: ($scope, elem, attrs) ->
experiment_id = null
$document = angular.element(document)
inputFocus = elem.find('input')
stopping = false
$scope.truncate = Experiment.truncateName
onResize = ->
elem.find('.left-content').css('opacity', '0')
$timeout ()->
elem.find('.left-content').css('width', '40%')
right_width = elem.find('.right-content').width() + 10
elem.find('.left-content').css('width', 'calc(100% - ' + right_width + 'px)')
elem.find('.left-content').css('opacity', '1')
, 1000
$scope.show = ->
if $scope.state isnt 'idle' then (!!$scope.status and !!$scope.footer_experiment) else !!$scope.status
getExperiment = (cb) ->
return if !experiment_id
Experiment.get(id: experiment_id).then (data) ->
cb data.experiment
$scope.$watch 'experiment_id', (id) ->
return if !id
getExperiment (exp) ->
$scope.footer_experiment = exp
onResize()
$scope.is_holding = false
$scope.goToTestKit = ->
switch $scope.footer_experiment.guid
when "chai_coronavirus_env_kit"
$state.go('coronavirus-env.experiment-running', {id: $scope.footer_experiment.id})
when "chai_covid19_surv_kit"
$state.go('covid19-surv.experiment-running', {id: $scope.footer_experiment.id})
when "pika_4e_kit", "pika_4e_lp_identification_kit"
$state.go('pika_test.experiment-running', {id: $scope.footer_experiment.id})
$scope.$on 'status:data:updated', (e, data, oldData) ->
return if !data
return if !data.experiment_controller
$scope.state = data.experiment_controller.machine.state
$scope.thermal_state = data.experiment_controller.machine.thermal_state
$scope.oldState = oldData?.experiment_controller?.machine?.state || 'NONE'
if ((($scope.oldState isnt $scope.state or !$scope.footer_experiment))) and experiment_id
getExperiment (exp) ->
$scope.footer_experiment = exp
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, exp)
onResize()
else
$scope.status = data
$scope.is_holding = TestInProgressHelper.set_holding(data, $scope.footer_experiment)
$scope.timeRemaining = TestInProgressHelper.timeRemaining(data)
if ($scope.state isnt 'idle' and !experiment_id and data.experiment_controller?.experiment?.id)
experiment_id = data.experiment_controller.experiment.id
getExperiment (exp) ->
$scope.footer_experiment = exp
onResize()
if $scope.state is 'idle' and $scope.oldState isnt 'idle'
$scope.footer_experiment = null
experiment_id = null
$scope.getDuration = ->
return 0 if !$scope?.experiment?.completed_at
Experiment.getExperimentDuration($scope.footer_experiment)
$scope.stopExperiment = ->
stopping = true
Experiment.stopExperiment($scope.footer_experiment.id)
.then ->
$scope.footer_experiment = null
$scope.stop_confirm_show = false
.finally ->
stopping = false
$scope.resumeExperiment = ->
Experiment.resumeExperiment($scope.footer_experiment.id)
$scope.stopConfirm = ->
$scope.stop_confirm_show = true
$timeout ->
inputFocus.focus()
$scope.inputBlur = ->
$timeout ->
$scope.stop_confirm_show = false if !stopping
, 250
]
|
[
{
"context": "ey attribute', ->\n @base.key.should.equal 'basey-mcbase'\n\n context 'without robot', ->\n\n beforeEa",
"end": 1084,
"score": 0.9865447878837585,
"start": 1072,
"tag": "KEY",
"value": "basey-mcbase"
},
{
"context": "= new Base 'test', pretend.robot\n ... | test/unit/01-base_test.coffee | PropertyUX/nubot-playbook | 0 | sinon = require 'sinon'
chai = require 'chai'
should = chai.should()
chai.use require 'sinon-chai'
_ = require 'lodash'
pretend = require 'hubot-pretend'
Base = require '../../lib/modules/base'
Module = require '../../lib/utils/module'
describe 'Base', ->
beforeEach ->
pretend.start()
pretend.log.level = 'silent'
Object.getOwnPropertyNames(Base.prototype).map (key) ->
sinon.spy Base.prototype, key if _.isFunction Base.prototype[key]
afterEach ->
pretend.shutdown()
Object.getOwnPropertyNames(Base.prototype).map (key) ->
Base.prototype[key].restore() if _.isFunction Base.prototype[key]
describe '.constructor', ->
context 'with name, robot and options and key', ->
beforeEach ->
@base = new Base 'test', pretend.robot, test: 'testing', 'basey-mcbase'
it 'stores the robot', ->
@base.robot.should.eql pretend.robot
it 'calls configure with options', ->
@base.configure.should.have.calledWith test: 'testing'
it 'sets key attribute', ->
@base.key.should.equal 'basey-mcbase'
context 'without robot', ->
beforeEach ->
try @base = new Base 'newclass'
it 'runs error handler', ->
Base.prototype.error.should.have.calledOnce
context 'without name', ->
beforeEach ->
try @base = new Base()
it 'runs error handler', ->
Base.prototype.error.should.have.calledOnce
describe '.log', ->
it 'writes log on the given level', ->
base = new Base 'test', pretend.robot
base.id = 'test111'
base.log.debug 'This is some debug'
base.log.info 'This is info'
base.log.warning 'This is a warning'
base.log.error 'This is an error'
pretend.logs.slice(-4).should.eql [
[ 'debug', 'This is some debug (id: test111)' ],
[ 'info', 'This is info (id: test111)' ],
[ 'warning', 'This is a warning (id: test111)' ],
[ 'error', 'This is an error (id: test111)' ]
]
it 'appends the key if instance has one', ->
base = new Base 'test', pretend.robot, 'super-test'
base.id = 'test111'
base.log.debug 'This is some debug'
pretend.logs.slice(-1).should.eql [
[ 'debug', 'This is some debug (id: test111, key: super-test)' ]
]
describe '.error', ->
beforeEach ->
@base = new Base 'test', pretend.robot, test: 'testing'
context 'with an error', ->
beforeEach ->
@err = new Error "BORKED"
try @base.error @err
@errLog = pretend.logs.pop()
it 'logs an error', ->
@errLog[0].should.equal 'error'
it 'emits the error through robot', ->
pretend.robot.emit.should.have.calledWith 'error', @err
it 'threw error', ->
@base.error.should.throw
context 'with error context string', ->
beforeEach ->
try @base.error 'something broke'
@errLog = pretend.logs.pop()
it 'logs an error with the module instance ID and context string', ->
@errLog[1].should.match new RegExp "#{ @base.id }.*something broke"
it 'emits an error through robot', ->
pretend.robot.emit.should.have.calledWith 'error'
it 'threw error', ->
@base.error.should.throw
context 'using inherited method for error', ->
beforeEach ->
@module = new Module pretend.robot
try @module.error 'Throw me an error'
it 'calls inherited method', ->
Base.prototype.error.should.have.calledWith 'Throw me an error'
it 'threw', ->
@module.error.should.throw
describe '.configure', ->
it 'saves new options', ->
base = new Base 'module', pretend.robot
base.configure foo: true
base.config.foo.should.be.true
it 'overrides existing config', ->
base = new Base 'module', pretend.robot, setting: true
base.configure setting: false
base.config.setting.should.be.false
it 'throws when not given options', ->
base = new Base 'module', pretend.robot
try base.configure 'not an object'
base.configure.should.throw
describe '.defaults', ->
it 'sets config if not set', ->
@base = new Base 'module', pretend.robot
@base.defaults setting: true
@base.config.should.eql setting: true
it 'does not change config if already set', ->
@base = new Base 'module', pretend.robot, setting: true
@base.defaults setting: false
@base.config.should.eql setting: true
describe '.emit', ->
it 'emits event via the robot with instance as first arg', ->
@base = new Base 'module', pretend.robot
@eventSpy = sinon.spy()
pretend.robot.on 'mockEvent', @eventSpy
@base.emit 'mockEvent', foo: 'bar'
@eventSpy.should.have.calledWith @base, foo: 'bar'
describe '.on', ->
beforeEach ->
@mockEvent = sinon.spy()
it 'relays events from robot to instance', ->
@base = new Base 'module', pretend.robot
@mockEvent = sinon.spy()
@base.on 'mockEvent', @mockEvent
pretend.robot.emit 'mockEvent', @base, foo: 'bar'
@mockEvent.should.have.calledWith foo: 'bar'
| 57650 | sinon = require 'sinon'
chai = require 'chai'
should = chai.should()
chai.use require 'sinon-chai'
_ = require 'lodash'
pretend = require 'hubot-pretend'
Base = require '../../lib/modules/base'
Module = require '../../lib/utils/module'
describe 'Base', ->
beforeEach ->
pretend.start()
pretend.log.level = 'silent'
Object.getOwnPropertyNames(Base.prototype).map (key) ->
sinon.spy Base.prototype, key if _.isFunction Base.prototype[key]
afterEach ->
pretend.shutdown()
Object.getOwnPropertyNames(Base.prototype).map (key) ->
Base.prototype[key].restore() if _.isFunction Base.prototype[key]
describe '.constructor', ->
context 'with name, robot and options and key', ->
beforeEach ->
@base = new Base 'test', pretend.robot, test: 'testing', 'basey-mcbase'
it 'stores the robot', ->
@base.robot.should.eql pretend.robot
it 'calls configure with options', ->
@base.configure.should.have.calledWith test: 'testing'
it 'sets key attribute', ->
@base.key.should.equal '<KEY>'
context 'without robot', ->
beforeEach ->
try @base = new Base 'newclass'
it 'runs error handler', ->
Base.prototype.error.should.have.calledOnce
context 'without name', ->
beforeEach ->
try @base = new Base()
it 'runs error handler', ->
Base.prototype.error.should.have.calledOnce
describe '.log', ->
it 'writes log on the given level', ->
base = new Base 'test', pretend.robot
base.id = 'test111'
base.log.debug 'This is some debug'
base.log.info 'This is info'
base.log.warning 'This is a warning'
base.log.error 'This is an error'
pretend.logs.slice(-4).should.eql [
[ 'debug', 'This is some debug (id: test111)' ],
[ 'info', 'This is info (id: test111)' ],
[ 'warning', 'This is a warning (id: test111)' ],
[ 'error', 'This is an error (id: test111)' ]
]
it 'appends the key if instance has one', ->
base = new Base 'test', pretend.robot, 'super-test'
base.id = 'test111'
base.log.debug 'This is some debug'
pretend.logs.slice(-1).should.eql [
[ 'debug', 'This is some debug (id: test111, key: super-test)' ]
]
describe '.error', ->
beforeEach ->
@base = new Base 'test', pretend.robot, test: 'testing'
context 'with an error', ->
beforeEach ->
@err = new Error "BORKED"
try @base.error @err
@errLog = pretend.logs.pop()
it 'logs an error', ->
@errLog[0].should.equal 'error'
it 'emits the error through robot', ->
pretend.robot.emit.should.have.calledWith 'error', @err
it 'threw error', ->
@base.error.should.throw
context 'with error context string', ->
beforeEach ->
try @base.error 'something broke'
@errLog = pretend.logs.pop()
it 'logs an error with the module instance ID and context string', ->
@errLog[1].should.match new RegExp "#{ @base.id }.*something broke"
it 'emits an error through robot', ->
pretend.robot.emit.should.have.calledWith 'error'
it 'threw error', ->
@base.error.should.throw
context 'using inherited method for error', ->
beforeEach ->
@module = new Module pretend.robot
try @module.error 'Throw me an error'
it 'calls inherited method', ->
Base.prototype.error.should.have.calledWith 'Throw me an error'
it 'threw', ->
@module.error.should.throw
describe '.configure', ->
it 'saves new options', ->
base = new Base 'module', pretend.robot
base.configure foo: true
base.config.foo.should.be.true
it 'overrides existing config', ->
base = new Base 'module', pretend.robot, setting: true
base.configure setting: false
base.config.setting.should.be.false
it 'throws when not given options', ->
base = new Base 'module', pretend.robot
try base.configure 'not an object'
base.configure.should.throw
describe '.defaults', ->
it 'sets config if not set', ->
@base = new Base 'module', pretend.robot
@base.defaults setting: true
@base.config.should.eql setting: true
it 'does not change config if already set', ->
@base = new Base 'module', pretend.robot, setting: true
@base.defaults setting: false
@base.config.should.eql setting: true
describe '.emit', ->
it 'emits event via the robot with instance as first arg', ->
@base = new Base 'module', pretend.robot
@eventSpy = sinon.spy()
pretend.robot.on 'mockEvent', @eventSpy
@base.emit 'mockEvent', foo: 'bar'
@eventSpy.should.have.calledWith @base, foo: 'bar'
describe '.on', ->
beforeEach ->
@mockEvent = sinon.spy()
it 'relays events from robot to instance', ->
@base = new Base 'module', pretend.robot
@mockEvent = sinon.spy()
@base.on 'mockEvent', @mockEvent
pretend.robot.emit 'mockEvent', @base, foo: 'bar'
@mockEvent.should.have.calledWith foo: 'bar'
| true | sinon = require 'sinon'
chai = require 'chai'
should = chai.should()
chai.use require 'sinon-chai'
_ = require 'lodash'
pretend = require 'hubot-pretend'
Base = require '../../lib/modules/base'
Module = require '../../lib/utils/module'
describe 'Base', ->
beforeEach ->
pretend.start()
pretend.log.level = 'silent'
Object.getOwnPropertyNames(Base.prototype).map (key) ->
sinon.spy Base.prototype, key if _.isFunction Base.prototype[key]
afterEach ->
pretend.shutdown()
Object.getOwnPropertyNames(Base.prototype).map (key) ->
Base.prototype[key].restore() if _.isFunction Base.prototype[key]
describe '.constructor', ->
context 'with name, robot and options and key', ->
beforeEach ->
@base = new Base 'test', pretend.robot, test: 'testing', 'basey-mcbase'
it 'stores the robot', ->
@base.robot.should.eql pretend.robot
it 'calls configure with options', ->
@base.configure.should.have.calledWith test: 'testing'
it 'sets key attribute', ->
@base.key.should.equal 'PI:KEY:<KEY>END_PI'
context 'without robot', ->
beforeEach ->
try @base = new Base 'newclass'
it 'runs error handler', ->
Base.prototype.error.should.have.calledOnce
context 'without name', ->
beforeEach ->
try @base = new Base()
it 'runs error handler', ->
Base.prototype.error.should.have.calledOnce
describe '.log', ->
it 'writes log on the given level', ->
base = new Base 'test', pretend.robot
base.id = 'test111'
base.log.debug 'This is some debug'
base.log.info 'This is info'
base.log.warning 'This is a warning'
base.log.error 'This is an error'
pretend.logs.slice(-4).should.eql [
[ 'debug', 'This is some debug (id: test111)' ],
[ 'info', 'This is info (id: test111)' ],
[ 'warning', 'This is a warning (id: test111)' ],
[ 'error', 'This is an error (id: test111)' ]
]
it 'appends the key if instance has one', ->
base = new Base 'test', pretend.robot, 'super-test'
base.id = 'test111'
base.log.debug 'This is some debug'
pretend.logs.slice(-1).should.eql [
[ 'debug', 'This is some debug (id: test111, key: super-test)' ]
]
describe '.error', ->
beforeEach ->
@base = new Base 'test', pretend.robot, test: 'testing'
context 'with an error', ->
beforeEach ->
@err = new Error "BORKED"
try @base.error @err
@errLog = pretend.logs.pop()
it 'logs an error', ->
@errLog[0].should.equal 'error'
it 'emits the error through robot', ->
pretend.robot.emit.should.have.calledWith 'error', @err
it 'threw error', ->
@base.error.should.throw
context 'with error context string', ->
beforeEach ->
try @base.error 'something broke'
@errLog = pretend.logs.pop()
it 'logs an error with the module instance ID and context string', ->
@errLog[1].should.match new RegExp "#{ @base.id }.*something broke"
it 'emits an error through robot', ->
pretend.robot.emit.should.have.calledWith 'error'
it 'threw error', ->
@base.error.should.throw
context 'using inherited method for error', ->
beforeEach ->
@module = new Module pretend.robot
try @module.error 'Throw me an error'
it 'calls inherited method', ->
Base.prototype.error.should.have.calledWith 'Throw me an error'
it 'threw', ->
@module.error.should.throw
describe '.configure', ->
it 'saves new options', ->
base = new Base 'module', pretend.robot
base.configure foo: true
base.config.foo.should.be.true
it 'overrides existing config', ->
base = new Base 'module', pretend.robot, setting: true
base.configure setting: false
base.config.setting.should.be.false
it 'throws when not given options', ->
base = new Base 'module', pretend.robot
try base.configure 'not an object'
base.configure.should.throw
describe '.defaults', ->
it 'sets config if not set', ->
@base = new Base 'module', pretend.robot
@base.defaults setting: true
@base.config.should.eql setting: true
it 'does not change config if already set', ->
@base = new Base 'module', pretend.robot, setting: true
@base.defaults setting: false
@base.config.should.eql setting: true
describe '.emit', ->
it 'emits event via the robot with instance as first arg', ->
@base = new Base 'module', pretend.robot
@eventSpy = sinon.spy()
pretend.robot.on 'mockEvent', @eventSpy
@base.emit 'mockEvent', foo: 'bar'
@eventSpy.should.have.calledWith @base, foo: 'bar'
describe '.on', ->
beforeEach ->
@mockEvent = sinon.spy()
it 'relays events from robot to instance', ->
@base = new Base 'module', pretend.robot
@mockEvent = sinon.spy()
@base.on 'mockEvent', @mockEvent
pretend.robot.emit 'mockEvent', @base, foo: 'bar'
@mockEvent.should.have.calledWith foo: 'bar'
|
[
{
"context": "{\n \"fileTypes\": [\n \"marko\"\n ],\n \"name\": \"Marko\",\n \"patterns\": [\n {\n \"begin\": \"^\\\\s*(sty",
"end": 52,
"score": 0.7058511972427368,
"start": 47,
"tag": "NAME",
"value": "Marko"
}
] | grammars/marko.cson | marko-js/atom-language-marko | 37 | {
"fileTypes": [
"marko"
],
"name": "Marko",
"patterns": [
{
"begin": "^\\s*(style)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "CSS style block, eg: style { color: green }",
"contentName": "source.css",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.css",
"patterns": [
{
"include": "source.css"
}
]
},
{
"begin": "^\\s*(style)\\.(less)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "storage.modifier.marko.css"
},
"3": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "Less style block, eg: style.less { color: green }",
"contentName": "source.less",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.less",
"patterns": [
{
"include": "source.css.less"
}
]
},
{
"begin": "^\\s*(style)\\.(scss)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "storage.modifier.marko.css"
},
"3": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "SCSS style block, eg: style.scss { color: green }",
"contentName": "source.scss",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.scss",
"patterns": [
{
"include": "source.css.scss"
}
]
},
{
"begin": "^\\s*(?:(static )|(?=(?:class|import|export) ))",
"beginCaptures": {
"1": {
"name": "keyword.control.static.marko"
}
},
"comment": "Top level blocks parsed as JavaScript",
"contentName": "source.js",
"end": "(?=\\n|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-statement"
}
]
},
{
"include": "#content-concise-mode"
}
],
"repository": {
"attrs": {
"patterns": [
{
"applyEndPatternLast": 1,
"begin": "(?:\\s+|,)(?:(key|on[a-zA-Z0-9_$-]+|[a-zA-Z0-9_$]+Change|no-update(?:-body)?(?:-if)?)|([a-zA-Z0-9_$][a-zA-Z0-9_$-]*))(:[a-zA-Z0-9_$][a-zA-Z0-9_$-]*)?",
"beginCaptures": {
"1": {
"name": "support.type.attribute-name.marko"
},
"2": {
"name": "entity.other.attribute-name.marko"
},
"3": {
"name": "support.function.attribute-name.marko"
}
},
"comment": "Attribute with optional value",
"end": "(?=.|$)",
"name": "meta.marko-attribute",
"patterns": [
{
"include": "#html-args-or-method"
},
{
"applyEndPatternLast": 1,
"begin": "\\s*(:?=)\\s*",
"beginCaptures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Attribute value",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-expression"
}
]
}
]
},
{
"applyEndPatternLast": 1,
"begin": "(?:\\s+|,)\\.\\.\\.",
"beginCaptures": {
"1": {
"name": "keyword.operator.spread.marko"
}
},
"comment": "A ...spread attribute",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.marko-spread-attribute",
"patterns": [
{
"include": "#javascript-expression"
}
]
},
{
"begin": "\\s*(,(?!,))",
"captures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Consume any whitespace after a comma",
"end": "(?!\\S)"
},
{
"include": "#javascript-comment-multiline"
},
{
"include": "#invalid"
}
]
},
"concise-html-block": {
"begin": "\\s*(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- HTML block within concise mode content. ---",
"end": "\\1",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-html-block",
"patterns": [
{
"include": "#content-html-mode"
}
]
},
"concise-html-line": {
"captures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
},
"2": {
"patterns": [
{
"include": "#html-comments"
},
{
"include": "#tag-html"
},
{
"match": "\\\\.",
"name": "string"
},
{
"include": "#placeholder"
},
{
"match": ".+?",
"name": "string"
}
]
}
},
"comment": "-- HTML line within concise mode content. (content-html-mode w/o scriptlet)",
"match": "\\s*(--+)(?=\\s+\\S)(.*$)",
"name": "meta.section.marko-html-line"
},
"concise-open-tag-content": {
"patterns": [
{
"include": "#tag-before-attrs"
},
{
"begin": "\\s*\\[",
"beginCaptures": {
"0": {
"name": "punctuation.section.scope.begin.marko"
}
},
"end": "]",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko"
}
},
"patterns": [
{
"include": "#attrs"
},
{
"include": "#invalid"
}
]
},
{
"begin": "(?!^)(?= )",
"end": "(?=--)|(?<!,)(?=\\n)",
"patterns": [
{
"include": "#attrs"
},
{
"include": "#invalid"
}
]
}
]
},
"concise-script-block": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise script content block. ---",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-script-block",
"patterns": [
{
"include": "#content-embedded-script"
}
]
},
"concise-script-line": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise script content line.",
"end": "$",
"name": "meta.section.marko-script-line",
"patterns": [
{
"include": "#content-embedded-script"
}
]
},
"concise-style-block": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.css",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style"
}
]
},
"concise-style-block-less": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.less",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
},
"concise-style-block-scss": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.scss",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
},
"concise-style-line": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.css",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style"
}
]
},
"concise-style-line-less": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.less",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
},
"concise-style-line-scss": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.scss",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
},
"content-concise-mode": {
"comment": "Concise mode content block.",
"name": "meta.marko-concise-content",
"patterns": [
{
"include": "#scriptlet"
},
{
"include": "#javascript-comments"
},
{
"include": "#html-comments"
},
{
"include": "#concise-html-block"
},
{
"include": "#concise-html-line"
},
{
"include": "#tag-html"
},
{
"comment": "A concise html tag.",
"patterns": [
{
"begin": "^(\\s*)(?=style\\.less\\b)",
"comment": "Concise style tag less",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block-less"
},
{
"include": "#concise-style-line-less"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=style\\.scss\\b)",
"comment": "Concise style tag scss",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block-scss"
},
{
"include": "#concise-style-line-scss"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=style\\b)",
"comment": "Concise style tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block"
},
{
"include": "#concise-style-line"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=script\\b)",
"comment": "Concise script tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-script-block"
},
{
"include": "#concise-script-line"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=[a-zA-Z0-9_$@])",
"comment": "Normal concise tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#content-concise-mode"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
}
]
},
{
"include": "#invalid"
}
]
},
"content-embedded-script": {
"name": "meta.embedded.js",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.js"
}
]
},
"content-embedded-style": {
"name": "meta.embedded.css",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css"
}
]
},
"content-embedded-style-less": {
"name": "meta.embedded.css.less",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css.less"
}
]
},
"content-embedded-style-scss": {
"name": "meta.embedded.css.scss",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css.scss"
}
]
},
"content-html-mode": {
"comment": "HTML mode content block.",
"patterns": [
{
"include": "#scriptlet"
},
{
"include": "#html-comments"
},
{
"include": "#tag-html"
},
{
"match": "\\\\.",
"name": "string"
},
{
"include": "#placeholder"
},
{
"match": ".+?",
"name": "string"
}
]
},
"html-args-or-method": {
"patterns": [
{
"include": "#javascript-args"
},
{
"begin": "(?<=\\))\\s*(?=\\{)",
"comment": "Attribute method shorthand following parens",
"contentName": "source.js",
"end": "(?<=\\})",
"name": "meta.embedded.js",
"patterns": [
{
"include": "source.js"
}
]
}
]
},
"html-comments": {
"patterns": [
{
"begin": "\\s*(<!(--)?)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.comment.marko"
}
},
"comment": "HTML comments, doctypes & cdata",
"end": "\\2>",
"endCaptures": {
"0": {
"name": "punctuation.definition.comment.marko"
}
},
"name": "comment.block.marko"
},
{
"begin": "\\s*(<html-comment>)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.comment.marko"
}
},
"comment": "Preserved HTML comment tag",
"end": "</html-comment>",
"endCaptures": {
"0": {
"name": "punctuation.definition.comment.marko"
}
},
"name": "comment.block.marko"
}
]
},
"invalid": {
"match": "[^\\s]",
"name": "invalid.illegal.character-not-allowed-here.marko"
},
"javascript-args": {
"begin": "(?=\\()",
"comment": "Javascript style arguments",
"contentName": "source.js",
"end": "(?<=\\))",
"name": "meta.embedded.js",
"patterns": [
{
"include": "source.js"
}
]
},
"javascript-comment-line": {
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "JavaScript // single line comment",
"contentName": "source.js",
"match": "\\s*//.*$"
},
"javascript-comment-multiline": {
"begin": "\\s*(?=/\\*)",
"comment": "JavaScript /* block comment */",
"contentName": "source.js",
"end": "(?<=\\*/)",
"patterns": [
{
"include": "source.js"
}
]
},
"javascript-comments": {
"patterns": [
{
"include": "#javascript-comment-multiline"
},
{
"include": "#javascript-comment-line"
}
]
},
"javascript-enclosed": {
"comment": "Matches JavaScript content and ensures enclosed blocks are matched.",
"patterns": [
{
"include": "#javascript-comments"
},
{
"include": "#javascript-args"
},
{
"begin": "(?={)",
"end": "(?<=})",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=\\[)",
"end": "(?<=])",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=\")",
"end": "(?<=\")",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=')",
"end": "(?<=')",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=`)",
"end": "(?<=`)",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "/(?!<[\\]})A-Z0-9.<%]\\s*/)(?!/?>|$)",
"captures": {
"0": {
"name": "string.regexp.js"
}
},
"contentName": "source.js",
"end": "/[gimsuy]*",
"patterns": [
{
"include": "source.js#regexp"
},
{
"include": "source.js"
}
]
},
{
"begin": "(?x)\\s*(?:\n\t\t\t\t\t\t\t\t(?:\\b(?:new|typeof|instanceof|in)\\b)| # Keyword operators\n\t\t\t\t\t\t\t\t\\&\\&|\\|\\|| # Logical operators\n\t\t\t\t\t\t\t\t[\\^|&]| # Bitwise operators\n\t\t\t\t\t\t\t\t[!=]=|[!=]==|<|<[=<]|=>| # Comparison operators (Note you cannot use * or ? here)\n\t\t\t\t\t\t\t\t[?:]| # Ternary operators\n\t\t\t\t\t\t\t\t[-+*%](?!-) # Arithmetic operators\n\t\t\t\t\t\t\t)",
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"end": "(?=\\S)"
}
]
},
"javascript-expression": {
"patterns": [
{
"include": "#javascript-enclosed"
},
{
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Match identifiers and member expressions",
"match": "[0-9a-zA-Z$_.]+"
}
]
},
"javascript-statement": {
"patterns": [
{
"include": "#javascript-enclosed"
},
{
"include": "source.js"
}
]
},
"open-tag-content": {
"patterns": [
{
"include": "#tag-before-attrs"
},
{
"begin": "(?= )",
"comment": "Attributes begin after the first space within the tag name",
"end": "(?=/?>)",
"patterns": [
{
"include": "#attrs"
}
]
}
]
},
"placeholder": {
"begin": "\\$!?{",
"beginCaptures": {
"0": {
"name": "punctuation.definition.template-expression.begin.js"
}
},
"comment": "${ } placeholder",
"contentName": "source.js",
"end": "}",
"endCaptures": {
"0": {
"name": "punctuation.definition.template-expression.end.js"
}
},
"patterns": [
{
"include": "source.js"
}
]
},
"scriptlet": {
"begin": "^\\s*(\\$)\\s+",
"beginCaptures": {
"1": {
"name": "keyword.control.scriptlet.marko"
}
},
"comment": "An inline JavaScript scriptlet.",
"contentName": "source.js",
"end": "$",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-statement"
}
]
},
"tag-before-attrs": {
"comment": "Everything in a tag before the attributes content",
"patterns": [
{
"include": "#tag-name"
},
{
"comment": "Shorthand class or ID attribute",
"match": "[#.][a-zA-Z0-9_$][a-zA-Z0-9_$-]*",
"name": "entity.other.attribute-name.marko"
},
{
"begin": "/(?!/)",
"beginCaptures": {
"0": {
"name": "punctuation.separator.key-value.marko"
}
},
"comment": "Variable for a tag",
"contentName": "source.js",
"end": "(?=:?\\=|\\s|>|$|\\||\\(|/)",
"name": "meta.embedded.js",
"patterns": [
{
"comment": "Match identifiers",
"match": "[a-zA-Z$_][0-9a-zA-Z$_]*",
"name": "variable.other.constant.object.js"
},
{
"include": "source.js#object-binding-pattern"
},
{
"include": "source.js#array-binding-pattern"
},
{
"include": "source.js#var-single-variable"
},
{
"include": "#javascript-expression"
}
]
},
{
"applyEndPatternLast": 1,
"begin": "\\s*(:?=)\\s*",
"beginCaptures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Default attribute value",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-expression"
}
]
},
{
"begin": "\\|",
"beginCaptures": {
"0": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "Parameters for a tag",
"end": "\\|",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko"
}
},
"patterns": [
{
"include": "source.js#function-parameters-body"
},
{
"include": "source.js"
}
]
},
{
"include": "#html-args-or-method"
}
]
},
"tag-html": {
"comment": "Matches an HTML tag and its contents",
"patterns": [
{
"begin": "\\s*(<)(?=(area|base|br|col|embed|hr|img|input|link|meta|param|source|track|wbr)\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "HTML void elements",
"end": "/?>",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
}
]
},
{
"begin": "\\s*(<)(?=style\\.less\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag with less",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.less",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
}
]
},
{
"begin": "\\s*(<)(?=style\\.scss\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag with scss",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.less",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
}
]
},
{
"begin": "\\s*(<)(?=style\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.css",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style"
}
]
}
]
},
{
"begin": "\\s*(<)(?=script\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML script tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Script body content",
"contentName": "source.js",
"end": "\\s*(</)(script)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-script"
}
]
}
]
},
{
"begin": "\\s*(<)(?=[a-zA-Z0-9_$@])",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML normal tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Body content",
"end": "\\s*(</)([a-zA-Z0-9_$:@-]+)?(.*?)(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"patterns": [
{
"include": "#invalid"
}
]
},
"4": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-html-mode"
}
]
}
]
}
]
},
"tag-name": {
"patterns": [
{
"begin": "\\${",
"beginCaptures": {
"0": {
"name": "punctuation.definition.template-expression.begin.js"
}
},
"comment": "Dynamic tag.",
"end": "}",
"endCaptures": {
"0": {
"name": "punctuation.definition.template-expression.end.js"
}
},
"patterns": [
{
"include": "source.js"
}
]
},
{
"captures": {
"1": {
"name": "entity.name.tag.marko"
},
"2": {
"name": "storage.type.marko.css"
},
"3": {
"patterns": [
{
"comment": "Core tag.",
"match": "(attrs|return|import)(?=\\b)",
"name": "support.type.builtin.marko"
},
{
"comment": "Core tag.",
"match": "(for|if|while|else-if|else|macro|tag|await|let|const|effect|set|get|id|lifecycle)(?=\\b)",
"name": "support.function.marko"
},
{
"comment": "Attribute tag.",
"match": "@.+",
"name": "entity.other.attribute-name.marko"
},
{
"comment": "Native or userland tag.",
"match": ".+",
"name": "entity.name.tag.marko"
}
]
}
},
"match": "(style)\\.([a-zA-Z0-9$_-]+(?:\\.[a-zA-Z0-9$_-]+)*)|([a-zA-Z0-9_$@][a-zA-Z0-9_$@:-]*)"
}
]
}
},
"scopeName": "text.marko",
"uuid": "46c8c3f8-cabe-466a-a633-5deafdc51253"
} | 5550 | {
"fileTypes": [
"marko"
],
  "name": "Marko",
"patterns": [
{
"begin": "^\\s*(style)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "CSS style block, eg: style { color: green }",
"contentName": "source.css",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.css",
"patterns": [
{
"include": "source.css"
}
]
},
{
"begin": "^\\s*(style)\\.(less)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "storage.modifier.marko.css"
},
"3": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "Less style block, eg: style.less { color: green }",
"contentName": "source.less",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.less",
"patterns": [
{
"include": "source.css.less"
}
]
},
{
"begin": "^\\s*(style)\\.(scss)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "storage.modifier.marko.css"
},
"3": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "SCSS style block, eg: style.scss { color: green }",
"contentName": "source.scss",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.scss",
"patterns": [
{
"include": "source.css.scss"
}
]
},
{
"begin": "^\\s*(?:(static )|(?=(?:class|import|export) ))",
"beginCaptures": {
"1": {
"name": "keyword.control.static.marko"
}
},
"comment": "Top level blocks parsed as JavaScript",
"contentName": "source.js",
"end": "(?=\\n|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-statement"
}
]
},
{
"include": "#content-concise-mode"
}
],
"repository": {
"attrs": {
"patterns": [
{
"applyEndPatternLast": 1,
"begin": "(?:\\s+|,)(?:(key|on[a-zA-Z0-9_$-]+|[a-zA-Z0-9_$]+Change|no-update(?:-body)?(?:-if)?)|([a-zA-Z0-9_$][a-zA-Z0-9_$-]*))(:[a-zA-Z0-9_$][a-zA-Z0-9_$-]*)?",
"beginCaptures": {
"1": {
"name": "support.type.attribute-name.marko"
},
"2": {
"name": "entity.other.attribute-name.marko"
},
"3": {
"name": "support.function.attribute-name.marko"
}
},
"comment": "Attribute with optional value",
"end": "(?=.|$)",
"name": "meta.marko-attribute",
"patterns": [
{
"include": "#html-args-or-method"
},
{
"applyEndPatternLast": 1,
"begin": "\\s*(:?=)\\s*",
"beginCaptures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Attribute value",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-expression"
}
]
}
]
},
{
"applyEndPatternLast": 1,
"begin": "(?:\\s+|,)\\.\\.\\.",
"beginCaptures": {
"1": {
"name": "keyword.operator.spread.marko"
}
},
"comment": "A ...spread attribute",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.marko-spread-attribute",
"patterns": [
{
"include": "#javascript-expression"
}
]
},
{
"begin": "\\s*(,(?!,))",
"captures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Consume any whitespace after a comma",
"end": "(?!\\S)"
},
{
"include": "#javascript-comment-multiline"
},
{
"include": "#invalid"
}
]
},
"concise-html-block": {
"begin": "\\s*(--+)\\s*$",
"beginCaptures": {
      "1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- HTML block within concise mode content. ---",
"end": "\\1",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-html-block",
"patterns": [
{
"include": "#content-html-mode"
}
]
},
"concise-html-line": {
"captures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
},
"2": {
"patterns": [
{
"include": "#html-comments"
},
{
"include": "#tag-html"
},
{
"match": "\\\\.",
"name": "string"
},
{
"include": "#placeholder"
},
{
"match": ".+?",
"name": "string"
}
]
}
},
"comment": "-- HTML line within concise mode content. (content-html-mode w/o scriptlet)",
"match": "\\s*(--+)(?=\\s+\\S)(.*$)",
"name": "meta.section.marko-html-line"
},
"concise-open-tag-content": {
"patterns": [
{
"include": "#tag-before-attrs"
},
{
"begin": "\\s*\\[",
"beginCaptures": {
"0": {
"name": "punctuation.section.scope.begin.marko"
}
},
"end": "]",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko"
}
},
"patterns": [
{
"include": "#attrs"
},
{
"include": "#invalid"
}
]
},
{
"begin": "(?!^)(?= )",
"end": "(?=--)|(?<!,)(?=\\n)",
"patterns": [
{
"include": "#attrs"
},
{
"include": "#invalid"
}
]
}
]
},
"concise-script-block": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise script content block. ---",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-script-block",
"patterns": [
{
"include": "#content-embedded-script"
}
]
},
"concise-script-line": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise script content line.",
"end": "$",
"name": "meta.section.marko-script-line",
"patterns": [
{
"include": "#content-embedded-script"
}
]
},
"concise-style-block": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.css",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style"
}
]
},
"concise-style-block-less": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.less",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
},
"concise-style-block-scss": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.scss",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
},
"concise-style-line": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.css",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style"
}
]
},
"concise-style-line-less": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.less",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
},
"concise-style-line-scss": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.scss",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
},
"content-concise-mode": {
"comment": "Concise mode content block.",
"name": "meta.marko-concise-content",
"patterns": [
{
"include": "#scriptlet"
},
{
"include": "#javascript-comments"
},
{
"include": "#html-comments"
},
{
"include": "#concise-html-block"
},
{
"include": "#concise-html-line"
},
{
"include": "#tag-html"
},
{
"comment": "A concise html tag.",
"patterns": [
{
"begin": "^(\\s*)(?=style\\.less\\b)",
"comment": "Concise style tag less",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block-less"
},
{
"include": "#concise-style-line-less"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=style\\.scss\\b)",
"comment": "Concise style tag scss",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block-scss"
},
{
"include": "#concise-style-line-scss"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=style\\b)",
"comment": "Concise style tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block"
},
{
"include": "#concise-style-line"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=script\\b)",
"comment": "Concise script tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-script-block"
},
{
"include": "#concise-script-line"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=[a-zA-Z0-9_$@])",
"comment": "Normal concise tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#content-concise-mode"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
}
]
},
{
"include": "#invalid"
}
]
},
"content-embedded-script": {
"name": "meta.embedded.js",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.js"
}
]
},
"content-embedded-style": {
"name": "meta.embedded.css",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css"
}
]
},
"content-embedded-style-less": {
"name": "meta.embedded.css.less",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css.less"
}
]
},
"content-embedded-style-scss": {
"name": "meta.embedded.css.scss",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css.scss"
}
]
},
"content-html-mode": {
"comment": "HTML mode content block.",
"patterns": [
{
"include": "#scriptlet"
},
{
"include": "#html-comments"
},
{
"include": "#tag-html"
},
{
"match": "\\\\.",
"name": "string"
},
{
"include": "#placeholder"
},
{
"match": ".+?",
"name": "string"
}
]
},
"html-args-or-method": {
"patterns": [
{
"include": "#javascript-args"
},
{
"begin": "(?<=\\))\\s*(?=\\{)",
"comment": "Attribute method shorthand following parens",
"contentName": "source.js",
"end": "(?<=\\})",
"name": "meta.embedded.js",
"patterns": [
{
"include": "source.js"
}
]
}
]
},
"html-comments": {
"patterns": [
{
"begin": "\\s*(<!(--)?)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.comment.marko"
}
},
"comment": "HTML comments, doctypes & cdata",
"end": "\\2>",
"endCaptures": {
"0": {
"name": "punctuation.definition.comment.marko"
}
},
"name": "comment.block.marko"
},
{
"begin": "\\s*(<html-comment>)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.comment.marko"
}
},
"comment": "Preserved HTML comment tag",
"end": "</html-comment>",
"endCaptures": {
"0": {
"name": "punctuation.definition.comment.marko"
}
},
"name": "comment.block.marko"
}
]
},
"invalid": {
"match": "[^\\s]",
"name": "invalid.illegal.character-not-allowed-here.marko"
},
"javascript-args": {
"begin": "(?=\\()",
"comment": "Javascript style arguments",
"contentName": "source.js",
"end": "(?<=\\))",
"name": "meta.embedded.js",
"patterns": [
{
"include": "source.js"
}
]
},
"javascript-comment-line": {
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "JavaScript // single line comment",
"contentName": "source.js",
"match": "\\s*//.*$"
},
"javascript-comment-multiline": {
"begin": "\\s*(?=/\\*)",
"comment": "JavaScript /* block comment */",
"contentName": "source.js",
"end": "(?<=\\*/)",
"patterns": [
{
"include": "source.js"
}
]
},
"javascript-comments": {
"patterns": [
{
"include": "#javascript-comment-multiline"
},
{
"include": "#javascript-comment-line"
}
]
},
"javascript-enclosed": {
"comment": "Matches JavaScript content and ensures enclosed blocks are matched.",
"patterns": [
{
"include": "#javascript-comments"
},
{
"include": "#javascript-args"
},
{
"begin": "(?={)",
"end": "(?<=})",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=\\[)",
"end": "(?<=])",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=\")",
"end": "(?<=\")",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=')",
"end": "(?<=')",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=`)",
"end": "(?<=`)",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "/(?!<[\\]})A-Z0-9.<%]\\s*/)(?!/?>|$)",
"captures": {
"0": {
"name": "string.regexp.js"
}
},
"contentName": "source.js",
"end": "/[gimsuy]*",
"patterns": [
{
"include": "source.js#regexp"
},
{
"include": "source.js"
}
]
},
{
"begin": "(?x)\\s*(?:\n\t\t\t\t\t\t\t\t(?:\\b(?:new|typeof|instanceof|in)\\b)| # Keyword operators\n\t\t\t\t\t\t\t\t\\&\\&|\\|\\|| # Logical operators\n\t\t\t\t\t\t\t\t[\\^|&]| # Bitwise operators\n\t\t\t\t\t\t\t\t[!=]=|[!=]==|<|<[=<]|=>| # Comparison operators (Note you cannot use * or ? here)\n\t\t\t\t\t\t\t\t[?:]| # Ternary operators\n\t\t\t\t\t\t\t\t[-+*%](?!-) # Arithmetic operators\n\t\t\t\t\t\t\t)",
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"end": "(?=\\S)"
}
]
},
"javascript-expression": {
"patterns": [
{
"include": "#javascript-enclosed"
},
{
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Match identifiers and member expressions",
"match": "[0-9a-zA-Z$_.]+"
}
]
},
"javascript-statement": {
"patterns": [
{
"include": "#javascript-enclosed"
},
{
"include": "source.js"
}
]
},
"open-tag-content": {
"patterns": [
{
"include": "#tag-before-attrs"
},
{
"begin": "(?= )",
"comment": "Attributes begin after the first space within the tag name",
"end": "(?=/?>)",
"patterns": [
{
"include": "#attrs"
}
]
}
]
},
"placeholder": {
"begin": "\\$!?{",
"beginCaptures": {
"0": {
"name": "punctuation.definition.template-expression.begin.js"
}
},
"comment": "${ } placeholder",
"contentName": "source.js",
"end": "}",
"endCaptures": {
"0": {
"name": "punctuation.definition.template-expression.end.js"
}
},
"patterns": [
{
"include": "source.js"
}
]
},
"scriptlet": {
"begin": "^\\s*(\\$)\\s+",
"beginCaptures": {
"1": {
"name": "keyword.control.scriptlet.marko"
}
},
"comment": "An inline JavaScript scriptlet.",
"contentName": "source.js",
"end": "$",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-statement"
}
]
},
"tag-before-attrs": {
"comment": "Everything in a tag before the attributes content",
"patterns": [
{
"include": "#tag-name"
},
{
"comment": "Shorthand class or ID attribute",
"match": "[#.][a-zA-Z0-9_$][a-zA-Z0-9_$-]*",
"name": "entity.other.attribute-name.marko"
},
{
"begin": "/(?!/)",
"beginCaptures": {
"0": {
"name": "punctuation.separator.key-value.marko"
}
},
"comment": "Variable for a tag",
"contentName": "source.js",
"end": "(?=:?\\=|\\s|>|$|\\||\\(|/)",
"name": "meta.embedded.js",
"patterns": [
{
"comment": "Match identifiers",
"match": "[a-zA-Z$_][0-9a-zA-Z$_]*",
"name": "variable.other.constant.object.js"
},
{
"include": "source.js#object-binding-pattern"
},
{
"include": "source.js#array-binding-pattern"
},
{
"include": "source.js#var-single-variable"
},
{
"include": "#javascript-expression"
}
]
},
{
"applyEndPatternLast": 1,
"begin": "\\s*(:?=)\\s*",
"beginCaptures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Default attribute value",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-expression"
}
]
},
{
"begin": "\\|",
"beginCaptures": {
"0": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "Parameters for a tag",
"end": "\\|",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko"
}
},
"patterns": [
{
"include": "source.js#function-parameters-body"
},
{
"include": "source.js"
}
]
},
{
"include": "#html-args-or-method"
}
]
},
"tag-html": {
"comment": "Matches an HTML tag and its contents",
"patterns": [
{
"begin": "\\s*(<)(?=(area|base|br|col|embed|hr|img|input|link|meta|param|source|track|wbr)\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "HTML void elements",
"end": "/?>",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
}
]
},
{
"begin": "\\s*(<)(?=style\\.less\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag with less",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.less",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
}
]
},
{
"begin": "\\s*(<)(?=style\\.scss\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag with scss",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
          "contentName": "source.scss",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
}
]
},
{
"begin": "\\s*(<)(?=style\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.css",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style"
}
]
}
]
},
{
"begin": "\\s*(<)(?=script\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML script tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Script body content",
"contentName": "source.js",
"end": "\\s*(</)(script)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-script"
}
]
}
]
},
{
"begin": "\\s*(<)(?=[a-zA-Z0-9_$@])",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML normal tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Body content",
"end": "\\s*(</)([a-zA-Z0-9_$:@-]+)?(.*?)(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"patterns": [
{
"include": "#invalid"
}
]
},
"4": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-html-mode"
}
]
}
]
}
]
},
"tag-name": {
"patterns": [
{
"begin": "\\${",
"beginCaptures": {
"0": {
"name": "punctuation.definition.template-expression.begin.js"
}
},
"comment": "Dynamic tag.",
"end": "}",
"endCaptures": {
"0": {
"name": "punctuation.definition.template-expression.end.js"
}
},
"patterns": [
{
"include": "source.js"
}
]
},
{
"captures": {
"1": {
"name": "entity.name.tag.marko"
},
"2": {
"name": "storage.type.marko.css"
},
"3": {
"patterns": [
{
"comment": "Core tag.",
"match": "(attrs|return|import)(?=\\b)",
"name": "support.type.builtin.marko"
},
{
"comment": "Core tag.",
"match": "(for|if|while|else-if|else|macro|tag|await|let|const|effect|set|get|id|lifecycle)(?=\\b)",
"name": "support.function.marko"
},
{
"comment": "Attribute tag.",
"match": "@.+",
"name": "entity.other.attribute-name.marko"
},
{
"comment": "Native or userland tag.",
"match": ".+",
"name": "entity.name.tag.marko"
}
]
}
},
"match": "(style)\\.([a-zA-Z0-9$_-]+(?:\\.[a-zA-Z0-9$_-]+)*)|([a-zA-Z0-9_$@][a-zA-Z0-9_$@:-]*)"
}
]
}
},
"scopeName": "text.marko",
"uuid": "46c8c3f8-cabe-466a-a633-5deafdc51253"
} | true | {
"fileTypes": [
"marko"
],
  "name": "Marko",
"patterns": [
{
"begin": "^\\s*(style)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "CSS style block, eg: style { color: green }",
"contentName": "source.css",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.css",
"patterns": [
{
"include": "source.css"
}
]
},
{
"begin": "^\\s*(style)\\.(less)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "storage.modifier.marko.css"
},
"3": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "Less style block, eg: style.less { color: green }",
"contentName": "source.less",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.less",
"patterns": [
{
"include": "source.css.less"
}
]
},
{
"begin": "^\\s*(style)\\.(scss)\\s+(\\{)",
"beginCaptures": {
"1": {
"name": "storage.type.marko.css"
},
"2": {
"name": "storage.modifier.marko.css"
},
"3": {
"name": "punctuation.section.scope.begin.marko.css"
}
},
"comment": "SCSS style block, eg: style.scss { color: green }",
"contentName": "source.scss",
"end": "\\}",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko.css"
}
},
"name": "meta.embedded.scss",
"patterns": [
{
"include": "source.css.scss"
}
]
},
{
"begin": "^\\s*(?:(static )|(?=(?:class|import|export) ))",
"beginCaptures": {
"1": {
"name": "keyword.control.static.marko"
}
},
"comment": "Top level blocks parsed as JavaScript",
"contentName": "source.js",
"end": "(?=\\n|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-statement"
}
]
},
{
"include": "#content-concise-mode"
}
],
"repository": {
"attrs": {
"patterns": [
{
"applyEndPatternLast": 1,
"begin": "(?:\\s+|,)(?:(key|on[a-zA-Z0-9_$-]+|[a-zA-Z0-9_$]+Change|no-update(?:-body)?(?:-if)?)|([a-zA-Z0-9_$][a-zA-Z0-9_$-]*))(:[a-zA-Z0-9_$][a-zA-Z0-9_$-]*)?",
"beginCaptures": {
"1": {
"name": "support.type.attribute-name.marko"
},
"2": {
"name": "entity.other.attribute-name.marko"
},
"3": {
"name": "support.function.attribute-name.marko"
}
},
"comment": "Attribute with optional value",
"end": "(?=.|$)",
"name": "meta.marko-attribute",
"patterns": [
{
"include": "#html-args-or-method"
},
{
"applyEndPatternLast": 1,
"begin": "\\s*(:?=)\\s*",
"beginCaptures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Attribute value",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-expression"
}
]
}
]
},
{
"applyEndPatternLast": 1,
"begin": "(?:\\s+|,)\\.\\.\\.",
"beginCaptures": {
"1": {
"name": "keyword.operator.spread.marko"
}
},
"comment": "A ...spread attribute",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.marko-spread-attribute",
"patterns": [
{
"include": "#javascript-expression"
}
]
},
{
"begin": "\\s*(,(?!,))",
"captures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Consume any whitespace after a comma",
"end": "(?!\\S)"
},
{
"include": "#javascript-comment-multiline"
},
{
"include": "#invalid"
}
]
},
"concise-html-block": {
"begin": "\\s*(--+)\\s*$",
"beginCaptures": {
      "1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- HTML block within concise mode content. ---",
"end": "\\1",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-html-block",
"patterns": [
{
"include": "#content-html-mode"
}
]
},
"concise-html-line": {
"captures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
},
"2": {
"patterns": [
{
"include": "#html-comments"
},
{
"include": "#tag-html"
},
{
"match": "\\\\.",
"name": "string"
},
{
"include": "#placeholder"
},
{
"match": ".+?",
"name": "string"
}
]
}
},
"comment": "-- HTML line within concise mode content. (content-html-mode w/o scriptlet)",
"match": "\\s*(--+)(?=\\s+\\S)(.*$)",
"name": "meta.section.marko-html-line"
},
"concise-open-tag-content": {
"patterns": [
{
"include": "#tag-before-attrs"
},
{
"begin": "\\s*\\[",
"beginCaptures": {
"0": {
"name": "punctuation.section.scope.begin.marko"
}
},
"end": "]",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko"
}
},
"patterns": [
{
"include": "#attrs"
},
{
"include": "#invalid"
}
]
},
{
"begin": "(?!^)(?= )",
"end": "(?=--)|(?<!,)(?=\\n)",
"patterns": [
{
"include": "#attrs"
},
{
"include": "#invalid"
}
]
}
]
},
"concise-script-block": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise script content block. ---",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-script-block",
"patterns": [
{
"include": "#content-embedded-script"
}
]
},
"concise-script-line": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise script content line.",
"end": "$",
"name": "meta.section.marko-script-line",
"patterns": [
{
"include": "#content-embedded-script"
}
]
},
"concise-style-block": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.css",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style"
}
]
},
"concise-style-block-less": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.less",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
},
"concise-style-block-scss": {
"begin": "(\\s+)(--+)\\s*$",
"beginCaptures": {
"2": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "--- Embedded concise style content block. ---",
"contentName": "source.scss",
"end": "(\\2)|(?=^(?!\\1)\\s*\\S)",
"endCaptures": {
"1": {
"name": "punctuation.section.scope.end.marko"
}
},
"name": "meta.section.marko-style-block",
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
},
"concise-style-line": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.css",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style"
}
]
},
"concise-style-line-less": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.less",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
},
"concise-style-line-scss": {
"applyEndPatternLast": 1,
"begin": "\\s*(--+)",
"beginCaptures": {
"1": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "-- Embedded concise style content line.",
"contentName": "source.scss",
"end": "$",
"name": "meta.section.marko-style-line",
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
},
"content-concise-mode": {
"comment": "Concise mode content block.",
"name": "meta.marko-concise-content",
"patterns": [
{
"include": "#scriptlet"
},
{
"include": "#javascript-comments"
},
{
"include": "#html-comments"
},
{
"include": "#concise-html-block"
},
{
"include": "#concise-html-line"
},
{
"include": "#tag-html"
},
{
"comment": "A concise html tag.",
"patterns": [
{
"begin": "^(\\s*)(?=style\\.less\\b)",
"comment": "Concise style tag less",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block-less"
},
{
"include": "#concise-style-line-less"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=style\\.scss\\b)",
"comment": "Concise style tag scss",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block-scss"
},
{
"include": "#concise-style-line-scss"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=style\\b)",
"comment": "Concise style tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-style-block"
},
{
"include": "#concise-style-line"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=script\\b)",
"comment": "Concise script tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#concise-script-block"
},
{
"include": "#concise-script-line"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
},
{
"begin": "^(\\s*)(?=[a-zA-Z0-9_$@])",
"comment": "Normal concise tag",
"patterns": [
{
"include": "#concise-open-tag-content"
},
{
"include": "#content-concise-mode"
}
],
"while": "(?=^\\1\\s+(\\S|$))"
}
]
},
{
"include": "#invalid"
}
]
},
"content-embedded-script": {
"name": "meta.embedded.js",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.js"
}
]
},
"content-embedded-style": {
"name": "meta.embedded.css",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css"
}
]
},
"content-embedded-style-less": {
"name": "meta.embedded.css.less",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css.less"
}
]
},
"content-embedded-style-scss": {
"name": "meta.embedded.css.scss",
"patterns": [
{
"include": "#placeholder"
},
{
"include": "source.css.scss"
}
]
},
"content-html-mode": {
"comment": "HTML mode content block.",
"patterns": [
{
"include": "#scriptlet"
},
{
"include": "#html-comments"
},
{
"include": "#tag-html"
},
{
"match": "\\\\.",
"name": "string"
},
{
"include": "#placeholder"
},
{
"match": ".+?",
"name": "string"
}
]
},
"html-args-or-method": {
"patterns": [
{
"include": "#javascript-args"
},
{
"begin": "(?<=\\))\\s*(?=\\{)",
"comment": "Attribute method shorthand following parens",
"contentName": "source.js",
"end": "(?<=\\})",
"name": "meta.embedded.js",
"patterns": [
{
"include": "source.js"
}
]
}
]
},
"html-comments": {
"patterns": [
{
"begin": "\\s*(<!(--)?)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.comment.marko"
}
},
"comment": "HTML comments, doctypes & cdata",
"end": "\\2>",
"endCaptures": {
"0": {
"name": "punctuation.definition.comment.marko"
}
},
"name": "comment.block.marko"
},
{
"begin": "\\s*(<html-comment>)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.comment.marko"
}
},
"comment": "Preserved HTML comment tag",
"end": "</html-comment>",
"endCaptures": {
"0": {
"name": "punctuation.definition.comment.marko"
}
},
"name": "comment.block.marko"
}
]
},
"invalid": {
"match": "[^\\s]",
"name": "invalid.illegal.character-not-allowed-here.marko"
},
"javascript-args": {
"begin": "(?=\\()",
"comment": "Javascript style arguments",
"contentName": "source.js",
"end": "(?<=\\))",
"name": "meta.embedded.js",
"patterns": [
{
"include": "source.js"
}
]
},
"javascript-comment-line": {
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "JavaScript // single line comment",
"contentName": "source.js",
"match": "\\s*//.*$"
},
"javascript-comment-multiline": {
"begin": "\\s*(?=/\\*)",
"comment": "JavaScript /* block comment */",
"contentName": "source.js",
"end": "(?<=\\*/)",
"patterns": [
{
"include": "source.js"
}
]
},
"javascript-comments": {
"patterns": [
{
"include": "#javascript-comment-multiline"
},
{
"include": "#javascript-comment-line"
}
]
},
"javascript-enclosed": {
"comment": "Matches JavaScript content and ensures enclosed blocks are matched.",
"patterns": [
{
"include": "#javascript-comments"
},
{
"include": "#javascript-args"
},
{
"begin": "(?={)",
"end": "(?<=})",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=\\[)",
"end": "(?<=])",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=\")",
"end": "(?<=\")",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=')",
"end": "(?<=')",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "(?=`)",
"end": "(?<=`)",
"patterns": [
{
"include": "source.js"
}
]
},
{
"begin": "/(?!<[\\]})A-Z0-9.<%]\\s*/)(?!/?>|$)",
"captures": {
"0": {
"name": "string.regexp.js"
}
},
"contentName": "source.js",
"end": "/[gimsuy]*",
"patterns": [
{
"include": "source.js#regexp"
},
{
"include": "source.js"
}
]
},
{
"begin": "(?x)\\s*(?:\n\t\t\t\t\t\t\t\t(?:\\b(?:new|typeof|instanceof|in)\\b)| # Keyword operators\n\t\t\t\t\t\t\t\t\\&\\&|\\|\\|| # Logical operators\n\t\t\t\t\t\t\t\t[\\^|&]| # Bitwise operators\n\t\t\t\t\t\t\t\t[!=]=|[!=]==|<|<[=<]|=>| # Comparison operators (Note you cannot use * or ? here)\n\t\t\t\t\t\t\t\t[?:]| # Ternary operators\n\t\t\t\t\t\t\t\t[-+*%](?!-) # Arithmetic operators\n\t\t\t\t\t\t\t)",
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"end": "(?=\\S)"
}
]
},
"javascript-expression": {
"patterns": [
{
"include": "#javascript-enclosed"
},
{
"captures": {
"0": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Match identifiers and member expressions",
"match": "[0-9a-zA-Z$_.]+"
}
]
},
"javascript-statement": {
"patterns": [
{
"include": "#javascript-enclosed"
},
{
"include": "source.js"
}
]
},
"open-tag-content": {
"patterns": [
{
"include": "#tag-before-attrs"
},
{
"begin": "(?= )",
"comment": "Attributes begin after the first space within the tag name",
"end": "(?=/?>)",
"patterns": [
{
"include": "#attrs"
}
]
}
]
},
"placeholder": {
"begin": "\\$!?{",
"beginCaptures": {
"0": {
"name": "punctuation.definition.template-expression.begin.js"
}
},
"comment": "${ } placeholder",
"contentName": "source.js",
"end": "}",
"endCaptures": {
"0": {
"name": "punctuation.definition.template-expression.end.js"
}
},
"patterns": [
{
"include": "source.js"
}
]
},
"scriptlet": {
"begin": "^\\s*(\\$)\\s+",
"beginCaptures": {
"1": {
"name": "keyword.control.scriptlet.marko"
}
},
"comment": "An inline JavaScript scriptlet.",
"contentName": "source.js",
"end": "$",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-statement"
}
]
},
"tag-before-attrs": {
"comment": "Everything in a tag before the attributes content",
"patterns": [
{
"include": "#tag-name"
},
{
"comment": "Shorthand class or ID attribute",
"match": "[#.][a-zA-Z0-9_$][a-zA-Z0-9_$-]*",
"name": "entity.other.attribute-name.marko"
},
{
"begin": "/(?!/)",
"beginCaptures": {
"0": {
"name": "punctuation.separator.key-value.marko"
}
},
"comment": "Variable for a tag",
"contentName": "source.js",
"end": "(?=:?\\=|\\s|>|$|\\||\\(|/)",
"name": "meta.embedded.js",
"patterns": [
{
"comment": "Match identifiers",
"match": "[a-zA-Z$_][0-9a-zA-Z$_]*",
"name": "variable.other.constant.object.js"
},
{
"include": "source.js#object-binding-pattern"
},
{
"include": "source.js#array-binding-pattern"
},
{
"include": "source.js#var-single-variable"
},
{
"include": "#javascript-expression"
}
]
},
{
"applyEndPatternLast": 1,
"begin": "\\s*(:?=)\\s*",
"beginCaptures": {
"1": {
"patterns": [
{
"include": "source.js"
}
]
}
},
"comment": "Default attribute value",
"contentName": "source.js",
"end": "(?=.|$)",
"name": "meta.embedded.js",
"patterns": [
{
"include": "#javascript-expression"
}
]
},
{
"begin": "\\|",
"beginCaptures": {
"0": {
"name": "punctuation.section.scope.begin.marko"
}
},
"comment": "Parameters for a tag",
"end": "\\|",
"endCaptures": {
"0": {
"name": "punctuation.section.scope.end.marko"
}
},
"patterns": [
{
"include": "source.js#function-parameters-body"
},
{
"include": "source.js"
}
]
},
{
"include": "#html-args-or-method"
}
]
},
"tag-html": {
"comment": "Matches an HTML tag and its contents",
"patterns": [
{
"begin": "\\s*(<)(?=(area|base|br|col|embed|hr|img|input|link|meta|param|source|track|wbr)\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "HTML void elements",
"end": "/?>",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
}
]
},
{
"begin": "\\s*(<)(?=style\\.less\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag with less",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.less",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style-less"
}
]
}
]
},
{
"begin": "\\s*(<)(?=style\\.scss\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag with scss",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.less",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style-scss"
}
]
}
]
},
{
"begin": "\\s*(<)(?=style\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML style tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Style body content",
"contentName": "source.css",
"end": "\\s*(</)(style)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-style"
}
]
}
]
},
{
"begin": "\\s*(<)(?=script\\b)",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML script tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Script body content",
"contentName": "source.js",
"end": "\\s*(</)(script)?(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-embedded-script"
}
]
}
]
},
{
"begin": "\\s*(<)(?=[a-zA-Z0-9_$@])",
"beginCaptures": {
"1": {
"name": "punctuation.definition.tag.begin.marko"
}
},
"comment": "HTML normal tag",
"end": "/>|(?<=\\>)",
"endCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#open-tag-content"
},
{
"begin": ">",
"beginCaptures": {
"0": {
"name": "punctuation.definition.tag.end.marko"
}
},
"comment": "Body content",
"end": "\\s*(</)([a-zA-Z0-9_$:@-]+)?(.*?)(>)",
"endCaptures": {
"1": {
"name": "punctuation.definition.tag.end.marko"
},
"2": {
"patterns": [
{
"include": "#tag-name"
}
]
},
"3": {
"patterns": [
{
"include": "#invalid"
}
]
},
"4": {
"name": "punctuation.definition.tag.end.marko"
}
},
"patterns": [
{
"include": "#content-html-mode"
}
]
}
]
}
]
},
"tag-name": {
"patterns": [
{
"begin": "\\${",
"beginCaptures": {
"0": {
"name": "punctuation.definition.template-expression.begin.js"
}
},
"comment": "Dynamic tag.",
"end": "}",
"endCaptures": {
"0": {
"name": "punctuation.definition.template-expression.end.js"
}
},
"patterns": [
{
"include": "source.js"
}
]
},
{
"captures": {
"1": {
"name": "entity.name.tag.marko"
},
"2": {
"name": "storage.type.marko.css"
},
"3": {
"patterns": [
{
"comment": "Core tag.",
"match": "(attrs|return|import)(?=\\b)",
"name": "support.type.builtin.marko"
},
{
"comment": "Core tag.",
"match": "(for|if|while|else-if|else|macro|tag|await|let|const|effect|set|get|id|lifecycle)(?=\\b)",
"name": "support.function.marko"
},
{
"comment": "Attribute tag.",
"match": "@.+",
"name": "entity.other.attribute-name.marko"
},
{
"comment": "Native or userland tag.",
"match": ".+",
"name": "entity.name.tag.marko"
}
]
}
},
"match": "(style)\\.([a-zA-Z0-9$_-]+(?:\\.[a-zA-Z0-9$_-]+)*)|([a-zA-Z0-9_$@][a-zA-Z0-9_$@:-]*)"
}
]
}
},
"scopeName": "text.marko",
"uuid": "46c8c3f8-cabe-466a-a633-5deafdc51253"
} |
[
{
"context": "|fs-nt)', '#REQUIRED')\n .dat('<owner>John</owner>')\n .ele('node')\n ",
"end": 1482,
"score": 0.6640788316726685,
"start": 1478,
"tag": "NAME",
"value": "John"
},
{
"context": "|fs-nt)', '#REQUIRED')\n .dat('<owner>John... | test/basic/doctype.coffee | tomhughes/xmlbuilder-js | 0 | suite 'Document Type Declaration:', ->
test 'SYSTEM dtd from create()', ->
eq(
xml('root', { sysID: 'hello.dtd' }).ele('node').txt('test').end()
'<?xml version="1.0"?><!DOCTYPE root SYSTEM "hello.dtd"><root><node>test</node></root>'
)
test 'Public dtd from create()', ->
eq(
xml('HTML', {
pubID: '-//W3C//DTD HTML 4.01//EN'
sysID: 'http://www.w3.org/TR/html4/strict.dtd'
}).end()
'<?xml version="1.0"?>' +
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" ' +
'"http://www.w3.org/TR/html4/strict.dtd">' +
'<HTML/>'
)
test 'Empty dtd from create()', ->
eq(
xml('root', { sysID: '' }).ele('node').txt('test').end()
'<?xml version="1.0"?><!DOCTYPE root><root><node>test</node></root>'
)
test 'Internal and external dtd', ->
eq(
xml('root')
.dtd('hello.dtd')
.ins('pub_border', 'thin')
.ele('img', 'EMPTY')
.com('Image attributes follow')
.att('img', 'height', 'CDATA', '#REQUIRED')
.att('img', 'visible', '(yes|no)', '#DEFAULT', "yes")
.not('fs', { sysID: 'http://my.fs.com/reader' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0', sysID: 'http://my.fs.com/reader' })
.att('img', 'src', 'NOTATION (fs|fs-nt)', '#REQUIRED')
.dat('<owner>John</owner>')
.ele('node')
.ent('ent', 'my val')
.ent('ent', { sysID: 'http://www.myspec.com/ent' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ent('ent', { sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.pent('ent', 'my val')
.pent('ent', { sysID: 'http://www.myspec.com/ent' })
.pent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ele('nodearr', ['a', 'b'])
.root()
.ele('node').txt('test')
.end()
'<?xml version="1.0"?>' +
'<!DOCTYPE root SYSTEM "hello.dtd" [' +
'<?pub_border thin?>' +
'<!ELEMENT img EMPTY>' +
'<!-- Image attributes follow -->' +
'<!ATTLIST img height CDATA #REQUIRED>' +
'<!ATTLIST img visible (yes|no) "yes">' +
'<!NOTATION fs SYSTEM "http://my.fs.com/reader">' +
'<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0">' +
'<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0" "http://my.fs.com/reader">' +
'<!ATTLIST img src NOTATION (fs|fs-nt) #REQUIRED>' +
'<![CDATA[<owner>John</owner>]]>' +
'<!ELEMENT node (#PCDATA)>' +
'<!ENTITY ent "my val">' +
'<!ENTITY ent SYSTEM "http://www.myspec.com/ent">' +
'<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">' +
'<!ENTITY ent SYSTEM "http://www.myspec.com/ent" NDATA entprg>' +
'<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent" NDATA entprg>' +
'<!ENTITY % ent "my val">' +
'<!ENTITY % ent SYSTEM "http://www.myspec.com/ent">' +
'<!ENTITY % ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">' +
'<!ELEMENT nodearr (a,b)>' +
']>' +
'<root><node>test</node></root>'
)
test 'Internal and external dtd (pretty print)', ->
eq(
xml('root')
.dtd('hello.dtd')
.ins('pub_border', 'thin')
.ele('img', 'EMPTY')
.com('Image attributes follow')
.att('img', 'height', 'CDATA', '#REQUIRED')
.att('img', 'visible', '(yes|no)', '#DEFAULT', "yes")
.not('fs', { sysID: 'http://my.fs.com/reader' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0', sysID: 'http://my.fs.com/reader' })
.att('img', 'src', 'NOTATION (fs|fs-nt)', '#REQUIRED')
.dat('<owner>John</owner>')
.ele('node')
.ent('ent', 'my val')
.ent('ent', { sysID: 'http://www.myspec.com/ent' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ent('ent', { sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.pent('ent', 'my val')
.pent('ent', { sysID: 'http://www.myspec.com/ent' })
.pent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.root()
.ele('node').txt('test')
.end({ pretty: true })
"""
<?xml version="1.0"?>
<!DOCTYPE root SYSTEM "hello.dtd" [
<?pub_border thin?>
<!ELEMENT img EMPTY>
<!-- Image attributes follow -->
<!ATTLIST img height CDATA #REQUIRED>
<!ATTLIST img visible (yes|no) "yes">
<!NOTATION fs SYSTEM "http://my.fs.com/reader">
<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0" "http://my.fs.com/reader">
<!ATTLIST img src NOTATION (fs|fs-nt) #REQUIRED>
<![CDATA[<owner>John</owner>]]>
<!ELEMENT node (#PCDATA)>
<!ENTITY ent "my val">
<!ENTITY ent SYSTEM "http://www.myspec.com/ent">
<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">
<!ENTITY ent SYSTEM "http://www.myspec.com/ent" NDATA entprg>
<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent" NDATA entprg>
<!ENTITY % ent "my val">
<!ENTITY % ent SYSTEM "http://www.myspec.com/ent">
<!ENTITY % ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">
]>
<root>
<node>test</node>
</root>
"""
)
| 70161 | suite 'Document Type Declaration:', ->
test 'SYSTEM dtd from create()', ->
eq(
xml('root', { sysID: 'hello.dtd' }).ele('node').txt('test').end()
'<?xml version="1.0"?><!DOCTYPE root SYSTEM "hello.dtd"><root><node>test</node></root>'
)
test 'Public dtd from create()', ->
eq(
xml('HTML', {
pubID: '-//W3C//DTD HTML 4.01//EN'
sysID: 'http://www.w3.org/TR/html4/strict.dtd'
}).end()
'<?xml version="1.0"?>' +
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" ' +
'"http://www.w3.org/TR/html4/strict.dtd">' +
'<HTML/>'
)
test 'Empty dtd from create()', ->
eq(
xml('root', { sysID: '' }).ele('node').txt('test').end()
'<?xml version="1.0"?><!DOCTYPE root><root><node>test</node></root>'
)
test 'Internal and external dtd', ->
eq(
xml('root')
.dtd('hello.dtd')
.ins('pub_border', 'thin')
.ele('img', 'EMPTY')
.com('Image attributes follow')
.att('img', 'height', 'CDATA', '#REQUIRED')
.att('img', 'visible', '(yes|no)', '#DEFAULT', "yes")
.not('fs', { sysID: 'http://my.fs.com/reader' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0', sysID: 'http://my.fs.com/reader' })
.att('img', 'src', 'NOTATION (fs|fs-nt)', '#REQUIRED')
.dat('<owner><NAME></owner>')
.ele('node')
.ent('ent', 'my val')
.ent('ent', { sysID: 'http://www.myspec.com/ent' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ent('ent', { sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.pent('ent', 'my val')
.pent('ent', { sysID: 'http://www.myspec.com/ent' })
.pent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ele('nodearr', ['a', 'b'])
.root()
.ele('node').txt('test')
.end()
'<?xml version="1.0"?>' +
'<!DOCTYPE root SYSTEM "hello.dtd" [' +
'<?pub_border thin?>' +
'<!ELEMENT img EMPTY>' +
'<!-- Image attributes follow -->' +
'<!ATTLIST img height CDATA #REQUIRED>' +
'<!ATTLIST img visible (yes|no) "yes">' +
'<!NOTATION fs SYSTEM "http://my.fs.com/reader">' +
'<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0">' +
'<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0" "http://my.fs.com/reader">' +
'<!ATTLIST img src NOTATION (fs|fs-nt) #REQUIRED>' +
'<![CDATA[<owner>John</owner>]]>' +
'<!ELEMENT node (#PCDATA)>' +
'<!ENTITY ent "my val">' +
'<!ENTITY ent SYSTEM "http://www.myspec.com/ent">' +
'<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">' +
'<!ENTITY ent SYSTEM "http://www.myspec.com/ent" NDATA entprg>' +
'<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent" NDATA entprg>' +
'<!ENTITY % ent "my val">' +
'<!ENTITY % ent SYSTEM "http://www.myspec.com/ent">' +
'<!ENTITY % ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">' +
'<!ELEMENT nodearr (a,b)>' +
']>' +
'<root><node>test</node></root>'
)
test 'Internal and external dtd (pretty print)', ->
eq(
xml('root')
.dtd('hello.dtd')
.ins('pub_border', 'thin')
.ele('img', 'EMPTY')
.com('Image attributes follow')
.att('img', 'height', 'CDATA', '#REQUIRED')
.att('img', 'visible', '(yes|no)', '#DEFAULT', "yes")
.not('fs', { sysID: 'http://my.fs.com/reader' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0', sysID: 'http://my.fs.com/reader' })
.att('img', 'src', 'NOTATION (fs|fs-nt)', '#REQUIRED')
.dat('<owner><NAME></owner>')
.ele('node')
.ent('ent', 'my val')
.ent('ent', { sysID: 'http://www.myspec.com/ent' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ent('ent', { sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.pent('ent', 'my val')
.pent('ent', { sysID: 'http://www.myspec.com/ent' })
.pent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.root()
.ele('node').txt('test')
.end({ pretty: true })
"""
<?xml version="1.0"?>
<!DOCTYPE root SYSTEM "hello.dtd" [
<?pub_border thin?>
<!ELEMENT img EMPTY>
<!-- Image attributes follow -->
<!ATTLIST img height CDATA #REQUIRED>
<!ATTLIST img visible (yes|no) "yes">
<!NOTATION fs SYSTEM "http://my.fs.com/reader">
<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0" "http://my.fs.com/reader">
<!ATTLIST img src NOTATION (fs|fs-nt) #REQUIRED>
<![CDATA[<owner><NAME></owner>]]>
<!ELEMENT node (#PCDATA)>
<!ENTITY ent "my val">
<!ENTITY ent SYSTEM "http://www.myspec.com/ent">
<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">
<!ENTITY ent SYSTEM "http://www.myspec.com/ent" NDATA entprg>
<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent" NDATA entprg>
<!ENTITY % ent "my val">
<!ENTITY % ent SYSTEM "http://www.myspec.com/ent">
<!ENTITY % ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">
]>
<root>
<node>test</node>
</root>
"""
)
| true | suite 'Document Type Declaration:', ->
test 'SYSTEM dtd from create()', ->
eq(
xml('root', { sysID: 'hello.dtd' }).ele('node').txt('test').end()
'<?xml version="1.0"?><!DOCTYPE root SYSTEM "hello.dtd"><root><node>test</node></root>'
)
test 'Public dtd from create()', ->
eq(
xml('HTML', {
pubID: '-//W3C//DTD HTML 4.01//EN'
sysID: 'http://www.w3.org/TR/html4/strict.dtd'
}).end()
'<?xml version="1.0"?>' +
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" ' +
'"http://www.w3.org/TR/html4/strict.dtd">' +
'<HTML/>'
)
test 'Empty dtd from create()', ->
eq(
xml('root', { sysID: '' }).ele('node').txt('test').end()
'<?xml version="1.0"?><!DOCTYPE root><root><node>test</node></root>'
)
test 'Internal and external dtd', ->
eq(
xml('root')
.dtd('hello.dtd')
.ins('pub_border', 'thin')
.ele('img', 'EMPTY')
.com('Image attributes follow')
.att('img', 'height', 'CDATA', '#REQUIRED')
.att('img', 'visible', '(yes|no)', '#DEFAULT', "yes")
.not('fs', { sysID: 'http://my.fs.com/reader' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0', sysID: 'http://my.fs.com/reader' })
.att('img', 'src', 'NOTATION (fs|fs-nt)', '#REQUIRED')
.dat('<owner>PI:NAME:<NAME>END_PI</owner>')
.ele('node')
.ent('ent', 'my val')
.ent('ent', { sysID: 'http://www.myspec.com/ent' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ent('ent', { sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.pent('ent', 'my val')
.pent('ent', { sysID: 'http://www.myspec.com/ent' })
.pent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ele('nodearr', ['a', 'b'])
.root()
.ele('node').txt('test')
.end()
'<?xml version="1.0"?>' +
'<!DOCTYPE root SYSTEM "hello.dtd" [' +
'<?pub_border thin?>' +
'<!ELEMENT img EMPTY>' +
'<!-- Image attributes follow -->' +
'<!ATTLIST img height CDATA #REQUIRED>' +
'<!ATTLIST img visible (yes|no) "yes">' +
'<!NOTATION fs SYSTEM "http://my.fs.com/reader">' +
'<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0">' +
'<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0" "http://my.fs.com/reader">' +
'<!ATTLIST img src NOTATION (fs|fs-nt) #REQUIRED>' +
'<![CDATA[<owner>John</owner>]]>' +
'<!ELEMENT node (#PCDATA)>' +
'<!ENTITY ent "my val">' +
'<!ENTITY ent SYSTEM "http://www.myspec.com/ent">' +
'<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">' +
'<!ENTITY ent SYSTEM "http://www.myspec.com/ent" NDATA entprg>' +
'<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent" NDATA entprg>' +
'<!ENTITY % ent "my val">' +
'<!ENTITY % ent SYSTEM "http://www.myspec.com/ent">' +
'<!ENTITY % ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">' +
'<!ELEMENT nodearr (a,b)>' +
']>' +
'<root><node>test</node></root>'
)
test 'Internal and external dtd (pretty print)', ->
eq(
xml('root')
.dtd('hello.dtd')
.ins('pub_border', 'thin')
.ele('img', 'EMPTY')
.com('Image attributes follow')
.att('img', 'height', 'CDATA', '#REQUIRED')
.att('img', 'visible', '(yes|no)', '#DEFAULT', "yes")
.not('fs', { sysID: 'http://my.fs.com/reader' })
.not('fs-nt', { pubID: 'FS Network Reader 1.0', sysID: 'http://my.fs.com/reader' })
.att('img', 'src', 'NOTATION (fs|fs-nt)', '#REQUIRED')
.dat('<owner>PI:NAME:<NAME>END_PI</owner>')
.ele('node')
.ent('ent', 'my val')
.ent('ent', { sysID: 'http://www.myspec.com/ent' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.ent('ent', { sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.ent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent', nData: 'entprg' })
.pent('ent', 'my val')
.pent('ent', { sysID: 'http://www.myspec.com/ent' })
.pent('ent', { pubID: '-//MY//SPEC ENT//EN', sysID: 'http://www.myspec.com/ent' })
.root()
.ele('node').txt('test')
.end({ pretty: true })
"""
<?xml version="1.0"?>
<!DOCTYPE root SYSTEM "hello.dtd" [
<?pub_border thin?>
<!ELEMENT img EMPTY>
<!-- Image attributes follow -->
<!ATTLIST img height CDATA #REQUIRED>
<!ATTLIST img visible (yes|no) "yes">
<!NOTATION fs SYSTEM "http://my.fs.com/reader">
<!NOTATION fs-nt PUBLIC "FS Network Reader 1.0" "http://my.fs.com/reader">
<!ATTLIST img src NOTATION (fs|fs-nt) #REQUIRED>
<![CDATA[<owner>PI:NAME:<NAME>END_PI</owner>]]>
<!ELEMENT node (#PCDATA)>
<!ENTITY ent "my val">
<!ENTITY ent SYSTEM "http://www.myspec.com/ent">
<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">
<!ENTITY ent SYSTEM "http://www.myspec.com/ent" NDATA entprg>
<!ENTITY ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent" NDATA entprg>
<!ENTITY % ent "my val">
<!ENTITY % ent SYSTEM "http://www.myspec.com/ent">
<!ENTITY % ent PUBLIC "-//MY//SPEC ENT//EN" "http://www.myspec.com/ent">
]>
<root>
<node>test</node>
</root>
"""
)
|
[
{
"context": " \"properties\": {\n 0: \"Salzburg\"\n 1: \"41,5%\"\n }",
"end": 2209,
"score": 0.9822872281074524,
"start": 2201,
"tag": "NAME",
"value": "Salzburg"
},
{
"context": " \"properties\": {\n ... | app/components/converter/service-test.coffee | vidatio/web-app | 0 | "use strict"
describe "Service Converter", ->
beforeEach ->
module "app"
inject (ConverterService, $injector, $q) ->
@injector = $injector
@Converter = ConverterService
@deferred = $q.defer()
spyOn(@Converter, 'convertSHP2GeoJSON').and.returnValue(@deferred.promise)
it 'should be defined and included', ->
expect(@Converter).toBeDefined()
expect(@injector.has("ConverterService"))
it 'should convert arrays to geoJSON', ->
dataset = [["90", "70"], ["80", "80"], ["70", "90"]]
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [90, 70]
"properties": {}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [80, 80]
"properties": {}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
"properties": {}
}
]
expect(@Converter.convertArrays2GeoJSON(dataset, [], {x: 0, y: 1})).toEqual(geoJSON)
it 'should convert arrays with properties to GeoJSON', ->
header = ["City", "Content", "GEOMETRIE"]
dataset = [
["Innsbruck", "40,5%", "POINT (49 11)"]
["Salzburg", "41,5%", "POINT (49 12)"]
["Innsbruck", "42,5%", "POINT (49 13)"]
]
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [11, 49]
"properties": {
0: "Innsbruck"
1: "40,5%"
}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [12, 49]
"properties": {
0: "Salzburg"
1: "41,5%"
}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [13, 49]
"properties": {
0: "Innsbruck"
1: "42,5%"
}
}
]
expect(@Converter.convertArrays2GeoJSON(dataset, header, {x: 2, y: 2})).toEqual(geoJSON)
it 'should convert GeoJSON into arrays', ->
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [80, 80]
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [90, 70]
}
]
result =
[
[
"Point", 70, 90
],
[
"Point", 80, 80
],
[
"Point", 90, 70
]
]
expect(@Converter.convertGeoJSON2Arrays(geoJSON)).toEqual(result)
it 'should extract Headers from GeoJSON', ->
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
"properties":
"prop0": "value0"
"prop1": 0.0
}, {
"type": "Feature"
"geometry":
"type": "Polygon"
"coordinates": [
[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0],
[100.0, 1.0], [100.0, 0.0]]
]
"properties":
"prop0": "value1"
"prop1": 1.0
}
]
headers =
[
"prop0", "prop1", "type", "coordinates 0", "coordinates 1",
"coordinates 2", "coordinates 3", "coordinates 4", "coordinates 5",
"coordinates 6", "coordinates 7", "coordinates 8", "coordinates 9"
]
expect(@Converter.convertGeoJSON2ColHeaders(geoJSON)).toEqual(headers)
| 69602 | "use strict"
describe "Service Converter", ->
beforeEach ->
module "app"
inject (ConverterService, $injector, $q) ->
@injector = $injector
@Converter = ConverterService
@deferred = $q.defer()
spyOn(@Converter, 'convertSHP2GeoJSON').and.returnValue(@deferred.promise)
it 'should be defined and included', ->
expect(@Converter).toBeDefined()
expect(@injector.has("ConverterService"))
it 'should convert arrays to geoJSON', ->
dataset = [["90", "70"], ["80", "80"], ["70", "90"]]
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [90, 70]
"properties": {}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [80, 80]
"properties": {}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
"properties": {}
}
]
expect(@Converter.convertArrays2GeoJSON(dataset, [], {x: 0, y: 1})).toEqual(geoJSON)
it 'should convert arrays with properties to GeoJSON', ->
header = ["City", "Content", "GEOMETRIE"]
dataset = [
["Innsbruck", "40,5%", "POINT (49 11)"]
["Salzburg", "41,5%", "POINT (49 12)"]
["Innsbruck", "42,5%", "POINT (49 13)"]
]
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [11, 49]
"properties": {
0: "Innsbruck"
1: "40,5%"
}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [12, 49]
"properties": {
0: "<NAME>"
1: "41,5%"
}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [13, 49]
"properties": {
0: "<NAME>"
1: "42,5%"
}
}
]
expect(@Converter.convertArrays2GeoJSON(dataset, header, {x: 2, y: 2})).toEqual(geoJSON)
it 'should convert GeoJSON into arrays', ->
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [80, 80]
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [90, 70]
}
]
result =
[
[
"Point", 70, 90
],
[
"Point", 80, 80
],
[
"Point", 90, 70
]
]
expect(@Converter.convertGeoJSON2Arrays(geoJSON)).toEqual(result)
it 'should extract Headers from GeoJSON', ->
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
"properties":
"prop0": "value0"
"prop1": 0.0
}, {
"type": "Feature"
"geometry":
"type": "Polygon"
"coordinates": [
[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0],
[100.0, 1.0], [100.0, 0.0]]
]
"properties":
"prop0": "value1"
"prop1": 1.0
}
]
headers =
[
"prop0", "prop1", "type", "coordinates 0", "coordinates 1",
"coordinates 2", "coordinates 3", "coordinates 4", "coordinates 5",
"coordinates 6", "coordinates 7", "coordinates 8", "coordinates 9"
]
expect(@Converter.convertGeoJSON2ColHeaders(geoJSON)).toEqual(headers)
| true | "use strict"
describe "Service Converter", ->
beforeEach ->
module "app"
inject (ConverterService, $injector, $q) ->
@injector = $injector
@Converter = ConverterService
@deferred = $q.defer()
spyOn(@Converter, 'convertSHP2GeoJSON').and.returnValue(@deferred.promise)
it 'should be defined and included', ->
expect(@Converter).toBeDefined()
expect(@injector.has("ConverterService"))
it 'should convert arrays to geoJSON', ->
dataset = [["90", "70"], ["80", "80"], ["70", "90"]]
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [90, 70]
"properties": {}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [80, 80]
"properties": {}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
"properties": {}
}
]
expect(@Converter.convertArrays2GeoJSON(dataset, [], {x: 0, y: 1})).toEqual(geoJSON)
it 'should convert arrays with properties to GeoJSON', ->
header = ["City", "Content", "GEOMETRIE"]
dataset = [
["Innsbruck", "40,5%", "POINT (49 11)"]
["Salzburg", "41,5%", "POINT (49 12)"]
["Innsbruck", "42,5%", "POINT (49 13)"]
]
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [11, 49]
"properties": {
0: "Innsbruck"
1: "40,5%"
}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [12, 49]
"properties": {
0: "PI:NAME:<NAME>END_PI"
1: "41,5%"
}
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [13, 49]
"properties": {
0: "PI:NAME:<NAME>END_PI"
1: "42,5%"
}
}
]
expect(@Converter.convertArrays2GeoJSON(dataset, header, {x: 2, y: 2})).toEqual(geoJSON)
it 'should convert GeoJSON into arrays', ->
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [80, 80]
}, {
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [90, 70]
}
]
result =
[
[
"Point", 70, 90
],
[
"Point", 80, 80
],
[
"Point", 90, 70
]
]
expect(@Converter.convertGeoJSON2Arrays(geoJSON)).toEqual(result)
it 'should extract Headers from GeoJSON', ->
geoJSON =
"type": "FeatureCollection"
"features": [{
"type": "Feature"
"geometry":
"type": "Point"
"coordinates": [70, 90]
"properties":
"prop0": "value0"
"prop1": 0.0
}, {
"type": "Feature"
"geometry":
"type": "Polygon"
"coordinates": [
[[100.0, 0.0], [101.0, 0.0], [101.0, 1.0],
[100.0, 1.0], [100.0, 0.0]]
]
"properties":
"prop0": "value1"
"prop1": 1.0
}
]
headers =
[
"prop0", "prop1", "type", "coordinates 0", "coordinates 1",
"coordinates 2", "coordinates 3", "coordinates 4", "coordinates 5",
"coordinates 6", "coordinates 7", "coordinates 8", "coordinates 9"
]
expect(@Converter.convertGeoJSON2ColHeaders(geoJSON)).toEqual(headers)
|
[
{
"context": "me jquery.<%= pkg.name %>.js\\n' +\n '@author Sebastian Helzle (sebastian@helzle.net or @sebobo)\\n' +\n '@",
"end": 502,
"score": 0.9998934864997864,
"start": 486,
"tag": "NAME",
"value": "Sebastian Helzle"
},
{
"context": "ame %>.js\\n' +\n '@auth... | Gruntfile.coffee | Sebobo/jquery.smallipop | 15 | module.exports = (grunt) ->
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-contrib-qunit'
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-contrib-sass'
# Project configuration.
grunt.initConfig
pkg: grunt.file.readJSON 'smallipop.jquery.json'
meta:
banner: '/*!\n' +
'jQuery <%= pkg.name %> plugin\n' +
'@name jquery.<%= pkg.name %>.js\n' +
'@author Sebastian Helzle (sebastian@helzle.net or @sebobo)\n' +
'@version <%= pkg.version %>\n' +
'@date <%= grunt.template.today("yyyy-mm-dd") %>\n' +
'@category jQuery plugin\n' +
'@copyright (c) 2011-2015 Small Improvements (http://www.small-improvements.com)\n' +
'@license Licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) license.\n' +
'*/\n'
qunit:
files: ['tests/**/*.html']
growl:
coffee:
title: 'grunt'
message: 'Compiled coffeescript'
sass:
title: 'grunt'
message: 'Compiled sass'
coffee:
compile:
options:
bare: true
files:
'lib/jquery.smallipop.js': ['src/coffee/jquery.smallipop.coffee']
'lib/main.js': ['src/coffee/main.coffee']
'tests/tests.js': ['src/coffee/tests.coffee']
watch:
coffee:
files: 'src/coffee/**/*.coffee',
tasks: ['coffee:compile']#, 'growl:coffee']
sass:
files: 'src/scss/**/*.scss'
tasks: ['sass:compile']#, 'growl:sass']
sass:
compile:
options:
style: 'expanded'
compass: true
files:
'css/screen.css': 'src/scss/screen.scss'
'css/jquery.smallipop.css': 'src/scss/jquery.<%= pkg.name %>.scss'
dist:
options:
style: 'compressed'
compass: true
files:
'css/jquery.smallipop.min.css': 'src/scss/jquery.<%= pkg.name %>.scss'
uglify:
dist:
options:
banner: '<%= meta.banner %>'
files:
'lib/jquery.smallipop.min.js': ['lib/jquery.<%= pkg.name %>.js']
# Default task which watches, sass and coffee.
grunt.registerTask 'default', ['coffee', 'sass', 'watch']
# Minify task
grunt.registerTask 'minify', ['uglify', 'sass:dist']
# Release task to run tests then minify js and css
grunt.registerTask 'release', ['qunit', 'minify']
| 209569 | module.exports = (grunt) ->
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-contrib-qunit'
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-contrib-sass'
# Project configuration.
grunt.initConfig
pkg: grunt.file.readJSON 'smallipop.jquery.json'
meta:
banner: '/*!\n' +
'jQuery <%= pkg.name %> plugin\n' +
'@name jquery.<%= pkg.name %>.js\n' +
'@author <NAME> (<EMAIL> or @sebobo)\n' +
'@version <%= pkg.version %>\n' +
'@date <%= grunt.template.today("yyyy-mm-dd") %>\n' +
'@category jQuery plugin\n' +
'@copyright (c) 2011-2015 Small Improvements (http://www.small-improvements.com)\n' +
'@license Licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) license.\n' +
'*/\n'
qunit:
files: ['tests/**/*.html']
growl:
coffee:
title: 'grunt'
message: 'Compiled coffeescript'
sass:
title: 'grunt'
message: 'Compiled sass'
coffee:
compile:
options:
bare: true
files:
'lib/jquery.smallipop.js': ['src/coffee/jquery.smallipop.coffee']
'lib/main.js': ['src/coffee/main.coffee']
'tests/tests.js': ['src/coffee/tests.coffee']
watch:
coffee:
files: 'src/coffee/**/*.coffee',
tasks: ['coffee:compile']#, 'growl:coffee']
sass:
files: 'src/scss/**/*.scss'
tasks: ['sass:compile']#, 'growl:sass']
sass:
compile:
options:
style: 'expanded'
compass: true
files:
'css/screen.css': 'src/scss/screen.scss'
'css/jquery.smallipop.css': 'src/scss/jquery.<%= pkg.name %>.scss'
dist:
options:
style: 'compressed'
compass: true
files:
'css/jquery.smallipop.min.css': 'src/scss/jquery.<%= pkg.name %>.scss'
uglify:
dist:
options:
banner: '<%= meta.banner %>'
files:
'lib/jquery.smallipop.min.js': ['lib/jquery.<%= pkg.name %>.js']
# Default task which watches, sass and coffee.
grunt.registerTask 'default', ['coffee', 'sass', 'watch']
# Minify task
grunt.registerTask 'minify', ['uglify', 'sass:dist']
# Release task to run tests then minify js and css
grunt.registerTask 'release', ['qunit', 'minify']
| true | module.exports = (grunt) ->
grunt.loadNpmTasks 'grunt-contrib-coffee'
grunt.loadNpmTasks 'grunt-contrib-watch'
grunt.loadNpmTasks 'grunt-contrib-qunit'
grunt.loadNpmTasks 'grunt-contrib-uglify'
grunt.loadNpmTasks 'grunt-contrib-sass'
# Project configuration.
grunt.initConfig
pkg: grunt.file.readJSON 'smallipop.jquery.json'
meta:
banner: '/*!\n' +
'jQuery <%= pkg.name %> plugin\n' +
'@name jquery.<%= pkg.name %>.js\n' +
'@author PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI or @sebobo)\n' +
'@version <%= pkg.version %>\n' +
'@date <%= grunt.template.today("yyyy-mm-dd") %>\n' +
'@category jQuery plugin\n' +
'@copyright (c) 2011-2015 Small Improvements (http://www.small-improvements.com)\n' +
'@license Licensed under the MIT (http://www.opensource.org/licenses/mit-license.php) license.\n' +
'*/\n'
qunit:
files: ['tests/**/*.html']
growl:
coffee:
title: 'grunt'
message: 'Compiled coffeescript'
sass:
title: 'grunt'
message: 'Compiled sass'
coffee:
compile:
options:
bare: true
files:
'lib/jquery.smallipop.js': ['src/coffee/jquery.smallipop.coffee']
'lib/main.js': ['src/coffee/main.coffee']
'tests/tests.js': ['src/coffee/tests.coffee']
watch:
coffee:
files: 'src/coffee/**/*.coffee',
tasks: ['coffee:compile']#, 'growl:coffee']
sass:
files: 'src/scss/**/*.scss'
tasks: ['sass:compile']#, 'growl:sass']
sass:
compile:
options:
style: 'expanded'
compass: true
files:
'css/screen.css': 'src/scss/screen.scss'
'css/jquery.smallipop.css': 'src/scss/jquery.<%= pkg.name %>.scss'
dist:
options:
style: 'compressed'
compass: true
files:
'css/jquery.smallipop.min.css': 'src/scss/jquery.<%= pkg.name %>.scss'
uglify:
dist:
options:
banner: '<%= meta.banner %>'
files:
'lib/jquery.smallipop.min.js': ['lib/jquery.<%= pkg.name %>.js']
# Default task which watches, sass and coffee.
grunt.registerTask 'default', ['coffee', 'sass', 'watch']
# Minify task
grunt.registerTask 'minify', ['uglify', 'sass:dist']
# Release task to run tests then minify js and css
grunt.registerTask 'release', ['qunit', 'minify']
|
[
{
"context": "#\n# Copyright (c) 2012 Konstantin Bender.\n#\n# Permission is hereby granted, free of charge",
"end": 40,
"score": 0.9998616576194763,
"start": 23,
"tag": "NAME",
"value": "Konstantin Bender"
}
] | source/array.coffee | konstantinbe/milk | 0 | #
# Copyright (c) 2012 Konstantin Bender.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
@module 'Milk', ->
class ArrayExtensions
# -------------------------------------------------- Native Functions ------
native_array_for_each = Array::forEach
native_array_map = Array::map
native_array_filter = Array::filter
native_array_reduce = Array::reduce
native_array_every = Array::every
native_array_some = Array::some
native_array_index_of = Array::indexOf
native_array_last_index_of = Array::lastIndexOf
# ------------------------------------------------- Private Functions ------
make_compare_function_for_sorting_by_keys = (keys) ->
(object1, object2) ->
result = 0
for key in keys
key_parts = key.split /\s+/
property = key_parts[0]
value1 = object1.value_for property
value2 = object2.value_for property
descending = key_parts[1] and key_parts[1] is "DESC"
ascending = not descending
comparison_result = @compare value1, value2
inverted_comparison_result = 0
inverted_comparison_result = -1 if comparison_result > 0
inverted_comparison_result = 1 if comparison_result < 0
result = (if ascending then comparison_result else inverted_comparison_result) if result is 0 and comparison_result isnt 0
result
# ----------------------------------------------- Working with Arrays ------
count: ->
@length
is_empty: ->
@count() is 0
contains: (object) ->
object in @
contains_all: (objects) ->
objects.all (object) => @contains object
contains_any: (objects) ->
objects.any (object) => @contains object
at: (index) ->
@[index] ? null
at_many: (indexes) ->
(@[index] ? null for index in indexes)
index_of: (object) ->
native_array_index_of.call this, object
last_index_of: (object) ->
native_array_last_index_of.call this, object
indexes_of: (object) ->
(index for current, index in this when current is object)
first: (count = null) ->
return @slice 0, count if count?
@[0] ? null
second: ->
@[1] ? null
third: ->
@[2] ? null
last: (count = null) ->
return @[@length - 1] ? null unless count?
return [] if count is 0
@slice -count
rest: ->
@slice 1
# --------------------------------------------------- Deriving Arrays ------
with: (object) ->
@copy().add object
with_many: (objects) ->
@copy().add_many objects
with_at: (object, index) ->
@copy().insert_at object, index
with_many_at: (objects, index) ->
@copy().insert_many_at objects, index
with_before: (object, next) ->
@copy().insert_before object, next
with_many_before: (objects, next) ->
@copy().insert_many_before objects, next
with_after: (object, previous) ->
@copy().insert_after object, previous
with_many_after: (objects, previous) ->
@copy().insert_many_after objects, previous
without: (object) ->
@copy().remove object
without_many: (objects) ->
@copy().remove_many objects
without_at: (index) ->
@copy().remove_at index
without_many_at: (indexes) ->
@copy().remove_many_at indexes
compacted: ->
(object for object in this when object?)
flattened: ->
@inject [], (result, object) =>
objects = if @is_array object then object.flattened() else [object]
result.add_many objects
reversed: ->
@copy().reverse()
sorted: ->
@copy().sort()
unique: ->
objects = []
objects.add object for object in this when not objects.contains object
objects
intersect: (objects) ->
(object for object in @unique() when objects.contains object)
unite: (objects) ->
@concat(objects).unique()
zip: (arrays...) ->
arrays = [this].with_many arrays
counts = arrays.collect (array) -> array.count()
zipped = []
for index in [0...counts.max()]
row = arrays.collect (array) -> array[index]
zipped.add row
zipped
# ------------------------------------------------ Enumerating Arrays ------
each: (block) ->
native_array_for_each.call this, block
collect: (block) ->
native_array_map.call this, block
select: (block) ->
native_array_filter.call this, block
reject: (block) ->
result = []
@each (object) ->
result.push object unless block object
result
detect: (block) ->
# TODO: optimize.
@select(block).first() or null
pluck: (key) ->
@collect (value) ->
value.value_for key
partition: (block) ->
block ?= Math.identity
selected = []
rejected = []
for value in this
if block value then selected.add value else rejected.add value
[selected, rejected]
all: (block) ->
native_array_every.call this, block
any: (block) ->
native_array_some.call this, block
min: (compare = null) ->
compare ?= @compare
min = @first()
min = object for object in @ when compare(object, min) < 0
min
max: (compare = null) ->
compare ?= @compare
max = @first()
max = object for object in @ when compare(object, max) > 0
max
group_by: (key_or_block) ->
block = if @is_function key_or_block then key_or_block else (object) -> object.value_for key_or_block
partition = {}
@each (object) ->
key = block object
partition[key] = [] unless partition[key]?
partition[key].add object
partition
inject: (initial, block) ->
native_array_reduce.call this, block, initial
# --------------------------------------------------- Mutating Arrays ------
add: (object) ->
@push object
@
add_many: (objects) ->
@push objects...
@
# Removes the first occurence of object.
remove: (object) ->
index = @index_of(object)
@splice index, 1 if 0 <= index < @length
@
remove_many: (objects) ->
for object in objects
index = @index_of(object)
@splice index, 1 if 0 <= index < @length
@
remove_at: (index) ->
@error "Array#remove_at() called with invalid index: #{index}, count: #{@length}" unless 0 <= index < @length
@splice index, 1
@
remove_many_at: (indexes) ->
for index in indexes.sorted().reversed()
@remove_at index
@
remove_all: () ->
@pop() while @length > 0
@
insert_at: (object, index) ->
@error "Can't insert object at index #{index}, index is out of bounds [0, #{@length}]" unless 0 <= index <= @length
@splice index, 0, object
@
insert_many_at: (objects, index) ->
@error "Can't insert objects at index #{index}, index is out of bounds [0, #{@length}]" unless 0 <= index <= @length
@splice index, 0, objects...
@
insert_before: (object, next) ->
index = @index_of next
index = 0 if index < 0
@splice index, 0, object
@
insert_many_before: (objects, next) ->
index = @index_of next
index = 0 if index < 0
@splice index, 0, objects...
@
insert_after: (object, previous) ->
count = @count()
index = @last_index_of(previous) + 1
index = count if index > count
@splice index, 0, object
@
insert_many_after: (objects, previous) ->
count = @count()
index = @last_index_of(previous) + 1
index = count if index > count
@splice index, 0, objects...
@
replace_with: (object, replacement) ->
index = @index_of object
@splice index, 1, replacement if index >= 0
@
replace_with_many: (object, replacements) ->
index = @index_of object
@splice index, 1, replacements... if index >= 0
@
replace_at_with: (index, replacement) ->
@splice index, 1, replacement
@
replace_at_with_many: (index, replacements) ->
@splice index, 1, replacements...
@
sort_by: (keys) ->
@sort make_compare_function_for_sorting_by_keys keys if keys.count() > 0
equals: (object) ->
return no unless @is_array object
return no unless @length == object.length
return no unless @all (value, index) -> value is object[index] or @are_equal value, object[index]
yes
copy: ->
[].concat @
to_string: ->
strings = @collect (object) -> if object? then object.to_string() else object
"[" + strings.join(", ") + "]"
Array.includes ArrayExtensions
| 215094 | #
# Copyright (c) 2012 <NAME>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
@module 'Milk', ->
class ArrayExtensions
# -------------------------------------------------- Native Functions ------
native_array_for_each = Array::forEach
native_array_map = Array::map
native_array_filter = Array::filter
native_array_reduce = Array::reduce
native_array_every = Array::every
native_array_some = Array::some
native_array_index_of = Array::indexOf
native_array_last_index_of = Array::lastIndexOf
# ------------------------------------------------- Private Functions ------
make_compare_function_for_sorting_by_keys = (keys) ->
(object1, object2) ->
result = 0
for key in keys
key_parts = key.split /\s+/
property = key_parts[0]
value1 = object1.value_for property
value2 = object2.value_for property
descending = key_parts[1] and key_parts[1] is "DESC"
ascending = not descending
comparison_result = @compare value1, value2
inverted_comparison_result = 0
inverted_comparison_result = -1 if comparison_result > 0
inverted_comparison_result = 1 if comparison_result < 0
result = (if ascending then comparison_result else inverted_comparison_result) if result is 0 and comparison_result isnt 0
result
# ----------------------------------------------- Working with Arrays ------
count: ->
@length
is_empty: ->
@count() is 0
contains: (object) ->
object in @
contains_all: (objects) ->
objects.all (object) => @contains object
contains_any: (objects) ->
objects.any (object) => @contains object
at: (index) ->
@[index] ? null
at_many: (indexes) ->
(@[index] ? null for index in indexes)
index_of: (object) ->
native_array_index_of.call this, object
last_index_of: (object) ->
native_array_last_index_of.call this, object
indexes_of: (object) ->
(index for current, index in this when current is object)
first: (count = null) ->
return @slice 0, count if count?
@[0] ? null
second: ->
@[1] ? null
third: ->
@[2] ? null
last: (count = null) ->
return @[@length - 1] ? null unless count?
return [] if count is 0
@slice -count
rest: ->
@slice 1
# --------------------------------------------------- Deriving Arrays ------
with: (object) ->
@copy().add object
with_many: (objects) ->
@copy().add_many objects
with_at: (object, index) ->
@copy().insert_at object, index
with_many_at: (objects, index) ->
@copy().insert_many_at objects, index
with_before: (object, next) ->
@copy().insert_before object, next
with_many_before: (objects, next) ->
@copy().insert_many_before objects, next
with_after: (object, previous) ->
@copy().insert_after object, previous
with_many_after: (objects, previous) ->
@copy().insert_many_after objects, previous
without: (object) ->
@copy().remove object
without_many: (objects) ->
@copy().remove_many objects
without_at: (index) ->
@copy().remove_at index
without_many_at: (indexes) ->
@copy().remove_many_at indexes
compacted: ->
(object for object in this when object?)
flattened: ->
@inject [], (result, object) =>
objects = if @is_array object then object.flattened() else [object]
result.add_many objects
reversed: ->
@copy().reverse()
sorted: ->
@copy().sort()
unique: ->
objects = []
objects.add object for object in this when not objects.contains object
objects
intersect: (objects) ->
(object for object in @unique() when objects.contains object)
unite: (objects) ->
@concat(objects).unique()
zip: (arrays...) ->
arrays = [this].with_many arrays
counts = arrays.collect (array) -> array.count()
zipped = []
for index in [0...counts.max()]
row = arrays.collect (array) -> array[index]
zipped.add row
zipped
# ------------------------------------------------ Enumerating Arrays ------
each: (block) ->
native_array_for_each.call this, block
collect: (block) ->
native_array_map.call this, block
select: (block) ->
native_array_filter.call this, block
reject: (block) ->
result = []
@each (object) ->
result.push object unless block object
result
detect: (block) ->
# TODO: optimize.
@select(block).first() or null
pluck: (key) ->
@collect (value) ->
value.value_for key
partition: (block) ->
block ?= Math.identity
selected = []
rejected = []
for value in this
if block value then selected.add value else rejected.add value
[selected, rejected]
all: (block) ->
native_array_every.call this, block
any: (block) ->
native_array_some.call this, block
min: (compare = null) ->
compare ?= @compare
min = @first()
min = object for object in @ when compare(object, min) < 0
min
max: (compare = null) ->
compare ?= @compare
max = @first()
max = object for object in @ when compare(object, max) > 0
max
group_by: (key_or_block) ->
block = if @is_function key_or_block then key_or_block else (object) -> object.value_for key_or_block
partition = {}
@each (object) ->
key = block object
partition[key] = [] unless partition[key]?
partition[key].add object
partition
inject: (initial, block) ->
native_array_reduce.call this, block, initial
# --------------------------------------------------- Mutating Arrays ------
add: (object) ->
@push object
@
add_many: (objects) ->
@push objects...
@
# Removes the first occurence of object.
remove: (object) ->
index = @index_of(object)
@splice index, 1 if 0 <= index < @length
@
remove_many: (objects) ->
for object in objects
index = @index_of(object)
@splice index, 1 if 0 <= index < @length
@
remove_at: (index) ->
@error "Array#remove_at() called with invalid index: #{index}, count: #{@length}" unless 0 <= index < @length
@splice index, 1
@
remove_many_at: (indexes) ->
for index in indexes.sorted().reversed()
@remove_at index
@
remove_all: () ->
@pop() while @length > 0
@
insert_at: (object, index) ->
@error "Can't insert object at index #{index}, index is out of bounds [0, #{@length}]" unless 0 <= index <= @length
@splice index, 0, object
@
insert_many_at: (objects, index) ->
@error "Can't insert objects at index #{index}, index is out of bounds [0, #{@length}]" unless 0 <= index <= @length
@splice index, 0, objects...
@
insert_before: (object, next) ->
index = @index_of next
index = 0 if index < 0
@splice index, 0, object
@
insert_many_before: (objects, next) ->
index = @index_of next
index = 0 if index < 0
@splice index, 0, objects...
@
insert_after: (object, previous) ->
count = @count()
index = @last_index_of(previous) + 1
index = count if index > count
@splice index, 0, object
@
insert_many_after: (objects, previous) ->
count = @count()
index = @last_index_of(previous) + 1
index = count if index > count
@splice index, 0, objects...
@
replace_with: (object, replacement) ->
index = @index_of object
@splice index, 1, replacement if index >= 0
@
replace_with_many: (object, replacements) ->
index = @index_of object
@splice index, 1, replacements... if index >= 0
@
replace_at_with: (index, replacement) ->
@splice index, 1, replacement
@
replace_at_with_many: (index, replacements) ->
@splice index, 1, replacements...
@
sort_by: (keys) ->
@sort make_compare_function_for_sorting_by_keys keys if keys.count() > 0
equals: (object) ->
return no unless @is_array object
return no unless @length == object.length
return no unless @all (value, index) -> value is object[index] or @are_equal value, object[index]
yes
copy: ->
[].concat @
to_string: ->
strings = @collect (object) -> if object? then object.to_string() else object
"[" + strings.join(", ") + "]"
Array.includes ArrayExtensions
| true | #
# Copyright (c) 2012 PI:NAME:<NAME>END_PI.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
@module 'Milk', ->
class ArrayExtensions
# -------------------------------------------------- Native Functions ------
native_array_for_each = Array::forEach
native_array_map = Array::map
native_array_filter = Array::filter
native_array_reduce = Array::reduce
native_array_every = Array::every
native_array_some = Array::some
native_array_index_of = Array::indexOf
native_array_last_index_of = Array::lastIndexOf
# ------------------------------------------------- Private Functions ------
make_compare_function_for_sorting_by_keys = (keys) ->
(object1, object2) ->
result = 0
for key in keys
key_parts = key.split /\s+/
property = key_parts[0]
value1 = object1.value_for property
value2 = object2.value_for property
descending = key_parts[1] and key_parts[1] is "DESC"
ascending = not descending
comparison_result = @compare value1, value2
inverted_comparison_result = 0
inverted_comparison_result = -1 if comparison_result > 0
inverted_comparison_result = 1 if comparison_result < 0
result = (if ascending then comparison_result else inverted_comparison_result) if result is 0 and comparison_result isnt 0
result
# ----------------------------------------------- Working with Arrays ------
count: ->
@length
is_empty: ->
@count() is 0
contains: (object) ->
object in @
contains_all: (objects) ->
objects.all (object) => @contains object
contains_any: (objects) ->
objects.any (object) => @contains object
at: (index) ->
@[index] ? null
at_many: (indexes) ->
(@[index] ? null for index in indexes)
index_of: (object) ->
native_array_index_of.call this, object
last_index_of: (object) ->
native_array_last_index_of.call this, object
indexes_of: (object) ->
(index for current, index in this when current is object)
first: (count = null) ->
return @slice 0, count if count?
@[0] ? null
second: ->
@[1] ? null
third: ->
@[2] ? null
last: (count = null) ->
return @[@length - 1] ? null unless count?
return [] if count is 0
@slice -count
rest: ->
@slice 1
# --------------------------------------------------- Deriving Arrays ------
with: (object) ->
@copy().add object
with_many: (objects) ->
@copy().add_many objects
with_at: (object, index) ->
@copy().insert_at object, index
with_many_at: (objects, index) ->
@copy().insert_many_at objects, index
with_before: (object, next) ->
@copy().insert_before object, next
with_many_before: (objects, next) ->
@copy().insert_many_before objects, next
with_after: (object, previous) ->
@copy().insert_after object, previous
with_many_after: (objects, previous) ->
@copy().insert_many_after objects, previous
without: (object) ->
@copy().remove object
without_many: (objects) ->
@copy().remove_many objects
without_at: (index) ->
@copy().remove_at index
without_many_at: (indexes) ->
@copy().remove_many_at indexes
compacted: ->
(object for object in this when object?)
flattened: ->
@inject [], (result, object) =>
objects = if @is_array object then object.flattened() else [object]
result.add_many objects
reversed: ->
@copy().reverse()
sorted: ->
@copy().sort()
unique: ->
objects = []
objects.add object for object in this when not objects.contains object
objects
intersect: (objects) ->
(object for object in @unique() when objects.contains object)
unite: (objects) ->
@concat(objects).unique()
zip: (arrays...) ->
arrays = [this].with_many arrays
counts = arrays.collect (array) -> array.count()
zipped = []
for index in [0...counts.max()]
row = arrays.collect (array) -> array[index]
zipped.add row
zipped
# ------------------------------------------------ Enumerating Arrays ------
each: (block) ->
native_array_for_each.call this, block
collect: (block) ->
native_array_map.call this, block
select: (block) ->
native_array_filter.call this, block
reject: (block) ->
result = []
@each (object) ->
result.push object unless block object
result
detect: (block) ->
# TODO: optimize.
@select(block).first() or null
pluck: (key) ->
@collect (value) ->
value.value_for key
partition: (block) ->
block ?= Math.identity
selected = []
rejected = []
for value in this
if block value then selected.add value else rejected.add value
[selected, rejected]
all: (block) ->
native_array_every.call this, block
any: (block) ->
native_array_some.call this, block
min: (compare = null) ->
compare ?= @compare
min = @first()
min = object for object in @ when compare(object, min) < 0
min
max: (compare = null) ->
compare ?= @compare
max = @first()
max = object for object in @ when compare(object, max) > 0
max
group_by: (key_or_block) ->
block = if @is_function key_or_block then key_or_block else (object) -> object.value_for key_or_block
partition = {}
@each (object) ->
key = block object
partition[key] = [] unless partition[key]?
partition[key].add object
partition
inject: (initial, block) ->
native_array_reduce.call this, block, initial
# --------------------------------------------------- Mutating Arrays ------
add: (object) ->
@push object
@
add_many: (objects) ->
@push objects...
@
# Removes the first occurence of object.
remove: (object) ->
index = @index_of(object)
@splice index, 1 if 0 <= index < @length
@
remove_many: (objects) ->
for object in objects
index = @index_of(object)
@splice index, 1 if 0 <= index < @length
@
remove_at: (index) ->
@error "Array#remove_at() called with invalid index: #{index}, count: #{@length}" unless 0 <= index < @length
@splice index, 1
@
remove_many_at: (indexes) ->
for index in indexes.sorted().reversed()
@remove_at index
@
remove_all: () ->
@pop() while @length > 0
@
insert_at: (object, index) ->
@error "Can't insert object at index #{index}, index is out of bounds [0, #{@length}]" unless 0 <= index <= @length
@splice index, 0, object
@
insert_many_at: (objects, index) ->
@error "Can't insert objects at index #{index}, index is out of bounds [0, #{@length}]" unless 0 <= index <= @length
@splice index, 0, objects...
@
insert_before: (object, next) ->
index = @index_of next
index = 0 if index < 0
@splice index, 0, object
@
insert_many_before: (objects, next) ->
index = @index_of next
index = 0 if index < 0
@splice index, 0, objects...
@
insert_after: (object, previous) ->
count = @count()
index = @last_index_of(previous) + 1
index = count if index > count
@splice index, 0, object
@
insert_many_after: (objects, previous) ->
count = @count()
index = @last_index_of(previous) + 1
index = count if index > count
@splice index, 0, objects...
@
replace_with: (object, replacement) ->
index = @index_of object
@splice index, 1, replacement if index >= 0
@
replace_with_many: (object, replacements) ->
index = @index_of object
@splice index, 1, replacements... if index >= 0
@
  # Replaces the element at `index` with `replacement`; chainable.
  replace_at_with: (index, replacement) ->
    @splice index, 1, replacement
    @
  # Replaces the element at `index` with all `replacements`; chainable.
  replace_at_with_many: (index, replacements) ->
    @splice index, 1, replacements...
    @
sort_by: (keys) ->
@sort make_compare_function_for_sorting_by_keys keys if keys.count() > 0
equals: (object) ->
return no unless @is_array object
return no unless @length == object.length
return no unless @all (value, index) -> value is object[index] or @are_equal value, object[index]
yes
copy: ->
[].concat @
to_string: ->
strings = @collect (object) -> if object? then object.to_string() else object
"[" + strings.join(", ") + "]"
# Mix all of the above extension methods into the native Array prototype.
Array.includes ArrayExtensions
|
[
{
"context": " for n in [ 0 .. 1000 ]\n key = [ \"number:#{n}\", \"square\", n ** 2, ]\n input.write key\n yield later",
"end": 6913,
"score": 0.9584726095199585,
"start": 6882,
"tag": "KEY",
"value": "number:#{n}\", \"square\", n ** 2,"
},
{
"contex... | src/tests.coffee | loveencounterflow/hollerith-legacy | 0 |
############################################################################################################
njs_path = require 'path'
# njs_fs = require 'fs'
join = njs_path.join
#...........................................................................................................
CND = require 'cnd'
rpr = CND.rpr
badge = 'HOLLERITH/tests'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
suspend = require 'coffeenode-suspend'
step = suspend.step
after = suspend.after
# eventually = suspend.eventually
### TAINT experimentally using `later` in place of `setImmediate` ###
later = suspend.immediately
#...........................................................................................................
test = require 'guy-test'
#...........................................................................................................
D = require 'pipedreams'
$ = D.remit.bind D
$async = D.remit_async.bind D
#...........................................................................................................
HOLLERITH = require './main'
db = null
#...........................................................................................................
levelup = require 'level'
leveldown = require 'leveldown'
CODEC = require 'hollerith-codec'
#...........................................................................................................
ƒ = CND.format_number
# #-----------------------------------------------------------------------------------------------------------
# @_sort_list = ( list ) ->
# @_encode_list list
# list.sort Buffer.compare
# @_decode_list list
# return list
#===========================================================================================================
# HELPERS
#-----------------------------------------------------------------------------------------------------------
show_keys_and_key_bfrs = ( keys, key_bfrs ) ->
  # Debugging aid: prints a two-column table of decoded keys next to a hex
  # dump of their encoded buffers.  Returns `null`.
  # `f` renders a buffer as space-separated hex byte pairs.
  f = ( p ) -> ( t for t in ( p.toString 'hex' ).split /(..)/ when t isnt '' ).join ' '
  #.........................................................................................................
  columnify_settings =
    paddingChr: ' '
  #.........................................................................................................
  data = []
  key_bfrs = ( f p for p in key_bfrs )
  for key, idx in keys
    # Make NUL escapes visible in the textual representation.
    key_txt = ( rpr key ).replace /\\u0000/g, '∇'
    data.push { 'str': key_txt, 'bfr': key_bfrs[ idx ]}
  help '\n' + CND.columnify data, columnify_settings
  return null
#-----------------------------------------------------------------------------------------------------------
show_db_entries = ( handler ) ->
  # Debugging aid: streams all non-meta facets from the DB's underlying
  # LevelDB instance, prints a rendition of the collected facets, and calls
  # `handler` (no arguments) when the stream has ended.
  input = db[ '%self' ].createReadStream()
  input
    .pipe D.$show()
    .pipe $ ( { key, value, }, send ) => send [ key, value, ]
    # Drop metadata entries; only user-level facets get shown.
    .pipe $ ( [ key, value, ], send ) => send [ key, value, ] unless HOLLERITH._is_meta db, key
    .pipe $ ( [ key, value, ], send ) =>
      # debug '©RluhF', ( HOLLERITH.CODEC.decode key ), ( JSON.parse value )
      send [ key, value, ]
    .pipe D.$collect()
    .pipe $ ( facets, send ) =>
      help '\n' + HOLLERITH.DUMP.rpr_of_facets db, facets
      # buffer = new Buffer JSON.stringify [ '开', '彡' ]
      # debug '©GJfL6', HOLLERITH.CODEC.rpr_of_buffer null, buffer
    .pipe D.$on_end => handler()
#-----------------------------------------------------------------------------------------------------------
# Returns a fresh, numbered scratch-DB route under /tmp on every call.
get_new_db_name = ->
  get_new_db_name.idx += 1
  "/tmp/hollerith2-testdb-#{get_new_db_name.idx}"
# The counter lives on the function object so it persists across calls.
get_new_db_name.idx = 0
#-----------------------------------------------------------------------------------------------------------
read_all_keys = ( db, handler ) ->
  # Collects all keys of `db` into a list and hands it to
  # `handler null, keys` once the key stream has ended.
  # NOTE(review): stream errors are not handled here, so an 'error' event
  # would go unreported — confirm whether callers rely on that.
  Z = []
  input = db.createKeyStream()
  input.on 'end', -> handler null, Z
  input
    .pipe $ ( data, send ) => Z.push data
#-----------------------------------------------------------------------------------------------------------
clear_leveldb = ( leveldb, handler ) ->
  # Erases a LevelDB by closing it, destroying its on-disk location, and
  # re-opening it; calls `handler null` when done.
  step ( resume ) =>
    route = leveldb[ 'location' ]
    yield leveldb.close resume
    whisper "closed LevelDB"
    yield leveldown.destroy route, resume
    whisper "destroyed LevelDB"
    yield leveldb.open resume
    whisper "re-opened LevelDB"
    # help "erased and re-opened LevelDB at #{route}"
    handler null
#-----------------------------------------------------------------------------------------------------------
@_main = ( handler ) ->
  # Test entry point: opens the shared test DB (module-level `db`) and
  # hands this module over to the `guy-test` runner.
  # NOTE(review): `handler` is accepted but never used here — presumably
  # kept for interface compatibility with the caller; confirm.
  db_route = join __dirname, '..', 'dbs/tests'
  db_settings = size: 500
  db = HOLLERITH.new_db db_route, db_settings
  test @, 'timeout': 2500
#-----------------------------------------------------------------------------------------------------------
@_feed_test_data = ( db, probes_idx, settings, handler ) ->
  # Clears `db`, then writes test dataset number `probes_idx` into it and
  # calls `handler null` when the write stream has finished.  `settings`
  # is optional and passed through to `HOLLERITH.$write`.  Supported
  # datasets: -1 (generated number/square facets), 0, 2, 3, 4, 5 (literal
  # probe lists), 1 (URL-style keys decoded via `key_from_url`).
  switch arity = arguments.length
    when 3
      # Called as ( db, probes_idx, handler ): shift arguments.
      handler = settings
      settings = null
    when 4
      null
    else
      throw new Error "expected 3 or 4 arguments, got #{arity}"
  #.........................................................................................................
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    whisper "writing test dataset ##{probes_idx} with settings #{rpr settings}"
    input = D.create_throughstream()
    #.......................................................................................................
    switch probes_idx
      #-----------------------------------------------------------------------------------------------------
      when -1
        # Dataset -1 is generated on the fly: 1001 number/square facets.
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            handler null
            end()
        #...................................................................................................
        for n in [ 0 .. 1000 ]
          key = [ "number:#{n}", "square", n ** 2, ]
          input.write key
          yield later resume
        input.end()
      #-----------------------------------------------------------------------------------------------------
      when 0, 2, 3, 4, 5
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            handler null
            end()
        #...................................................................................................
        for probe in @_feed_test_data.probes[ probes_idx ]
          # key = HOLLERITH.new_so_key db, probe...
          # debug '©WV0j2', probe
          input.write probe
          yield later resume
        input.end()
      #-----------------------------------------------------------------------------------------------------
      when 1
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            end()
            handler null
        #...................................................................................................
        for url_key in @_feed_test_data.probes[ probes_idx ]
          key = HOLLERITH.key_from_url db, url_key
          input.write key
          yield later resume
        input.end()
      #-------------------------------------------------------------------------------------------------------
      else return handler new Error "illegal probes index #{rpr probes_idx}"
  #.........................................................................................................
  return null
#-----------------------------------------------------------------------------------------------------------
@_feed_test_data.probes = []
#...........................................................................................................
### probes_idx == 0 ###
@_feed_test_data.probes.push [
[ '𧷟1', 'guide/lineup/length', 1, ]
[ '𧷟2', 'guide/lineup/length', 2, ]
[ '𧷟3', 'guide/lineup/length', 3, ]
[ '𧷟4', 'guide/lineup/length', 4, ]
[ '𧷟', 'guide/lineup/length', 5, ]
[ '𧷟6', 'guide/lineup/length', 6, ]
[ '𧷟', 'cp/cid', 163295, ]
[ '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝', ], ]
[ '𧷟', 'rank/cjt', 5432, ]
[ '八', 'factor/strokeclass/wbf', '34', ]
[ '刀', 'factor/strokeclass/wbf', '5(12)3', ]
[ '宀', 'factor/strokeclass/wbf', '44', ]
[ '', 'factor/strokeclass/wbf', '12', ]
[ '貝', 'factor/strokeclass/wbf', '25(12)', ]
[ '八', 'rank/cjt', 12541, ]
[ '刀', 'rank/cjt', 12542, ]
[ '宀', 'rank/cjt', 12543, ]
[ '', 'rank/cjt', 12544, ]
[ '貝', 'rank/cjt', 12545, ]
]
#...........................................................................................................
### probes_idx == 1 ###
@_feed_test_data.probes.push [
'so|glyph:劬|cp/fncr:u-cjk/52ac|0'
'so|glyph:邭|cp/fncr:u-cjk/90ad|0'
'so|glyph:𠴦|cp/fncr:u-cjk-xb/20d26|0'
'so|glyph:𤿯|cp/fncr:u-cjk-xb/24fef|0'
'so|glyph:𧑴|cp/fncr:u-cjk-xb/27474|0'
'so|glyph:𨒡|cp/fncr:u-cjk-xb/284a1|0'
'so|glyph:𪚧|cp/fncr:u-cjk-xb/2a6a7|0'
'so|glyph:𪚫|cp/fncr:u-cjk-xb/2a6ab|0'
'so|glyph:𤿯|strokeorder:352513553254|0'
'so|glyph:𠴦|strokeorder:3525141121|0'
'so|glyph:𨒡|strokeorder:35251454|0'
'so|glyph:邭|strokeorder:3525152|0'
'so|glyph:𪚫|strokeorder:352515251115115113541|0'
'so|glyph:𪚧|strokeorder:35251525112511511|0'
'so|glyph:𧑴|strokeorder:352515251214251214|0'
'so|glyph:劬|strokeorder:3525153|0'
]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 2 ###
@_feed_test_data.probes.push [
[ '丁', 'strokecount', 2, ]
[ '三', 'strokecount', 3, ]
[ '夫', 'strokecount', 5, ]
[ '國', 'strokecount', 11, ]
[ '形', 'strokecount', 7, ]
[ '丁', 'componentcount', 1, ]
[ '三', 'componentcount', 1, ]
[ '夫', 'componentcount', 1, ]
[ '國', 'componentcount', 4, ]
[ '形', 'componentcount', 2, ]
[ '丁', 'components', [ '丁', ], ]
[ '三', 'components', [ '三', ], ]
[ '夫', 'components', [ '夫', ], ]
[ '國', 'components', [ '囗', '戈', '口', '一', ], ]
[ '形', 'components', [ '开', '彡', ], ]
]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 3 ###
@_feed_test_data.probes.push [
[ '丁', 'isa', [ 'glyph', 'guide', ] ]
[ '三', 'isa', [ 'glyph', 'guide', ] ]
[ '夫', 'isa', [ 'glyph', 'guide', ] ]
[ '國', 'isa', [ 'glyph', ] ]
[ '形', 'isa', [ 'glyph', ] ]
[ 'glyph:丁', 'strokeorder/count', 2, ]
[ 'glyph:三', 'strokeorder/count', 3, ]
[ 'glyph:夫', 'strokeorder/count', 5, ]
[ 'glyph:國', 'strokeorder/count', 11, ]
[ 'glyph:形', 'strokeorder/count', 7, ]
[ 'glyph:丁', 'guide/count', 1, ]
[ 'glyph:三', 'guide/count', 1, ]
[ 'glyph:夫', 'guide/count', 1, ]
[ 'glyph:國', 'guide/count', 4, ]
[ 'glyph:形', 'guide/count', 2, ]
[ 'glyph:丁', 'guide/lineup', [ '丁', ], ]
[ 'glyph:三', 'guide/lineup', [ '三', ], ]
[ 'glyph:夫', 'guide/lineup', [ '夫', ], ]
[ 'glyph:國', 'guide/lineup', [ '囗', '戈', '口', '一', ], ]
[ 'glyph:形', 'guide/lineup', [ '开', '彡', ], ]
]
#...........................................................................................................
### probes_idx == 4 ###
@_feed_test_data.probes.push [
[ '𧷟1', 'guide/lineup/length', 1, ]
[ '𧷟2', 'guide/lineup/length', 2, ]
[ '𧷟3', 'guide/lineup/length', 3, ]
[ '𧷟4', 'guide/lineup/length', 4, ]
[ '𧷟', 'guide/lineup/length', 5, ]
[ '𧷟6', 'guide/lineup/length', 6, ]
[ '𧷟', 'cp/cid', 163295, ]
[ '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝', ], ]
[ '𧷟', 'rank/cjt', 5432, ]
[ '八', 'factor/strokeclass/wbf', '34', ]
[ '刀', 'factor/strokeclass/wbf', '5(12)3', ]
[ '宀', 'factor/strokeclass/wbf', '44', ]
[ '', 'factor/strokeclass/wbf', '12', ]
[ '貝', 'factor/strokeclass/wbf', '25(12)', ]
[ '八', 'rank/cjt', 12541, ]
[ '刀', 'rank/cjt', 12542, ]
[ '宀', 'rank/cjt', 12543, ]
[ '', 'rank/cjt', 12544, ]
[ '貝', 'rank/cjt', 12545, ]
[ '𧷟1', 'a', 42 ]
[ '𧷟1', 'ab', 42 ]
[ '𧷟1', 'guide', 'xxx' ]
[ '𧷟1', 'guide/', 'yyy' ]
[ '𧷟1', 'z', 42 ]
]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 5 ###
@_feed_test_data.probes.push [
[ '丁', 'strokecount', 2, ]
# [ '三', 'strokecount', 3, ]
# [ '夫', 'strokecount', 5, ]
# [ '國', 'strokecount', 11, ]
# [ '形', 'strokecount', 7, ]
[ '丁', 'componentcount', 1, ]
# [ '三', 'componentcount', 1, ]
# [ '夫', 'componentcount', 1, ]
# [ '國', 'componentcount', 4, ]
# [ '形', 'componentcount', 2, ]
[ '丁', 'components', [ '丁', ], ]
# [ '三', 'components', [ '三', ], ]
# [ '夫', 'components', [ '夫', ], ]
# [ '國', 'components', [ '囗', '戈', '口', '一', ], ]
# [ '形', 'components', [ '开', '彡', ], ]
# [ { type: 'route', value: '/foo/bar', }, 'mtime', new Date '2011-10-10T14:48:00Z', ]
[ { type: 'route', value: '/foo/bar', }, 'mtime', 123456789, ]
]
# pos|guide/kwic/sortcode
# # [
# # "1027~~~~,00","0156~~~~,01,0509~~~~,02,0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,"
# # "0156~~~~,01","0509~~~~,02,0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,"
# # "0509~~~~,02","0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,0156~~~~,01,"
# # "0000~~~~,03","--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,0156~~~~,01,0509~~~~,02,"
# # ]
# 0087~~~~,00,0291~~~~,01,0555~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦖈|0
# 0087~~~~,00,0291~~~~,01,0823x2h-,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|聗|0
# 0087~~~~,00,0291~~~~,01,1023~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𤋕|0
# 0087~~~~,00,0294~~~~,01,0060~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦖔|0
# 0087~~~~,00,0294~~~~,01,0555~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦗆|0
# 0087~~~~,00,0295~~~~,01,0802~~~~,02,0958~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𥪻|0
# 0087~~~~,00,0312~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦔲|0
# 0087~~~~,00,0314~~~~,01,1173~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕀|0
# 0087~~~~,00,0319~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕇|0
# 0087~~~~,00,0355~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕆|0
# 0087~~~~,00,0373~~~~,01,0284~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕧|0
#-----------------------------------------------------------------------------------------------------------
@[ "write without error (1)" ] = ( T, done ) ->
  # Smoke test: batched writing of test dataset #0 must complete without
  # error.
  write_settings = batch: 10
  step ( resume ) =>
    yield @_feed_test_data db, 0, write_settings, resume
    done()
#-----------------------------------------------------------------------------------------------------------
@[ "write without error (2)" ] = ( T, done ) ->
  # Smoke test: batched writing of the generated dataset #-1 must complete
  # without error.
  write_settings = batch: 10
  step ( resume ) =>
    yield @_feed_test_data db, -1, write_settings, resume
    done()
#-----------------------------------------------------------------------------------------------------------
@[ "read without error" ] = ( T, done ) ->
  # Smoke test: after feeding dataset #0, a facet stream over the DB must
  # be readable to its end without raising.
  probes_idx = 0
  idx = -1
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    input = HOLLERITH.create_facetstream db
    input
      # .pipe HOLLERITH.$url_from_key db
      .pipe $ ( [ key, value, ], send ) =>
        idx += +1
        # T.eq key, matchers[ idx ]
      # BUG FIX: `end` was referenced but never invoked (`end; done()`), so
      # the end callback never completed the stream; it must be called, as
      # every sibling test does.
      .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (1)" ] = ( T, done ) ->
  # Writes keys [ 'x', 0..9, 'x' ] straight into the underlying LevelDB,
  # then checks that a prefix query for [ 'x', 4 ] yields exactly one key.
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    ### TAINT awaiting better solution ###
    NULL = HOLLERITH._encode_value db, 1
    for idx in [ 0 ... 10 ]
      key_bfr = HOLLERITH._encode_key db, [ 'x', idx, 'x', ]
      db[ '%self' ].put key_bfr, NULL
    #.......................................................................................................
    probe_idx = 4
    count = 0
    query = HOLLERITH._query_from_prefix db, [ 'x', probe_idx, ]
    # debug '©ETONp', HOLLERITH.CODEC.rpr_of_buffer key_bfr
    input = db[ '%self' ].createReadStream query
    input
      .pipe $ ( { key, value, }, send ) =>
        count += 1
        T.eq ( HOLLERITH._decode_key db, key )[ 1 ], probe_idx
      .pipe D.$on_end ( end ) =>
        T.eq count, 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (2)" ] = ( T, done ) ->
  # Same fixture as (1), but read back through the higher-level
  # `create_facetstream` with a `prefix` option.
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    ### TAINT awaiting better solution ###
    NULL = HOLLERITH._encode_value db, 1
    for idx in [ 0 ... 10 ]
      db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), NULL
    #.......................................................................................................
    probe_idx = 4
    count = 0
    prefix = [ 'x', probe_idx, ]
    input = HOLLERITH.create_facetstream db, { prefix, }
    input
      .pipe $ ( facet, send ) =>
        count += 1
        [ key, value, ] = facet
        T.eq key[ 1 ], probe_idx
      .pipe D.$on_end ( end ) =>
        T.eq count, 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (3)" ] = ( T, done ) ->
  # Range query on the raw LevelDB: keys [ 'x', 3 ] through [ 'x', 5 ]
  # (inclusive) must come back in ascending order.
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    ### TAINT awaiting better solution ###
    NULL = HOLLERITH._encode_value db, 1
    for idx in [ 0 ... 10 ]
      db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), NULL
    #.......................................................................................................
    probe_idx = 3
    count = 0
    delta = 2
    lo = [ 'x', probe_idx, ]
    hi = [ 'x', probe_idx + delta, ]
    # Combine an exact lower bound with the upper bound of a prefix query.
    query = { gte: ( HOLLERITH._encode_key db, lo ), lte: ( HOLLERITH._query_from_prefix db, hi )[ 'lte' ], }
    input = db[ '%self' ].createReadStream query
    input
      .pipe $ ( { key, value, }, send ) =>
        count += 1
        T.eq ( HOLLERITH._decode_key db, key )[ 1 ], probe_idx + count - 1
      .pipe D.$on_end ( end ) =>
        T.eq count, delta + 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (4)" ] = ( T, done ) ->
  # Same range as (3), but via `create_facetstream` with `lo` / `hi`.
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    for idx in [ 0 ... 10 ]
      db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), HOLLERITH._encode_value db, 1
    #.......................................................................................................
    probe_idx = 3
    count = 0
    delta = 2
    lo = [ 'x', probe_idx, ]
    hi = [ 'x', probe_idx + delta, ]
    input = HOLLERITH.create_facetstream db, { lo, hi, }
    input
      .pipe $ ( [ key, value, ], send ) =>
        count += 1
        T.eq key[ 1 ], probe_idx + count - 1
      .pipe D.$on_end ( end ) =>
        T.eq count, delta + 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "create_facetstream throws with wrong arguments" ] = ( T, done ) ->
  # Giving `hi` without `lo` is an error and must raise with this message.
  expected = "illegal to specify `hi` but not `lo`"
  T.throws expected, ( -> HOLLERITH.create_facetstream db, hi: [ 'xxx', ] )
  done()
#-----------------------------------------------------------------------------------------------------------
@[ "read POS facets" ] = ( T, done ) ->
  # After feeding dataset #0, a `lo`/`hi` range over the POS index for
  # `guide/lineup/length` values 2..4 must yield exactly `key_matchers`.
  probes_idx = 0
  idx = -1
  #.........................................................................................................
  key_matchers = [
    [ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
    [ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
    [ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
  ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    lo = [ 'pos', 'guide/lineup/length', 2, ]
    hi = [ 'pos', 'guide/lineup/length', 4, ]
    # input = HOLLERITH.create_keystream db, lo
    input = HOLLERITH.create_facetstream db, { lo, hi, }
    input
      # .pipe HOLLERITH.$url_from_key db
      .pipe $ ( [ key, value, ], send ) =>
        idx += +1
        # NOTE(review): `phrase` is computed but never asserted on —
        # presumably kept so `as_phrase` at least gets exercised; confirm.
        phrase = HOLLERITH.as_phrase db, key, value
        T.eq key, key_matchers[ idx ]
      .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
@[ "read POS phrases (1)" ] = ( T, done ) ->
  # Like "read POS facets", but via `create_phrasestream`, which yields
  # already-decoded phrases.
  probes_idx = 0
  idx = -1
  #.........................................................................................................
  matchers = [
    [ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
    [ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
    [ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
  ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    lo = [ 'pos', 'guide/lineup/length', 2, ]
    hi = [ 'pos', 'guide/lineup/length', 4, ]
    input = HOLLERITH.create_phrasestream db, { lo, hi, }
    input
      .pipe $ ( phrase, send ) =>
        idx += +1
        T.eq phrase, matchers[ idx ]
      .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
@[ "read POS phrases (2)" ] = ( T, done ) ->
  # After feeding dataset #0, streaming POS phrases under the
  # `guide/uchr/has` prefix must yield exactly the matchers below, in
  # order.
  probes_idx = 0
  idx = -1
  count = 0
  #.........................................................................................................
  matchers = [
    [ 'pos', 'guide/uchr/has', '八', '𧷟', 0, ]
    [ 'pos', 'guide/uchr/has', '刀', '𧷟', 1, ]
    [ 'pos', 'guide/uchr/has', '宀', '𧷟', 2, ]
    [ 'pos', 'guide/uchr/has', '貝', '𧷟', 4, ]
    [ 'pos', 'guide/uchr/has', '', '𧷟', 3, ]
  ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    prefix = [ 'pos', 'guide/uchr/has', ]
    # CLEANUP: removed the unused local `settings = { indexed: no, }` — it
    # was never passed to `create_phrasestream` (or anything else).
    input = HOLLERITH.create_phrasestream db, { prefix, }
    input
      .pipe $ ( phrase, send ) =>
        debug '©DsAfY', rpr phrase
        count += +1
        idx += +1
        T.eq phrase, matchers[ idx ]
      .pipe D.$on_end ( end ) =>
        T.eq count, matchers.length
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read SPO phrases" ] = ( T, done ) ->
  # After feeding dataset #0, streaming SPO phrases for subject '𧷟' must
  # yield its four facets in predicate order.
  debug '©Rsoxb', db[ '%self' ].isOpen()
  probes_idx = 0
  idx = -1
  count = 0
  #.........................................................................................................
  matchers = [
    [ 'spo', '𧷟', 'cp/cid', 163295 ]
    [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
    [ 'spo', '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝' ] ]
    [ 'spo', '𧷟', 'rank/cjt', 5432 ]
  ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    prefix = [ 'spo', '𧷟', ]
    input = HOLLERITH.create_phrasestream db, { prefix, }
    input
      .pipe $ ( phrase, send ) =>
        debug '©DsAfY', rpr phrase
        count += +1
        idx += +1
        T.eq phrase, matchers[ idx ]
      .pipe D.$on_end ( end ) =>
        T.eq count, matchers.length
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "sorting (1)" ] = ( T, done ) ->
  # LevelDB must order plain UTF-8 keys bytewise: after inserting the
  # shuffled probes, reading the keys back must reproduce `matchers` in
  # order (NUL bytes sort before all other bytes).
  step ( resume ) =>
    settings =
      db: leveldown
      keyEncoding: 'binary'
    leveldb = levelup '/tmp/hollerith2-test', settings
    yield clear_leveldb leveldb, resume
    probes = [
      'a'
      'ab'
      'abc'
      'abc\x00'
      'abc\x00a'
      'abca'
      'abcb'
      'abcc'
      'abcd'
      'abcde'
      'abcdef'
      'abcdefg' ]
    matchers = [
      new Buffer [ 0x61, ]
      new Buffer [ 0x61, 0x62, ]
      new Buffer [ 0x61, 0x62, 0x63, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x00, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x00, 0x61, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x61, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x62, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x63, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, ] ]
    CND.shuffle probes
    for probe in probes
      probe_bfr = new Buffer probe, 'utf-8'
      yield leveldb.put probe_bfr, '1', resume
    # BUG FIX: the keys were read back twice in a row (an accidentally
    # duplicated line); a single read suffices.
    probe_bfrs = yield read_all_keys leveldb, resume
    # debug '©RXPvv', '\n' + rpr probe_bfrs
    for probe_bfr, probe_idx in probe_bfrs
      matcher = matchers[ probe_idx ]
      ### TAINT looks like `T.eq buffer1, buffer2` doesn't work---sometimes... ###
      # T.eq probe_bfr, matcher
      T.ok probe_bfr.equals matcher
    leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sorting (2)" ] = ( T, done ) ->
  ### This test is here because there seemed to occur some strange ordering issues when
  using memdown instead of leveldown ###
  # Single-byte keys from 0x00 up to 0xfd must come back in bytewise order.
  step ( resume ) =>
    settings =
      db: leveldown
      keyEncoding: 'binary'
    leveldb = levelup '/tmp/hollerith2-test', settings
    yield clear_leveldb leveldb, resume
    probes = [
      new Buffer [ 0x00, ]
      new Buffer [ 0x01, ]
      new Buffer [ 0x02, ]
      new Buffer [ 0x03, ]
      new Buffer [ 0xf9, ]
      new Buffer [ 0xfa, ]
      new Buffer [ 0xfb, ]
      new Buffer [ 0xfc, ]
      new Buffer [ 0xfd, ]
      ]
    matchers = ( probe for probe in probes )
    CND.shuffle probes
    for probe in probes
      yield leveldb.put probe, '1', resume
    probe_bfrs = yield read_all_keys leveldb, resume
    for probe_bfr, probe_idx in probe_bfrs
      matcher = matchers[ probe_idx ]
      # debug '©15060', probe_idx, probe_bfr, matcher
      ### TAINT looks like `T.eq buffer1, buffer2` doesn't work---sometimes... ###
      T.ok probe_bfr.equals matcher
    leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "H2 codec `encode` throws on anything but a list" ] = ( T, done ) ->
  # `CODEC.encode` only accepts lists; every scalar must raise with a
  # type-specific message.  (The misspelt probe text is intentional.)
  bad_probes = [
    [ "expected a list, got a text",    'unaccaptable' ]
    [ "expected a list, got a number",  42             ]
    [ "expected a list, got a boolean", true           ]
    [ "expected a list, got a boolean", false          ]
  ]
  for [ matcher, value, ] in bad_probes
    T.throws matcher, ( -> CODEC.encode value )
  T.throws /^expected a list, got a (?:js)?undefined$/, ( -> CODEC.encode() )
  done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort texts with H2 codec (1)" ] = ( T, done ) ->
  # ASCII texts encoded as single-element keys with the H2 codec must sort
  # lexicographically, embedded NUL bytes included.
  step ( resume ) =>
    settings =
      db: leveldown
      keyEncoding: 'binary'
    leveldb = levelup '/tmp/hollerith2-test', settings
    yield clear_leveldb leveldb, resume
    probes = [
      'a'
      'ab'
      'abc'
      'abc\x00'
      'abc\x00a'
      'abca'
      'abca\x00'
      'abcb'
      'abcc'
      'abcd'
      'abcde'
      'abcdef'
      'abcdefg'
      ]
    matchers = ( [ probe, ] for probe in probes )
    CND.shuffle probes
    for probe in probes
      yield leveldb.put ( CODEC.encode [ probe, ] ), '1', resume
    probe_bfrs = yield read_all_keys leveldb, resume
    probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
    show_keys_and_key_bfrs probes, probe_bfrs
    for probe, probe_idx in probes
      matcher = matchers[ probe_idx ]
      T.eq probe, matcher
    leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort texts with H2 codec (2)" ] = ( T, done ) ->
  # Non-ASCII texts (CJK, supplementary-plane code points up to U+10FFFF)
  # must sort by code point after H2 encoding.
  step ( resume ) =>
    settings =
      db: leveldown
      keyEncoding: 'binary'
    leveldb = levelup '/tmp/hollerith2-test', settings
    yield clear_leveldb leveldb, resume
    probes = [
      ''
      ' '
      'a'
      'abc'
      '一'
      '一二'
      '一二三'
      '三'
      '二'
      '𠀀'
      '𠀀\x00'
      '𠀀a'
      '𪜀'
      '𫝀'
      String.fromCodePoint 0x10ffff
      ]
    matchers = ( [ probe, ] for probe in probes )
    CND.shuffle probes
    for probe in probes
      probe_bfr = CODEC.encode [ probe, ]
      yield leveldb.put probe_bfr, '1', resume
    probe_bfrs = yield read_all_keys leveldb, resume
    # debug '©Fd5iw', probe_bfrs
    probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
    show_keys_and_key_bfrs probes, probe_bfrs
    for probe, probe_idx in probes
      matcher = matchers[ probe_idx ]
      T.eq probe, matcher
    leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort numbers with H2 codec (1)" ] = ( T, done ) ->
  # Numbers from -Infinity through +Infinity — including subnormals,
  # EPSILON and the safe-integer limits — must sort numerically after H2
  # encoding.
  step ( resume ) =>
    settings =
      db: leveldown
      keyEncoding: 'binary'
    leveldb = levelup '/tmp/hollerith2-test', settings
    yield clear_leveldb leveldb, resume
    probes_and_descriptions = [
      [ -Infinity, "-Infinity" ]
      [ -Number.MAX_VALUE, "-Number.MAX_VALUE" ]
      [ Number.MIN_SAFE_INTEGER, "Number.MIN_SAFE_INTEGER" ]
      [ -123456789, "-123456789" ]
      [ -3, "-3" ]
      [ -2, "-2" ]
      [ -1.5, "-1.5" ]
      [ -1, "-1" ]
      [ -Number.EPSILON, "-Number.EPSILON" ]
      [ -Number.MIN_VALUE, "-Number.MIN_VALUE" ]
      [ 0, "0" ]
      [ +Number.MIN_VALUE, "+Number.MIN_VALUE" ]
      [ +Number.EPSILON, "+Number.EPSILON" ]
      [ +1, "+1" ]
      [ +1.5, "+1.5" ]
      [ +2, "+2" ]
      [ +3, "+3" ]
      [ +123456789, "+123456789" ]
      [ Number.MAX_SAFE_INTEGER, "Number.MAX_SAFE_INTEGER" ]
      [ Number.MAX_VALUE, "Number.MAX_VALUE" ]
      [ +Infinity, "+Infinity" ]
      ]
    # probes_and_descriptions.sort ( a, b ) ->
    #   return +1 if a[ 0 ] > b[ 0 ]
    #   return -1 if a[ 0 ] < b[ 0 ]
    #   return 0
    matchers = ( [ pad[ 0 ], ] for pad in probes_and_descriptions )
    # descriptions = ( [ pad[ 1 ], ] for pad in probes_and_descriptions )
    for pad in probes_and_descriptions
      urge pad
    CND.shuffle probes_and_descriptions
    for [ probe, _, ] in probes_and_descriptions
      probe_bfr = CODEC.encode [ probe, ]
      yield leveldb.put probe_bfr, '1', resume
    probe_bfrs = yield read_all_keys leveldb, resume
    probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
    show_keys_and_key_bfrs probes, probe_bfrs
    for probe, probe_idx in probes
      matcher = matchers[ probe_idx ]
      T.eq probe, matcher
    leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Checks cross-type ordering of the H2 codec: null, booleans, date
  # sentinels/dates, numbers, and strings (including astral-plane characters)
  # must sort in the order listed below when encoded and iterated by LevelDB.
  @[ "sort mixed values with H2 codec" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      # Expected ascending order across types.
      probes = [
        null
        false
        true
        CODEC[ 'sentinels' ][ 'firstdate' ]
        new Date 0
        new Date 8e11
        new Date()
        CODEC[ 'sentinels' ][ 'lastdate' ]
        1234
        Infinity
        ''
        '一'
        '三'
        '二'
        '𠀀'
        '𠀀\x00'
        String.fromCodePoint 0x10ffff
        ]
      matchers = ( [ probe, ] for probe in probes )
      # Shuffle so insertion order cannot mask a broken byte ordering.
      CND.shuffle probes
      for probe in probes
        debug '©oMXJZ', probe
        probe_bfr = CODEC.encode [ probe, ]
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Like the single-value ordering test above, but each key is a two-element
  # list [ description, value ]; the leading description string dominates the
  # sort, so the listed order is the expected iteration order.
  @[ "sort lists of mixed values with H2 codec" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        [ "", '', ]
        [ "1234", 1234, ]
        [ "Infinity", Infinity, ]
        [ "String.fromCodePoint 0x10ffff", String.fromCodePoint 0x10ffff ]
        [ "false", false, ]
        [ "new Date 0", new Date 0, ]
        [ "new Date 8e11", new Date 8e11, ]
        [ "new Date()", new Date(), ]
        [ "null", null, ]
        [ "true", true, ]
        [ "一", '一', ]
        [ "三", '三', ]
        [ "二", '二', ]
        [ "𠀀", '𠀀', ]
        [ "𠀀\x00", '𠀀\x00', ]
        ]
      matchers = ( probe for probe in probes )
      # Shuffle so insertion order cannot mask a broken byte ordering.
      CND.shuffle probes
      for probe in probes
        debug '©oMXJZ', probe
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Sanity check that Node's `Buffer.compare` agrees with LevelDB's key
  # ordering: for keys read back from the DB, each consecutive pair must
  # compare strictly ascending (-1).
  @[ "ensure `Buffer.compare` gives same sorting as LevelDB" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        # { x: 1234.5678 }
        [ "", '', ]
        [ "1234", 1234, ]
        [ "Infinity", Infinity, ]
        [ "String.fromCodePoint 0x10ffff", String.fromCodePoint 0x10ffff ]
        [ "false", false, ]
        [ "new Date 0", new Date 0, ]
        [ "new Date 8e11", new Date 8e11, ]
        [ "new Date()", new Date(), ]
        [ "null", null, ]
        [ "true", true, ]
        [ "一", '一', ]
        [ "三", '三', ]
        [ "二", '二', ]
        [ "𠀀", '𠀀', ]
        [ "𠀀\x00", '𠀀\x00', ]
        ]
      CND.shuffle probes
      for probe in probes
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # Walk consecutive key pairs; strict ascending order means -1 everywhere.
      last_probe_bfr = null
      for probe_bfr in probe_bfrs
        if last_probe_bfr?
          T.eq ( Buffer.compare last_probe_bfr, probe_bfr ), -1
        last_probe_bfr = probe_bfr
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Ordering test for realistic 4-element POS route keys; includes keys
  # differing only by a trailing NUL byte to exercise boundary behavior of
  # the codec's string termination.
  @[ "sort routes with values (1)" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        [ 'pos', 'strokeorder', '352513553254', '𤿯', ]
        [ 'pos', 'strokeorder', '3525141121', '𠴦', ]
        [ 'pos', 'strokeorder', '35251454', '𨒡', ]
        [ 'pos', 'strokeorder', '3525152', '邭', ]
        [ 'pos', 'strokeorder', '352515251115115113541', '𪚫', ]
        [ 'pos', 'strokeorder', '35251525112511511', '𪚧', ]
        [ 'pos', 'strokeorder', '352515251214251214', '𧑴', ]
        [ 'pos', 'strokeorder', '3525153', '劬', ]
        [ 'pos', 'strokeorder', '3525153\x00', '劬', ]
        [ 'pos', 'strokeorder\x00', '352513553254', '𤿯', ]
        ]
      matchers = ( probe for probe in probes )
      CND.shuffle probes
      for probe in probes
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Ordering test for two-element keys mixing value types in the last
  # position; also exercises prefix boundaries ('a' vs 'a\x00' vs 'aa').
  @[ "sort routes with values (2)" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        [ 'a', null, ]
        [ 'a', false, ]
        [ 'a', true, ]
        [ 'a', new Date(), ]
        [ 'a', -Infinity, ]
        [ 'a', +1234, ]
        [ 'a', +Infinity, ]
        [ 'a', 'b', ]
        [ 'a', 'b\x00', ]
        [ 'a\x00', +1234, ]
        [ 'a\x00', 'b', ]
        [ 'aa', +1234, ]
        [ 'aa', 'b', ]
        [ 'aa', 'b\x00', ]
        ]
      matchers = ( probe for probe in probes )
      CND.shuffle probes
      for probe in probes
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Smoke test: feeds sample data set #2, then dumps every non-meta facet via
  # a raw LevelDB read stream. Makes no assertions beyond completing without
  # error.
  @[ "read sample data" ] = ( T, done ) ->
    probes_idx = 2
    # NOTE(review): `idx` is assigned but never read below — looks like a
    # leftover; confirm before removing.
    idx = -1
    step ( resume ) =>
      debug '©bUJhI', 'XX'
      yield @_feed_test_data db, probes_idx, resume
      debug '©PRzA5', 'XX'
      input = db[ '%self' ].createReadStream()
      input
        .pipe D.$show()
        .pipe $ ( { key, value, }, send ) => send [ key, value, ]
        .pipe $ ( [ key, value, ], send ) => send [ key, value, ] unless HOLLERITH._is_meta db, key
        .pipe $ ( [ key, value, ], send ) =>
          # debug '©RluhF', ( HOLLERITH.CODEC.decode key ), ( JSON.parse value )
          send [ key, value, ]
        .pipe D.$collect()
        .pipe $ ( facets, send ) =>
          # debug '©54IKt', facets
          help '\n' + HOLLERITH.DUMP.rpr_of_facets db, facets
          buffer = new Buffer JSON.stringify [ '开', '彡' ]
          debug '©GJfL6', HOLLERITH.CODEC.rpr_of_buffer buffer
        .pipe D.$on_end => done()
      #.........................................................................................................
      return null
#-----------------------------------------------------------------------------------------------------------
@[ "read and write keys with lists" ] = ( T, done ) ->
probes_idx = 0
idx = -1
count = 0
probes = [
[ 'a', 1, ]
[ 'a', [], ]
[ 'a', [ 1, ], ]
[ 'a', [ true, ], ]
[ 'a', [ 'x', 'y', 'b', ], ]
[ 'a', [ 120, 1 / 3, ], ]
[ 'a', [ 'x', ], ]
]
matchers = ( probe for probe in probes )
#.........................................................................................................
for probe, probe_idx in probes
buffer = HOLLERITH.CODEC.encode probe
result = HOLLERITH.CODEC.decode buffer
T.eq result, matchers[ probe_idx ]
#.........................................................................................................
done()
#-----------------------------------------------------------------------------------------------------------
@[ "encode keys with list elements" ] = ( T, done ) ->
probes = [
[ 'foo', 'bar', ]
[ 'foo', [ 'bar', ], ]
[ [], 'bar', ]
[ 'foo', [], ]
[ [ 'foo', ], 'bar', ]
[ [ 42, ], 'bar', ]
[ 'foo', [ 42, ] ]
]
for probe in probes
T.eq probe, HOLLERITH.CODEC.decode HOLLERITH.CODEC.encode probe
done()
#-----------------------------------------------------------------------------------------------------------
  # Writes SPO phrases whose objects are (possibly nested) lists, declaring
  # the predicate as a 'solid' so the lists are stored unanalyzed, then reads
  # all phrases back. The per-phrase assertions are currently commented out,
  # so this only checks that write + read complete.
  @[ "read and write phrases with unanalyzed lists" ] = ( T, done ) ->
    # ### !!!!!!!!!!!!!!!!!!!!!! ###
    # warn "skipped"
    # return done()
    # ### !!!!!!!!!!!!!!!!!!!!!! ###
    idx = -1
    count = 0
    #.........................................................................................................
    probes = [
      [ 'probe#00', 'some-predicate', [], ]
      [ 'probe#01', 'some-predicate', [ -1 ], ]
      [ 'probe#02', 'some-predicate', [ 0 ], ]
      [ 'probe#03', 'some-predicate', [ 1 ], ]
      [ 'probe#04', 'some-predicate', [ 2 ], ]
      [ 'probe#05', 'some-predicate', [ 2, -1, ], ]
      [ 'probe#06', 'some-predicate', [ 2, 0, ], ]
      [ 'probe#07', 'some-predicate', [ 2, 1, ], ]
      [ 'probe#08', 'some-predicate', [ 2, 1, 0 ], ]
      [ 'probe#09', 'some-predicate', [ 2, 2, ], ]
      [ 'probe#10', 'some-predicate', [ 2, [ 2, ], ], ]
      [ 'probe#11', 'some-predicate', [ 3 ], ]
      ]
    #.........................................................................................................
    # Clears the DB, then streams all probes through HOLLERITH.$write with
    # 'some-predicate' marked as solid; calls `handler` once writing is done.
    write_probes = ( handler ) =>
      step ( resume ) =>
        yield HOLLERITH.clear db, resume
        input = D.create_throughstream()
        input
          # .pipe ( [ sbj, prd, obj, ], send ) =>
          #   if prd is 'some-predicate' # always the case in this example
          #     obj
          .pipe HOLLERITH.$write db, solids: [ 'some-predicate', ]
          .pipe D.$on_end =>
            urge "test data written"
            handler()
        #.....................................................................................................
        input.write probe for probe in probes
        input.end()
    #.........................................................................................................
    step ( resume ) =>
      #.......................................................................................................
      yield write_probes resume
      input = HOLLERITH.create_phrasestream db
      debug '©FphJK', input[ '%meta' ]
      input
        .pipe $ ( phrase, send ) =>
          count += +1
          idx += +1
          # debug '©Sc5FG', phrase
          # T.eq phrase, matchers[ idx ]
        .pipe D.$on_end =>
          # T.eq count, matchers.length
          done()
#-----------------------------------------------------------------------------------------------------------
  # Reads phrases matching the partial prefix [ 'pos', 'guide' ] with the
  # star wildcard enabled and checks both the content/order of the returned
  # phrases and the total count against `matchers`.
  @[ "read partial POS phrases" ] = ( T, done ) ->
    # ### !!!!!!!!!!!!!!!!!!!!!! ###
    # warn "skipped"
    # return done()
    # ### !!!!!!!!!!!!!!!!!!!!!! ###
    probes_idx = 4
    idx = -1
    count = 0
    #.........................................................................................................
    # Expected phrases, in DB iteration order.
    matchers = [
      [ 'pos', 'guide', 'xxx', '𧷟1' ]
      [ 'pos', 'guide/', 'yyy', '𧷟1' ]
      [ 'pos', 'guide/lineup/length', 1, '𧷟1', ]
      [ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
      [ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
      [ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
      [ 'pos', 'guide/lineup/length', 5, '𧷟', ]
      [ 'pos', 'guide/lineup/length', 6, '𧷟6', ]
      [ 'pos', 'guide/uchr/has', '八', '𧷟', 0 ]
      [ 'pos', 'guide/uchr/has', '刀', '𧷟', 1 ]
      [ 'pos', 'guide/uchr/has', '宀', '𧷟', 2 ]
      [ 'pos', 'guide/uchr/has', '貝', '𧷟', 4 ]
      [ 'pos', 'guide/uchr/has', '', '𧷟', 3 ]
      ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      # prefix = [ 'pos', 'guide', ]
      prefix = [ 'pos', 'guide', ]
      input = HOLLERITH.create_phrasestream db, { prefix, star: '*', }
      # input = HOLLERITH.create_phrasestream db, { prefix, }
      debug '©FphJK', input[ '%meta' ]
      # NOTE(review): `settings` is assigned but never used below — confirm
      # whether it was meant to be passed to create_phrasestream.
      settings = { indexed: no, }
      input
        .pipe $ ( phrase, send ) =>
          count += +1
          idx += +1
          debug '©Sc5FG', phrase
          T.eq phrase, matchers[ idx ]
        .pipe D.$on_end =>
          T.eq count, matchers.length
          done()
#-----------------------------------------------------------------------------------------------------------
  # `read_one_phrase` with a prefix matching exactly one phrase must deliver
  # that phrase without error.
  @[ "read single phrases (1)" ] = ( T, done ) ->
    probes_idx = 4
    matcher = [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      # prefix = [ 'pos', 'guide', ]
      prefix = [ 'spo', '𧷟', 'guide/lineup/length', ]
      query = { prefix, star: '*', }
      input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
        throw error if error?
        debug '©61ENl', phrase
        T.eq phrase, matcher
        done()
#-----------------------------------------------------------------------------------------------------------
  # Same as (1), but with a `fallback` supplied: since the phrase exists, the
  # fallback must be ignored and the stored phrase delivered.
  @[ "read single phrases (2)" ] = ( T, done ) ->
    probes_idx = 4
    matcher = [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', '𧷟', 'guide/lineup/length', ]
      query = { prefix, star: '*', fallback: 'not to be used', }
      input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
        throw error if error?
        debug '©61ENl', phrase
        T.eq phrase, matcher
        done()
#-----------------------------------------------------------------------------------------------------------
  # A prefix matching no phrase and no `fallback` must produce an error with
  # the exact message asserted below.
  @[ "read single phrases (3)" ] = ( T, done ) ->
    probes_idx = 4
    matcher = "expected 1 phrase, got 0"
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', '中', 'guide/lineup/length', ]
      query = { prefix, star: '*', }
      input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
        throw new Error "expected error" unless error?
        T.eq error[ 'message' ], matcher
        done()
#-----------------------------------------------------------------------------------------------------------
  # A prefix matching no phrase but WITH a `fallback` must deliver the
  # fallback value instead of raising.
  @[ "read single phrases (4)" ] = ( T, done ) ->
    probes_idx = 4
    matcher = "this entry is missing"
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', '中', 'guide/lineup/length', ]
      query = { prefix, star: '*', fallback: matcher, }
      input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
        throw error if error?
        T.eq phrase, matcher
        done()
#-----------------------------------------------------------------------------------------------------------
  # Placeholder: prints a reminder that this test case still needs to be
  # written, then passes trivially.
  @[ "writing phrases with non-unique keys fails" ] = ( T, done ) ->
    alert """test case "writing phrases with non-unique keys fails" to be written"""
    done()
#-----------------------------------------------------------------------------------------------------------
  # Placeholder: prints an outstanding to-do for H.$write, then passes
  # trivially.
  @[ "reminders" ] = ( T, done ) ->
    alert "H.$write() must test for repeated keys"
    done()
#-----------------------------------------------------------------------------------------------------------
  # Writing a non-list key ('xxx') through HOLLERITH.$write must raise; the
  # error is intercepted via a Node `domain` and its message asserted.
  @[ "invalid key not accepted (1)" ] = ( T, done ) ->
    domain = ( require 'domain' ).create()
    domain.on 'error', ( error ) ->
      # debug '©AOSmn', JSON.stringify error[ 'message' ]
      T.eq error[ 'message' ], "invalid SPO key, must be list: 'xxx'"
      later done
    domain.run ->
      input = D.create_throughstream()
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          # T.fail "should throw error"
          later done
      input.write 'xxx'
      input.end()
#-----------------------------------------------------------------------------------------------------------
  # Writing a list key of the wrong length ([ 'foo' ]) must raise with the
  # exact message asserted below; the error is intercepted via a `domain`.
  @[ "invalid key not accepted (2)" ] = ( T, done ) ->
    domain = ( require 'domain' ).create()
    domain.on 'error', ( error ) ->
      # debug '©AOSmn', JSON.stringify error[ 'message' ]
      T.eq error[ 'message' ], "invalid SPO key, must be of length 3: [ 'foo' ]"
      done()
    domain.run ->
      input = D.create_throughstream()
      input.pipe HOLLERITH.$write db
      input.write [ 'foo', ]
#-----------------------------------------------------------------------------------------------------------
@[ "catching errors (2)" ] = ( T, done ) ->
run = ( method, handler ) ->
domain = ( require 'domain' ).create()
domain.on 'error', ( error ) ->
handler error
domain.run ->
method()
#.........................................................................................................
f = ->
input = D.create_throughstream()
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
later done
input.write [ 'foo', 'bar', 'baz', ]
input.end()
run f, ( error ) ->
debug '©WaXJV', JSON.stringify error[ 'message' ]
T.eq true, false
done()
#-----------------------------------------------------------------------------------------------------------
  # Writing a 4-element key must raise; the error is routed through D.run's
  # error callback and its exact message asserted.
  @[ "catching errors (1)" ] = ( T, done ) ->
    #.........................................................................................................
    d = D.run ->
      input = D.create_throughstream()
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          later done
      input.write [ 'foo', 'bar', 'baz', 'gnu', ]
      input.end()
    , ( error ) ->
      T.eq error[ 'message' ], "invalid SPO key, must be of length 3: [ 'foo', 'bar', 'baz', 'gnu' ]"
      later done
#-----------------------------------------------------------------------------------------------------------
  # Complement of "catching errors (1)": a valid 3-element key must NOT
  # trigger D.run's error callback; success is reaching $on_end.
  @[ "catching errors (2)" ] = ( T, done ) ->
    message = "should not produce error"
    #.........................................................................................................
    d = D.run ->
      input = D.create_throughstream()
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          T.succeed message
          later done
      input.write [ 'foo', 'bar', 'baz', ]
      input.end()
    , ( error ) ->
      T.fail message
      later done
#-----------------------------------------------------------------------------------------------------------
  # Streams all SPO phrases, strips the leading 'spo' tag, folds consecutive
  # phrases sharing a subject into a single POD object, and writes the PODs
  # back to the DB. Output is shown but not asserted against.
  @[ "building PODs from SPO phrases" ] = ( T, done ) ->
    probes_idx = 4
    # NOTE(review): `idx` and `count` are assigned but never read here —
    # likely left over from the commented-out matchers below.
    idx = -1
    count = 0
    # #.........................................................................................................
    # matchers = [
    #   [ 'spo', '𧷟', 'cp/cid', 163295 ]
    #   [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
    #   [ 'spo', '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝' ] ]
    #   [ 'spo', '𧷟', 'rank/cjt', 5432 ]
    #   ]
    #.........................................................................................................
    # Transform: [ 'spo', sbj, prd, obj ] -> [ sbj, prd, obj ]; errors out on
    # anything that is not an SPO phrase.
    $shorten_spo = ->
      return $ ( phrase, send ) =>
        unless ( CND.isa_list phrase ) and phrase[ 0 ] is 'spo'
          return send.error new Error "not an SPO phrase: #{rpr phrase}"
        spo = phrase[ 1 .. ]
        ### TAINT repeated validation? ###
        HOLLERITH.validate_spo spo
        send spo
    #.........................................................................................................
    # Stateful transform: collects consecutive [ sbj, prd, obj ] triples with
    # the same subject into one `pod` object keyed by predicate; emits
    # [ sbj, 'pod', pod ] whenever the subject changes, and flushes the last
    # pod at stream end. Relies on the input being grouped by subject.
    $consolidate = ->
      last_sbj = null
      pod = null
      return $ ( spo, send, end ) =>
        if spo?
          ### TAINT repeated validation? ###
          HOLLERITH.validate_spo spo
          [ sbj, prd, obj, ] = spo
          #...................................................................................................
          if sbj is last_sbj
            pod[ prd ] = obj
          #...................................................................................................
          else
            if pod?
              ### TAINT implicit key `pod` ###
              send [ last_sbj, 'pod', pod, ]
            pod = '%sbj': sbj
            pod[ prd ] = obj
            last_sbj = sbj
          #...................................................................................................
          # send spo
        #.....................................................................................................
        if end?
          send [ last_sbj, 'pod', pod, ] if last_sbj?
          end()
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', ]
      input = HOLLERITH.create_phrasestream db, { prefix, }
      input
        .pipe $shorten_spo()
        .pipe $consolidate()
        .pipe D.$show()
        .pipe HOLLERITH.$write db
        .pipe D.$on_end done
###
#-----------------------------------------------------------------------------------------------------------
@[ "keep ordering and completeness in asynchronous streams" ] = ( T, T_done ) ->
step ( resume ) =>
idx = 0
input_A = D.create_throughstream()
#.......................................................................................................
input_B = input_A
.pipe D.$stop_time "keep ordering and completeness in asynchronous streams"
.pipe $async ( data, done ) ->
dt = CND.random_number 0.5, 1.5
# debug '©WscFi', data, dt
after dt, =>
warn "send #{rpr data}"
done data
.pipe $ ( data, send ) ->
help "read #{rpr data}"
T.eq data, idx
idx += +1
send data
.pipe D.$on_end =>
T_done()
#.......................................................................................................
write = ->
for n in [ 0 .. 10 ]
# help "write #{n}"
input_A.write n
yield after 0.1, resume
input_A.end()
#.......................................................................................................
write()
###
#-----------------------------------------------------------------------------------------------------------
  # Opens three phrase streams over different POS prefixes and zips them with
  # D.$lockstep (padding exhausted streams with null); output is printed but
  # not asserted against.
  @[ "read phrases in lockstep" ] = ( T, done ) ->
    probes_idx = 2
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      input_1 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'strokecount' ], }
      input_2 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'componentcount' ], }
      input_3 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'components' ], }
      input_1
        .pipe D.$lockstep input_2, fallback: null
        .pipe D.$lockstep input_3, fallback: null
        .pipe $ ( data, send ) => help JSON.stringify data; send data
        .pipe D.$on_end done
#-----------------------------------------------------------------------------------------------------------
  # HOLLERITH.has_any must report whether any phrase exists under a given
  # prefix, for prefixes of varying length (full S/P pairs down to the bare
  # 'spo' tag), including negative cases.
  @[ "has_any yields existence of key" ] = ( T, done ) ->
    probes_idx = 2
    probes_and_matchers = [
      [ [ 'spo', '形', 'strokecount', ], true, ]
      [ [ 'spo', '丁', 'componentcount', ], true, ]
      [ [ 'spo', '三', 'componentcount', ], true, ]
      [ [ 'spo', '夫', 'componentcount', ], true, ]
      [ [ 'spo', '國', 'componentcount', ], true, ]
      [ [ 'spo', '形', 'componentcount', ], true, ]
      [ [ 'spo', '丁', 'components', ], true, ]
      [ [ 'spo', '丁', 'xxxx', ], false, ]
      [ [ 'spo', '丁', ], true, ]
      [ [ 'spo', ], true, ]
      [ [ 'xxx', ], false, ]
      ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      for [ probe, matcher, ] in probes_and_matchers
        T.eq matcher, yield HOLLERITH.has_any db, { prefix: probe, }, resume
      done()
#-----------------------------------------------------------------------------------------------------------
  # Writing a phrase whose subject/predicate pair already exists in the DB
  # must raise; the error is caught via D.run and its exact message asserted.
  @[ "$write rejects duplicate S/P pairs" ] = ( T, done ) ->
    probes_idx = 2
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      #.......................................................................................................
      try_writing = ->
        input = D.create_throughstream()
        #.....................................................................................................
        input
          .pipe D.$show()
          .pipe HOLLERITH.$write db
          .pipe D.$on_end ->
            T.fail "should never be called"
            done()
        #.....................................................................................................
        # '形'/'strokecount' is already present in test data set #2.
        input.write [ '形', 'strokecount', 1234, ]
        input.end()
      #.......................................................................................................
      D.run try_writing, ( error ) ->
        T.eq "S/P pair already in DB: [ '形', 'strokecount' ]", error[ 'message' ]
        done()
#-----------------------------------------------------------------------------------------------------------
  # Writing a key containing several 1024-character strings must succeed;
  # any error routed to D.run's callback fails the test.
  @[ "codec accepts long keys" ] = ( T, done ) ->
    probes_idx = 2
    probes = []
    # 1024 '#' characters.
    long_text = ( new Array 1025 ).join '#'
    # probes.push [ 'foo', long_text, [ long_text, long_text, long_text, long_text, long_text, ], ]
    # probes.push [ 'foo', [ long_text, long_text, long_text, long_text, long_text, ],
    #   [ long_text, long_text, long_text, long_text, long_text, ], ]
    # probes.push [ 'foo', [ long_text, long_text, long_text, long_text, long_text, ], ]
    probes.push [ 'foo', [ long_text, long_text, long_text, long_text, ], 42, ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      #.......................................................................................................
      try_writing = ->
        input = D.create_throughstream()
        #.....................................................................................................
        input
          # .pipe D.$show()
          .pipe HOLLERITH.$write db
          .pipe D.$on_end ->
            T.eq 1, 1
            done()
        #.....................................................................................................
        for probe in probes
          input.write probe
          # yield later resume
        input.end()
      #.......................................................................................................
      D.run try_writing, ( error ) ->
        T.fail "should not throw error"
        warn error
        done()
#-----------------------------------------------------------------------------------------------------------
  # Baseline for the private-type tests: without a custom encoder, objects of
  # shape { type, value } written as subjects/objects are stored and read
  # back verbatim (see the matchers' embedded { "type": "route", ... }).
  @[ "write private types (1)" ] = ( T, done ) ->
    probes_idx = 5
    idx = -1
    count = 0
    #.........................................................................................................
    matchers = [
      ["pos","componentcount",1,"丁"]
      ["pos","components","丁","丁",0]
      ["pos","mtime",123456789,{"type":"route","value":"/foo/bar"}]
      ["pos","strokecount",2,"丁"]
      ["spo","丁","componentcount",1]
      ["spo","丁","components",["丁"]]
      ["spo","丁","strokecount",2]
      ["spo",{"type":"route","value":"/foo/bar"},"mtime",123456789]
      ]
    #.........................................................................................................
    # Streams test data set #5 into the default db; calls `handler` when done.
    write_data = ( handler ) =>
      input = D.create_throughstream()
      input
        # .pipe D.$show()
        .pipe HOLLERITH.$write db
        .pipe D.$on_end -> handler()
      #.......................................................................................................
      for probe in @_feed_test_data.probes[ probes_idx ]
        input.write probe
      input.end()
    #.........................................................................................................
    # Reads all phrases back and compares them (content and order) against
    # `matchers`, also tallying `count`.
    read_data = ( handler ) ->
      #.......................................................................................................
      input = HOLLERITH.create_phrasestream db
      input
        # .pipe D.$show()
        .pipe $ ( phrase, send ) =>
          count += +1
          idx += +1
          debug '©Sc5FG', JSON.stringify phrase
          T.eq phrase, matchers[ idx ]
        .pipe D.$on_end -> handler()
    #.........................................................................................................
    step ( resume ) =>
      yield HOLLERITH.clear db, resume
      yield write_data resume
      yield read_data resume
      done()
#-----------------------------------------------------------------------------------------------------------
  # With a custom `encoder` (but no decoder), 'route' private values are
  # split into path segments before storage; reading back yields the encoded
  # list form (value: [ "", "foo", "bar" ]) rather than the original string.
  @[ "write private types (2)" ] = ( T, done ) ->
    probes_idx = 5
    idx = -1
    count = 0
    #.........................................................................................................
    # Encoder hook for private types: turns 'route' strings into segment
    # lists; any other type is an error.
    encoder = ( type, value ) ->
      debug '©XXX-encoder', type, rpr value
      return value.split '/' if type is 'route'
      throw new Error "unknown private type #{rpr type}"
    #.........................................................................................................
    xdb_route = join __dirname, '..', 'dbs/tests-with-private-types'
    #.........................................................................................................
    xdb_settings =
      size: 500
      encoder: encoder
    #.........................................................................................................
    # Dedicated DB instance so the custom encoder does not affect other tests.
    xdb = HOLLERITH.new_db xdb_route, xdb_settings
    #.........................................................................................................
    matchers = [
      ["pos","componentcount",1,"丁"]
      ["pos","components","丁","丁",0]
      ["pos","mtime",123456789,{"type":"route","value":["","foo","bar"]}]
      ["pos","strokecount",2,"丁"]
      ["spo","丁","componentcount",1]
      ["spo","丁","components",["丁"]]
      ["spo","丁","strokecount",2]
      ["spo",{"type":"route","value":["","foo","bar"]},"mtime",123456789]
      ]
    #.........................................................................................................
    # Streams test data set #5 into xdb; calls `handler` when done.
    write_data = ( handler ) =>
      input = D.create_throughstream()
      input
        # .pipe D.$show()
        .pipe HOLLERITH.$write xdb
        .pipe D.$on_end -> handler()
      #.......................................................................................................
      for probe in @_feed_test_data.probes[ probes_idx ]
        input.write probe
      input.end()
    #.........................................................................................................
    # Reads all phrases back and compares against `matchers` in order.
    read_data = ( handler ) ->
      #.......................................................................................................
      input = HOLLERITH.create_phrasestream xdb
      input
        # .pipe D.$show()
        .pipe $ ( phrase, send ) =>
          count += +1
          idx += +1
          debug '©Sc5FG', JSON.stringify phrase
          T.eq phrase, matchers[ idx ]
        .pipe D.$on_end -> handler()
    #.........................................................................................................
    step ( resume ) =>
      yield HOLLERITH.clear xdb, resume
      yield write_data resume
      yield read_data resume
      yield xdb[ '%self' ].close resume
      done()
#-----------------------------------------------------------------------------------------------------------
  # With BOTH a custom `encoder` and `decoder`, 'route' values round-trip
  # transparently: stored as segment lists, read back joined into the
  # original "/foo/bar" strings (see matchers).
  @[ "write private types (3)" ] = ( T, done ) ->
    probes_idx = 5
    idx = -1
    count = 0
    #.........................................................................................................
    # Encoder hook: 'route' strings -> segment lists.
    encoder = ( type, value ) ->
      # debug '©XXX-encoder', type, rpr value
      return value.split '/' if type is 'route'
      throw new Error "unknown private type #{rpr type}"
    #.........................................................................................................
    # Decoder hook: segment lists -> 'route' strings (inverse of encoder).
    decoder = ( type, value ) ->
      # debug '©XXX-decoder', type, rpr value
      return value.join '/' if type is 'route'
      throw new Error "unknown private type #{rpr type}"
    #.........................................................................................................
    xdb_route = join __dirname, '..', 'dbs/tests-with-private-types'
    #.........................................................................................................
    xdb_settings =
      size: 500
      encoder: encoder
      decoder: decoder
    #.........................................................................................................
    xdb = HOLLERITH.new_db xdb_route, xdb_settings
    #.........................................................................................................
    matchers = [
      ["pos","componentcount",1,"丁"]
      ["pos","components","丁","丁",0]
      ["pos","mtime",123456789,"/foo/bar"]
      ["pos","strokecount",2,"丁"]
      ["spo","丁","componentcount",1]
      ["spo","丁","components",["丁"]]
      ["spo","丁","strokecount",2]
      ["spo","/foo/bar","mtime",123456789]
      ]
    #.........................................................................................................
    # Streams test data set #5 into xdb; calls `handler` when done.
    write_data = ( handler ) =>
      input = D.create_throughstream()
      input
        # .pipe D.$show()
        .pipe HOLLERITH.$write xdb
        .pipe D.$on_end -> handler()
      #.......................................................................................................
      for probe in @_feed_test_data.probes[ probes_idx ]
        input.write probe
      input.end()
    #.........................................................................................................
    # Reads all phrases back and compares against `matchers` in order.
    read_data = ( handler ) ->
      #.......................................................................................................
      input = HOLLERITH.create_phrasestream xdb
      input
        # .pipe D.$show()
        .pipe $ ( phrase, send ) =>
          count += +1
          idx += +1
          urge '©Sc5FG', JSON.stringify phrase
          T.eq phrase, matchers[ idx ]
        .pipe D.$on_end -> handler()
    #.........................................................................................................
    step ( resume ) =>
      yield HOLLERITH.clear xdb, resume
      yield write_data resume
      yield read_data resume
      yield xdb[ '%self' ].close resume
      done()
#-----------------------------------------------------------------------------------------------------------
  # Regression check: piping an (empty) phrase stream through $write on a
  # fresh DB and ending it immediately must not crash, even though nothing
  # was ever written.
  @[ "bloom filter serialization without writes" ] = ( T, done ) ->
    #.........................................................................................................
    # step ( resume ) =>
    xdb = HOLLERITH.new_db get_new_db_name()
    input = HOLLERITH.create_phrasestream xdb
    input.pause()
    input.pipe HOLLERITH.$write xdb
    input.resume()
    input.end()
    T.ok true
    done()
#-----------------------------------------------------------------------------------------------------------
  @[ "Pinyin Unicode Sorting" ] = ( T, done ) ->
    ### Writes 66 single-letter `reading` phrases — Pinyin vowels in all four tone marks
    plus the bare letters, upper- and lower-case, including the IPA letters ɑ / Ɑ — and
    then reads the `pos` index back, asserting the letters come out in the expected
    Unicode sort order (see the expected string in `show` below). ###
    #.........................................................................................................
    write_data = ( handler ) ->
      # Pump all probe phrases through the HOLLERITH writer; `handler` is called once
      # the write stream has ended.
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      input.write [ '01', 'reading', 'ā', ]
      input.write [ '02', 'reading', 'ɑ̄', ]
      input.write [ '03', 'reading', 'ē', ]
      input.write [ '04', 'reading', 'ī', ]
      input.write [ '05', 'reading', 'ō', ]
      input.write [ '06', 'reading', 'ū', ]
      input.write [ '07', 'reading', 'ǖ', ]
      input.write [ '08', 'reading', 'Ā', ]
      input.write [ '09', 'reading', 'Ē', ]
      input.write [ '10', 'reading', 'Ī', ]
      input.write [ '11', 'reading', 'Ō', ]
      input.write [ '12', 'reading', 'Ū', ]
      input.write [ '13', 'reading', 'Ǖ', ]
      input.write [ '14', 'reading', 'á', ]
      input.write [ '15', 'reading', 'ɑ́', ]
      input.write [ '16', 'reading', 'é', ]
      input.write [ '17', 'reading', 'í', ]
      input.write [ '18', 'reading', 'ó', ]
      input.write [ '19', 'reading', 'ú', ]
      input.write [ '20', 'reading', 'ǘ', ]
      input.write [ '21', 'reading', 'Á', ]
      input.write [ '22', 'reading', 'É', ]
      input.write [ '23', 'reading', 'Í', ]
      input.write [ '24', 'reading', 'Ó', ]
      input.write [ '25', 'reading', 'Ú', ]
      input.write [ '26', 'reading', 'Ǘ', ]
      input.write [ '27', 'reading', 'ǎ', ]
      input.write [ '28', 'reading', 'ɑ̌', ]
      input.write [ '29', 'reading', 'ě', ]
      input.write [ '30', 'reading', 'ǐ', ]
      input.write [ '31', 'reading', 'ǒ', ]
      input.write [ '32', 'reading', 'ǔ', ]
      input.write [ '33', 'reading', 'ǚ', ]
      input.write [ '34', 'reading', 'Ǎ', ]
      input.write [ '35', 'reading', 'Ě', ]
      input.write [ '36', 'reading', 'Ǐ', ]
      input.write [ '37', 'reading', 'Ǒ', ]
      input.write [ '38', 'reading', 'Ǔ', ]
      input.write [ '39', 'reading', 'Ǚ', ]
      input.write [ '40', 'reading', 'à', ]
      input.write [ '41', 'reading', 'ɑ̀', ]
      input.write [ '42', 'reading', 'è', ]
      input.write [ '43', 'reading', 'ì', ]
      input.write [ '44', 'reading', 'ò', ]
      input.write [ '45', 'reading', 'ù', ]
      input.write [ '46', 'reading', 'ǜ', ]
      input.write [ '47', 'reading', 'À', ]
      input.write [ '48', 'reading', 'È', ]
      input.write [ '49', 'reading', 'Ì', ]
      input.write [ '50', 'reading', 'Ò', ]
      input.write [ '51', 'reading', 'Ù', ]
      input.write [ '52', 'reading', 'Ǜ', ]
      input.write [ '53', 'reading', 'a', ]
      input.write [ '54', 'reading', 'ɑ', ]
      input.write [ '55', 'reading', 'e', ]
      input.write [ '56', 'reading', 'i', ]
      input.write [ '57', 'reading', 'o', ]
      input.write [ '58', 'reading', 'u', ]
      input.write [ '59', 'reading', 'ü', ]
      input.write [ '60', 'reading', 'A', ]
      input.write [ '61', 'reading', 'Ɑ', ]
      input.write [ '62', 'reading', 'E', ]
      input.write [ '63', 'reading', 'I', ]
      input.write [ '64', 'reading', 'O', ]
      input.write [ '65', 'reading', 'U', ]
      input.write [ '66', 'reading', 'Ü', ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # Read all `pos` index entries back and compare the concatenated letters against
      # the expected sorted string.
      query = { prefix: [ 'pos', ], star: '*', }
      input = HOLLERITH.create_phrasestream db, query
      input
        .pipe do =>
          collector = []
          return $ ( phrase, send, end ) =>
            if phrase?
              # NOTE(review): assumes `pos` phrases have the shape
              # [ 'pos', predicate, object, subject, ... ], so the letter under test
              # sits at index 2 — confirm against HOLLERITH's index layout.
              [ _, _, letter, _, ] = phrase
              collector.push letter
            if end?
              urge collector = collector.join ''
              T.eq collector, 'AEIOUaeiouÀÁÈÉÌÍÒÓÙÚÜàáèéìíòóùúüĀāĒēĚěĪīŌōŪūǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜɑɑ̀ɑ́ɑ̄ɑ̌Ɑ'
              end()
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    # Clear the DB, write the probes, then verify the read-back order.
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
  @[ "use non-string subjects in phrases (1)" ] = ( T, done ) ->
    ### Smoke test: writes phrases whose subjects are not plain strings (lists and even
    whole phrases) together with KWIC sortcode data, then dumps everything that was
    stored. No assertions beyond completing without error. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      input.write [ '千', 'guide/kwic/v3/sortcode', [ [ [ '0686f---', null ], '千', [], [] ] ], ]
      input.write [ '于', 'guide/kwic/v3/sortcode', [ [ [ '0019f---', null ], '于', [], [] ] ], ]
      input.write [ '干', 'guide/kwic/v3/sortcode', [ [ [ '0020f---', null ], '干', [], [] ] ], ]
      #.......................................................................................................
      ### Three phrases to register '千 looks similar to both 于 and 干': ###
      input.write [ '千', 'shape/similarity', [ '于', '干', ], ]
      input.write [ '于', 'shape/similarity', [ '干', '千', ], ]
      input.write [ '干', 'shape/similarity', [ '千', '于', ], ]
      ### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
      input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0686f---', null ], '千', [], [] ] ], ]
      input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0019f---', null ], '于', [], [] ] ], ]
      input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0020f---', null ], '干', [], [] ] ], ]
      #.......................................................................................................
      ### Two sub-factorial renderings of 千 as 亻一 and 丿十: ###
      input.write [ '亻', 'guide/kwic/v3/sortcode', [ [ [ '0774f---', null ], '亻', [], [] ] ], ]
      input.write [ '一', 'guide/kwic/v3/sortcode', [ [ [ '0000f---', null ], '一', [], [] ] ], ]
      input.write [ '丿', 'guide/kwic/v3/sortcode', [ [ [ '0645f---', null ], '丿', [], [] ] ], ]
      input.write [ '十', 'guide/kwic/v3/sortcode', [ [ [ '0104f---', null ], '十', [], [] ] ], ]
      input.write [
        [ '千', 'guide/lineup/uchr', '亻一', ], 'guide/kwic/v3/sortcode', [
          [ [ '0774f---', '0000f---', null, ], [ '亻', [ '一', ], [] ], ]
          [ [ '0000f---', null, '0774f---', ], [ '一', [], [ '亻', ] ], ]
          ] ]
      input.write [
        [ '千', 'guide/lineup/uchr', '丿十', ], 'guide/kwic/v3/sortcode', [
          [ [ '0645f---', '0104f---', null, ], [ '丿', [ '十', ], [] ], ]
          [ [ '0104f---', null, '0645f---', ], [ '十', [], [ '丿', ] ], ]
          ] ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # Dump every stored phrase for visual inspection; no matcher comparison here.
      input = HOLLERITH.create_phrasestream db
      input
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
  @[ "use non-string subjects in phrases (2)" ] = ( T, done ) ->
    ### Like variant (1), but with reading data and with explicit trailing indexes (`0`)
    on the nested-phrase subjects; also tries nesting in both directions
    (similarity-in-reading and reading-in-similarity). Dump-only, no assertions. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      input.write [ '千', 'reading/py/base', [ 'qian', ], ]
      input.write [ '于', 'reading/py/base', [ 'yu', ], ]
      input.write [ '干', 'reading/py/base', [ 'gan', ], ]
      #.......................................................................................................
      ### Three phrases to register '千 looks similar to both 于 and 干': ###
      input.write [ '千', 'shape/similarity', [ '于', '干', ], ]
      input.write [ '于', 'shape/similarity', [ '干', '千', ], ]
      input.write [ '干', 'shape/similarity', [ '千', '于', ], ]
      ### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
      input.write [ [ '千', 'shape/similarity', [ '于', '干', ], 0, ], 'reading/py/base', [ 'qian', ], ]
      input.write [ [ '于', 'shape/similarity', [ '千', '干', ], 0, ], 'reading/py/base', [ 'yu', ], ]
      input.write [ [ '干', 'shape/similarity', [ '千', '于', ], 0, ], 'reading/py/base', [ 'gan', ], ]
      #.......................................................................................................
      input.write [ [ '千', 'reading/py/base', [ 'qian', ], 0, ], 'shape/similarity', [ '于', '干', ], ]
      input.write [ [ '于', 'reading/py/base', [ 'yu', ], 0, ], 'shape/similarity', [ '千', '干', ], ]
      input.write [ [ '干', 'reading/py/base', [ 'gan', ], 0, ], 'shape/similarity', [ '千', '于', ], ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # Dump every stored phrase for visual inspection; no matcher comparison here.
      input = HOLLERITH.create_phrasestream db
      input
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
  @[ "use non-string subjects in phrases (3)" ] = ( T, done ) ->
    ### Like variant (2), but every plain subject is itself wrapped in a one-element
    list, and the nested-phrase subjects carry no explicit index. Dump-only. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      ### Readings for 3 glyphs: ###
      input.write [ [ '千', ], 'reading/py/base', [ 'qian', ], ]
      input.write [ [ '于', ], 'reading/py/base', [ 'yu', ], ]
      input.write [ [ '干', ], 'reading/py/base', [ 'gan', ], ]
      #.......................................................................................................
      ### Three phrases to register '千 looks similar to both 于 and 干': ###
      input.write [ [ '千', ], 'shape/similarity', [ '于', '干', ], ]
      input.write [ [ '于', ], 'shape/similarity', [ '干', '千', ], ]
      input.write [ [ '干', ], 'shape/similarity', [ '千', '于', ], ]
      #.......................................................................................................
      ### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
      ### (1) these will lead from reading to similarity, as in
      `["pos","reading/py/base","gan",["干","shape/similarity",["千","于"]],0]`, meaning these phrases
      are suitable for building a dictionary organized by Pinyin readings with cross-references
      to similar characters: ###
      input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'reading/py/base', [ 'qian', ], ]
      input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'reading/py/base', [ 'yu', ], ]
      input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'reading/py/base', [ 'gan', ], ]
      #.......................................................................................................
      ### (2) these will lead from similarity to reading, as in
      `["pos","shape/similarity","于",["千","reading/py/base",["qian"]],0]` ###
      input.write [ [ '千', 'reading/py/base', [ 'qian', ], ], 'shape/similarity', [ '于', '干', ], ]
      input.write [ [ '于', 'reading/py/base', [ 'yu', ], ], 'shape/similarity', [ '千', '干', ], ]
      input.write [ [ '干', 'reading/py/base', [ 'gan', ], ], 'shape/similarity', [ '千', '于', ], ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # Dump every stored phrase for visual inspection; no matcher comparison here.
      input = HOLLERITH.create_phrasestream db
      input
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
  @[ "use non-string subjects in phrases (4)" ] = ( T, done ) ->
    ### Like variant (3), but the nested similarity phrases here carry *singular*
    objects, so subject / predicate pairs may repeat and an explicit index (trailing
    `1`, `2`) is required to keep them distinct. Earlier experiments are kept as
    commented-out writes for reference. Dump-only. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      ### Readings for 3 glyphs: ###
      input.write [ [ '千', ], 'reading/py/base', [ 'qian', ], ]
      input.write [ [ '于', ], 'reading/py/base', [ 'yu', 'foo', ], ]
      input.write [ [ '干', ], 'reading/py/base', [ 'gan', ], ]
      input.write [ [ '人', ], 'reading/py/base', [ 'ren', ], ]
      #.......................................................................................................
      ### Three phrases to register '千 looks similar to both 于 and 干': ###
      # input.write [ [ '千', ], 'shape/similarity', [ '于', '干', ], ]
      # input.write [ [ '于', ], 'shape/similarity', [ '干', '千', ], ]
      # input.write [ [ '干', ], 'shape/similarity', [ '千', '于', ], ]
      #.......................................................................................................
      ### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
      ### (1) these will lead from reading to similarity, as in
      `["pos","reading/py/base","gan",["干","shape/similarity",["千","于"]],0]`, meaning these phrases
      are suitable for building a dictionary organized by Pinyin readings with cross-references
      to similar characters: ###
      # input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'reading/py/base', 'qian', ]
      # input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'reading/py/base', 'yu', ]
      # input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'reading/py/base', 'gan', ]
      input.write [ [ '千', 'shape/similarity', '于', ], 'reading/py/base', 'qian', ]
      input.write [ [ '千', 'shape/similarity', '干', ], 'reading/py/base', 'qian', ]
      input.write [ [ '于', 'shape/similarity', '千', ], 'reading/py/base', 'yu', ]
      input.write [ [ '于', 'shape/similarity', '干', ], 'reading/py/base', 'yu', ]
      input.write [ [ '干', 'shape/similarity', '千', ], 'reading/py/base', 'gan', ]
      input.write [ [ '干', 'shape/similarity', '于', ], 'reading/py/base', 'gan', ]
      input.write [ [ '于', 'shape/similarity', '千', 1, ], 'reading/py/base', 'foo', ]
      input.write [ [ '于', 'shape/similarity', '干', 2, ], 'reading/py/base', 'foo', ]
      #.......................................................................................................
      # ### (2) these will lead from similarity to reading, as in
      # `["pos","shape/similarity","于",["千","reading/py/base",["qian"]],0]`. These phrases carry the same
      # information as the corresponding ones in `use non-string subjects in phrases (3)`, above,
      # but here the referenced similarity phrases have singular objects; consequently, subject / predicate
      # pairs may be repeated, which is why introducing an index is mandatory. As such, the index
      # need not be a number or for meaningful series—it only needs to be unique within the respective
      # group: ###
      # input.write [ [ '千', 'reading/py/base', [ 'qian', ], 0, ], 'shape/similarity', '于', ]
      # input.write [ [ '千', 'reading/py/base', [ 'qian', ], 1, ], 'shape/similarity', '干', ]
      # input.write [ [ '于', 'reading/py/base', [ 'yu', ], 0, ], 'shape/similarity', '千', ]
      # input.write [ [ '于', 'reading/py/base', [ 'yu', ], 1, ], 'shape/similarity', '干', ]
      # input.write [ [ '干', 'reading/py/base', [ 'gan', ], 0, ], 'shape/similarity', '千', ]
      # input.write [ [ '干', 'reading/py/base', [ 'gan', ], 1, ], 'shape/similarity', '于', ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # Dump every stored phrase; the narrowing `query` is prepared but currently
      # commented out of the `create_phrasestream` call.
      query = { prefix: [ 'pos', ], star: '*', }
      input = HOLLERITH.create_phrasestream db #, query
      input
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
  @[ "binary indexing" ] = ( T, done ) ->
    ### Prototype of a binary indexing transform: `$index from_prd, to_prd` watches the
    phrase stream for a `from` phrase and a `to` phrase with the same subject; as soon
    as both have been seen it emits derived index phrases that embed the `to` fact in
    the subject of the `from` fact. The resulting `pos` index is compared against the
    golden `matchers` list below. ###
    #.........................................................................................................
    $index = ( from_predicate, to_predicate, settings = {} ) =>
      # `settings[ 'from' ]` / `settings[ 'to' ]` declare whether the respective
      # phrase's object is a list ('plural') or a scalar ('singular').
      from_is_plural = settings[ 'from' ] is 'plural'
      to_is_plural = settings[ 'to' ] is 'plural'
      # Phrases seen so far, keyed by JSON-serialized subject, waiting for their
      # counterpart with the other predicate.
      from_cache = {}
      to_cache = {}
      #.......................................................................................................
      new_index_phrase = ( tsbj, tprd, tobj, fprd, fobj, tsbj_is_list, idx = 0 ) =>
        # Build one derived phrase; a list-valued target subject is spliced open so the
        # new subject stays a flat list.
        return [ [ tsbj..., tprd, idx, tobj, ], fprd, fobj, ] if tsbj_is_list
        return [ [ tsbj, tprd, idx, tobj, ], fprd, fobj, ]
      #.......................................................................................................
      link = ( from_phrase, to_phrase ) =>
        # Produce all derived phrases for one matching from/to pair. The shorthand
        # comments below read: f = from, t = to, s = singular, p = plural.
        [ fsbj, fprd, fobj, ] = from_phrase
        [ tsbj, tprd, tobj, ] = to_phrase
        tsbj_is_list = CND.isa_list tsbj
        #.....................................................................................................
        unless from_is_plural or to_is_plural
          # fs ts
          return [ new_index_phrase tsbj, tprd, tobj, fprd, fobj, tsbj_is_list ]
        #.....................................................................................................
        # At least one side is plural: emit one derived phrase per object (or per pair
        # of objects), numbering them with a running index.
        idx = -1
        R = []
        if from_is_plural
          # fp tp
          if to_is_plural
            for sub_fobj in fobj
              for sub_tobj in tobj
                idx += +1
                R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, sub_fobj, tsbj_is_list, idx
          else
            # fp ts
            for sub_fobj in fobj
              idx += +1
              R.push new_index_phrase tsbj, tprd, tobj, fprd, sub_fobj, tsbj_is_list, idx
        else
          # fs tp
          for sub_tobj in tobj
            idx += +1
            R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, fobj, tsbj_is_list, idx
        #.....................................................................................................
        return R
      #.......................................................................................................
      return $ ( phrase, send ) =>
        # Pass every phrase through unchanged; additionally cache phrases with the
        # watched predicates and emit derived phrases once a subject's pair is complete.
        send phrase
        [ sbj, prd, obj, ] = phrase
        #.....................................................................................................
        switch prd
          #...................................................................................................
          when from_predicate
            sbj_txt = JSON.stringify sbj
            if ( to_phrase = to_cache[ sbj_txt ] )?
              delete to_cache[ sbj_txt ]
              send index_phrase for index_phrase in link phrase, to_phrase
            else
              from_cache[ sbj_txt ] = phrase
          #...................................................................................................
          when to_predicate
            sbj_txt = JSON.stringify sbj
            if ( from_phrase = from_cache[ sbj_txt ] )?
              delete from_cache[ sbj_txt ]
              send index_phrase for index_phrase in link from_phrase, phrase
            else
              to_cache[ sbj_txt ] = phrase
        #.....................................................................................................
        return null
    #.........................................................................................................
    write_data = ( handler ) ->
      # Chain several `$index` transforms in front of the DB writer, then feed the
      # probe phrases through.
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe $index 'reading', 'variant', { from: 'plural', to: 'plural', }
        .pipe $index 'reading', 'similarity', { from: 'plural', to: 'plural', }
        .pipe $index 'reading', 'strokeorder', { from: 'plural', to: 'singular', }
        .pipe $index 'strokeorder', 'reading', { from: 'singular', to: 'plural', }
        .pipe $index 'strokeorder', 'usagecode', { from: 'singular', to: 'singular', }
        # .pipe $index 'strokeorder', 'variant', { from: 'singular', to: 'plural', }
        # .pipe $index 'strokeorder', 'similarity', { from: 'singular', to: 'plural', }
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
      input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
      input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
      input.write [ [ '千', ], 'strokeorder', '312', ]
      input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
      input.write [ [ '仟', ], 'strokeorder', '32312', ]
      input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
      input.write [ [ '仟', ], 'reading', [ 'qian', ], ]
      input.write [ [ '韆', ], 'strokeorder', '122125112125221134515454', ]
      input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
      input.write [ [ '韆', ], 'reading', [ 'qian', ], ]
      #.......................................................................................................
      # input.write [ ["千","variant",0,"仟"],"strokeorder","312"]
      # input.write [ ["千","variant",1,"韆"],"strokeorder","312"]
      # input.write [ ["千","variant",0,"仟",'usagecode','CJKTHm'],"strokeorder","312"]
      # input.write [ ["千","variant",1,"韆",'usagecode','KTHm'],"strokeorder","312"]
      #.......................................................................................................
      # input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
      # input.write [ [ '于', ], 'variant', [ '於', '亐', ], ]
      # input.write [ [ '干', ], 'variant', [ '乾', '幹', '榦', '亁', '乹', ], ]
      # input.write [ [ '人', ], 'variant', [ '亻', '𠔽', ], ]
      # input.write [ [ '仁', ], 'variant', [ '忈', ], ]
      # #.......................................................................................................
      # input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
      # input.write [ [ '于', ], 'usagecode', 'CJKTHM', ]
      # input.write [ [ '干', ], 'usagecode', 'CJKTHM', ]
      # input.write [ [ '人', ], 'usagecode', 'CJKTHM', ]
      # input.write [ [ '仁', ], 'usagecode', 'CJKTHM', ]
      # input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
      # input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
      # input.write [ [ '於', ], 'usagecode', 'cJKTHM', ]
      # input.write [ [ '亐', ], 'usagecode', 'K', ]
      # input.write [ [ '乾', ], 'usagecode', 'CJKTHM', ]
      # input.write [ [ '幹', ], 'usagecode', 'JKTHM', ]
      # input.write [ [ '榦', ], 'usagecode', 'THm', ]
      # input.write [ [ '亻', ], 'usagecode', 'p', ]
      # #.......................................................................................................
      # input.write [ [ '千', ], 'reading', [ 'qian', ], ]
      # input.write [ [ '于', ], 'reading', [ 'yu', 'foo', 'bar', ], ]
      # input.write [ [ '干', ], 'reading', [ 'gan', 'ほす', ], ]
      # input.write [ [ '人', ], 'reading', [ 'ren', ], ]
      # input.write [ [ '仁', ], 'reading', [ 'ren', ], ]
      # input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
      # input.write [ [ '于', ], 'similarity', [ '干', '千', ], ]
      # input.write [ [ '干', ], 'similarity', [ '千', '于', ], ]
      # #.......................................................................................................
      # input.write [ [ '千', ], 'strokeorder', '312', ]
      # input.write [ [ '于', ], 'strokeorder', '112', ]
      # input.write [ [ '干', ], 'strokeorder', '112', ]
      # input.write [ [ '人', ], 'strokeorder', '34', ]
      # input.write [ [ '仁', ], 'strokeorder', '3211', ]
      # input.write [ [ '仟', ], 'strokeorder', '32312', ]
      # input.write [ [ '韆', ], 'strokeorder', '122125112125221134515454', ]
      # input.write [ [ '於', ], 'strokeorder', '41353444', ]
      # input.write [ [ '亐', ], 'strokeorder', '115', ]
      # input.write [ [ '乾', ], 'strokeorder', '12251112315', ]
      # input.write [ [ '幹', ], 'strokeorder', '1225111231112', ]
      # input.write [ [ '榦', ], 'strokeorder', '12251112341234', ]
      # input.write [ [ '亻', ], 'strokeorder', '32', ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    # Expected `pos` index contents (in DB iteration order) after all `$index`
    # transforms have run.
    matchers = [
      ["pos","reading","bar",["千"],2]
      ["pos","reading","bar",["千","similarity",4,"于"]]
      ["pos","reading","bar",["千","similarity",5,"干"]]
      ["pos","reading","bar",["千","strokeorder",2,"312"]]
      ["pos","reading","bar",["千","variant",4,"仟"]]
      ["pos","reading","bar",["千","variant",5,"韆"]]
      ["pos","reading","foo",["千"],1]
      ["pos","reading","foo",["千","similarity",2,"于"]]
      ["pos","reading","foo",["千","similarity",3,"干"]]
      ["pos","reading","foo",["千","strokeorder",1,"312"]]
      ["pos","reading","foo",["千","variant",2,"仟"]]
      ["pos","reading","foo",["千","variant",3,"韆"]]
      ["pos","reading","qian",["仟"],0]
      ["pos","reading","qian",["仟","strokeorder",0,"32312"]]
      ["pos","reading","qian",["千"],0]
      ["pos","reading","qian",["千","similarity",0,"于"]]
      ["pos","reading","qian",["千","similarity",1,"干"]]
      ["pos","reading","qian",["千","strokeorder",0,"312"]]
      ["pos","reading","qian",["千","variant",0,"仟"]]
      ["pos","reading","qian",["千","variant",1,"韆"]]
      ["pos","reading","qian",["韆"],0]
      ["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
      ["pos","similarity","于",["千"],0]
      ["pos","similarity","干",["千"],1]
      ["pos","strokeorder","122125112125221134515454",["韆"]]
      ["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
      ["pos","strokeorder","122125112125221134515454",["韆","usagecode",0,"KTHm"]]
      ["pos","strokeorder","312",["千"]]
      ["pos","strokeorder","312",["千","reading",0,"qian"]]
      ["pos","strokeorder","312",["千","reading",1,"foo"]]
      ["pos","strokeorder","312",["千","reading",2,"bar"]]
      ["pos","strokeorder","312",["千","usagecode",0,"CJKTHM"]]
      ["pos","strokeorder","32312",["仟"]]
      ["pos","strokeorder","32312",["仟","reading",0,"qian"]]
      ["pos","strokeorder","32312",["仟","usagecode",0,"CJKTHm"]]
      ["pos","usagecode","CJKTHM",["千"]]
      ["pos","usagecode","CJKTHm",["仟"]]
      ["pos","usagecode","KTHm",["韆"]]
      ["pos","variant","仟",["千"],0]
      ["pos","variant","韆",["千"],1]
      ]
    #.........................................................................................................
    show = ( handler ) ->
      # Stream the `pos` index back and compare each phrase against the corresponding
      # `matchers` entry in order.
      query = { prefix: [ 'pos', ], star: '*', }
      # query = { prefix: [ 'pos', 'strokeorder', '312', ], star: '*', }
      input = HOLLERITH.create_phrasestream db, query
      input
        .pipe D.$observe ( phrase ) => info JSON.stringify phrase
        #.....................................................................................................
        .pipe do =>
          idx = -1
          return D.$observe ( phrase ) =>
            idx += +1
            T.eq phrase, matchers[ idx ]
        #.....................................................................................................
        .pipe D.$on_end -> handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
@[ "n-ary indexing (1)" ] = ( T, done ) ->
#.........................................................................................................
$index = ( descriptions ) =>
predicates = []
predicate_count = 0
arities = []
phrases = []
phrase_counts = {}
#.......................................................................................................
for predicate, arity of descriptions
predicate_count += +1
unless arity in [ 'singular', 'plural', ]
throw new Error "expected 'singular' or 'plural' for arity, got #{rpr arity}"
predicates.push predicate
phrases.push {}
arities.push arity
#.......................................................................................................
if predicate_count.length < 2
throw new Error "expected at least two predicate descriptions, got #{predicates.length}"
if predicate_count.length > 2
throw new Error "indexes with more than 2 steps not supported yet"
#.......................................................................................................
new_index_phrase = ( tsbj, tprd, tobj, fprd, fobj, tsbj_is_list, idx = 0 ) =>
return [ [ tsbj..., tprd, idx, tobj, ], fprd, fobj, ] if tsbj_is_list
return [ [ tsbj, tprd, idx, tobj, ], fprd, fobj, ]
#.......................................................................................................
link = ( phrases ) =>
throw new Error "indexes with anything but 2 steps not supported yet" if phrases.length != 2
[ from_phrase, to_phrase, ] = phrases
[ fsbj, fprd, fobj, ] = from_phrase
[ tsbj, tprd, tobj, ] = to_phrase
tsbj_is_list = CND.isa_list tsbj
from_is_plural = arities[ 0 ] is 'plural'
to_is_plural = arities[ 1 ] is 'plural'
#.....................................................................................................
unless from_is_plural or to_is_plural
return [ new_index_phrase tsbj, tprd, tobj, fprd, fobj, tsbj_is_list ]
#.....................................................................................................
idx = -1
R = []
if from_is_plural
if to_is_plural
for sub_fobj in fobj
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, sub_fobj, tsbj_is_list, idx
else
for sub_fobj in fobj
idx += +1
R.push new_index_phrase tsbj, tprd, tobj, fprd, sub_fobj, tsbj_is_list, idx
else
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, fobj, tsbj_is_list, idx
#.....................................................................................................
return R
#.......................................................................................................
return $ ( phrase, send ) =>
send phrase
[ sbj, prd, obj, ] = phrase
return unless ( prd_idx = predicates.indexOf prd ) >= 0
sbj_txt = JSON.stringify sbj
phrase_target = phrases[ sbj_txt]?= []
phrase_target[ prd_idx ] = phrase
phrase_counts[ sbj_txt ] = ( phrase_counts[ sbj_txt ] ? 0 ) + 1
return null if phrase_counts[ sbj_txt ] < predicate_count
#.....................................................................................................
send index_phrase for index_phrase in link phrases[ sbj_txt ]
return null
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe $index 'reading': 'plural', 'similarity': 'plural'
.pipe $index 'reading': 'plural', 'variant': 'plural'
.pipe $index 'reading': 'plural', 'strokeorder': 'singular'
.pipe $index 'strokeorder': 'singular', 'reading': 'plural'
.pipe $index 'strokeorder': 'singular', 'variant': 'plural'
.pipe $index 'strokeorder': 'singular', 'similarity': 'plural'
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
input.write [ [ '千', ], 'strokeorder', '312', ]
input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
input.write [ [ '仟', ], 'strokeorder', '32312', ]
input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
input.write [ [ '仟', ], 'reading', [ 'qian', ], ]
input.write [ [ '韆', ], 'strokeorder', '122125112125221134515454', ]
input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
input.write [ [ '韆', ], 'reading', [ 'qian', ], ]
#.......................................................................................................
input.end()
#.........................................................................................................
matchers = [
["pos","reading","bar",["千"],2]
["pos","reading","bar",["千","similarity",4,"于"]]
["pos","reading","bar",["千","similarity",5,"干"]]
["pos","reading","bar",["千","strokeorder",2,"312"]]
["pos","reading","bar",["千","variant",4,"仟"]]
["pos","reading","bar",["千","variant",5,"韆"]]
["pos","reading","foo",["千"],1]
["pos","reading","foo",["千","similarity",2,"于"]]
["pos","reading","foo",["千","similarity",3,"干"]]
["pos","reading","foo",["千","strokeorder",1,"312"]]
["pos","reading","foo",["千","variant",2,"仟"]]
["pos","reading","foo",["千","variant",3,"韆"]]
["pos","reading","qian",["仟"],0]
["pos","reading","qian",["仟","strokeorder",0,"32312"]]
["pos","reading","qian",["千"],0]
["pos","reading","qian",["千","similarity",0,"于"]]
["pos","reading","qian",["千","similarity",1,"干"]]
["pos","reading","qian",["千","strokeorder",0,"312"]]
["pos","reading","qian",["千","variant",0,"仟"]]
["pos","reading","qian",["千","variant",1,"韆"]]
["pos","reading","qian",["韆"],0]
["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
["pos","similarity","于",["千"],0]
["pos","similarity","干",["千"],1]
["pos","strokeorder","122125112125221134515454",["韆"]]
["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
["pos","strokeorder","312",["千"]]
["pos","strokeorder","312",["千","reading",0,"qian"]]
["pos","strokeorder","312",["千","reading",1,"foo"]]
["pos","strokeorder","312",["千","reading",2,"bar"]]
["pos","strokeorder","312",["千","similarity",0,"于"]]
["pos","strokeorder","312",["千","similarity",1,"干"]]
["pos","strokeorder","312",["千","variant",0,"仟"]]
["pos","strokeorder","312",["千","variant",1,"韆"]]
["pos","strokeorder","32312",["仟"]]
["pos","strokeorder","32312",["仟","reading",0,"qian"]]
["pos","usagecode","CJKTHM",["千"]]
["pos","usagecode","CJKTHm",["仟"]]
["pos","usagecode","KTHm",["韆"]]
["pos","variant","仟",["千"],0]
["pos","variant","韆",["千"],1]
]
#.........................................................................................................
show = ( handler ) ->
query = { prefix: [ 'pos', ], star: '*', }
input = HOLLERITH.create_phrasestream db, query
input
.pipe D.$observe ( phrase ) => info JSON.stringify phrase
#.....................................................................................................
.pipe do =>
idx = -1
return D.$observe ( phrase ) =>
idx += +1
T.eq phrase, matchers[ idx ]
#.....................................................................................................
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "n-ary indexing (2)" ] = ( T, done ) ->
  ### Builds an n-ary 'pos' index with six separate *binary* `$index` passes
  (instead of one n-ary spec) and checks that a prefix scan over [ 'pos' ]
  yields exactly the expected phrases, in LevelDB (bytewise) key order. ###
  #.........................................................................................................
  write_data = ( handler ) ->
    # Pipe the sample SPO facts through all six index builders, then into the DB.
    input = D.create_throughstream()
    #.......................................................................................................
    input
      .pipe HOLLERITH.$index 'reading': 'plural', 'similarity': 'plural'
      .pipe HOLLERITH.$index 'reading': 'plural', 'variant': 'plural'
      .pipe HOLLERITH.$index 'reading': 'plural', 'strokeorder': 'singular'
      .pipe HOLLERITH.$index 'strokeorder': 'singular', 'reading': 'plural'
      .pipe HOLLERITH.$index 'strokeorder': 'singular', 'variant': 'plural'
      .pipe HOLLERITH.$index 'strokeorder': 'singular', 'similarity': 'plural'
      .pipe HOLLERITH.$write db
      .pipe D.$on_end ->
        handler()
    #.......................................................................................................
    # Sample facts: three glyphs with readings, variants, similarities,
    # stroke orders and usage codes.
    input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
    input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
    input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
    input.write [ [ '千', ], 'strokeorder', '312', ]
    input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
    input.write [ [ '仟', ], 'strokeorder', '32312', ]
    input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
    input.write [ [ '仟', ], 'reading', [ 'qian', ], ]
    input.write [ [ '韆', ], 'strokeorder', '122125112125221134515454', ]
    input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
    input.write [ [ '韆', ], 'reading', [ 'qian', ], ]
    #.......................................................................................................
    input.end()
  #.........................................................................................................
  # Expected phrases, listed in the exact order the 'pos' prefix scan yields
  # them; the trailing integer (where present) is the list index of the object.
  matchers = [
    ["pos","reading","bar",["千"],2]
    ["pos","reading","bar",["千","similarity",4,"于"]]
    ["pos","reading","bar",["千","similarity",5,"干"]]
    ["pos","reading","bar",["千","strokeorder",2,"312"]]
    ["pos","reading","bar",["千","variant",4,"仟"]]
    ["pos","reading","bar",["千","variant",5,"韆"]]
    ["pos","reading","foo",["千"],1]
    ["pos","reading","foo",["千","similarity",2,"于"]]
    ["pos","reading","foo",["千","similarity",3,"干"]]
    ["pos","reading","foo",["千","strokeorder",1,"312"]]
    ["pos","reading","foo",["千","variant",2,"仟"]]
    ["pos","reading","foo",["千","variant",3,"韆"]]
    ["pos","reading","qian",["仟"],0]
    ["pos","reading","qian",["仟","strokeorder",0,"32312"]]
    ["pos","reading","qian",["千"],0]
    ["pos","reading","qian",["千","similarity",0,"于"]]
    ["pos","reading","qian",["千","similarity",1,"干"]]
    ["pos","reading","qian",["千","strokeorder",0,"312"]]
    ["pos","reading","qian",["千","variant",0,"仟"]]
    ["pos","reading","qian",["千","variant",1,"韆"]]
    ["pos","reading","qian",["韆"],0]
    ["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
    ["pos","similarity","于",["千"],0]
    ["pos","similarity","干",["千"],1]
    ["pos","strokeorder","122125112125221134515454",["韆"]]
    ["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
    ["pos","strokeorder","312",["千"]]
    ["pos","strokeorder","312",["千","reading",0,"qian"]]
    ["pos","strokeorder","312",["千","reading",1,"foo"]]
    ["pos","strokeorder","312",["千","reading",2,"bar"]]
    ["pos","strokeorder","312",["千","similarity",0,"于"]]
    ["pos","strokeorder","312",["千","similarity",1,"干"]]
    ["pos","strokeorder","312",["千","variant",0,"仟"]]
    ["pos","strokeorder","312",["千","variant",1,"韆"]]
    ["pos","strokeorder","32312",["仟"]]
    ["pos","strokeorder","32312",["仟","reading",0,"qian"]]
    ["pos","usagecode","CJKTHM",["千"]]
    ["pos","usagecode","CJKTHm",["仟"]]
    ["pos","usagecode","KTHm",["韆"]]
    ["pos","variant","仟",["千"],0]
    ["pos","variant","韆",["千"],1]
    ]
  #.........................................................................................................
  show = ( handler ) ->
    # Scan everything under the 'pos' prefix; the `do =>` closure keeps a
    # private running index so each phrase is compared against its matcher.
    query = { prefix: [ 'pos', ], star: '*', }
    input = HOLLERITH.create_phrasestream db, query
    input
      .pipe D.$observe ( phrase ) => info JSON.stringify phrase
      #.....................................................................................................
      .pipe do =>
        idx = -1
        return D.$observe ( phrase ) =>
          idx += +1
          T.eq phrase, matchers[ idx ]
      #.....................................................................................................
      .pipe D.$on_end -> handler()
  #.........................................................................................................
  # Clear the DB, write and index the facts, then verify the scan.
  step ( resume ) =>
    yield clear_leveldb db[ '%self' ], resume
    yield write_data resume
    yield show resume
    done()
#-----------------------------------------------------------------------------------------------------------
@_prune = ->
  ### Drop every public test entry whose name is absent from the module-level
  `include` list; names starting with an underscore (helpers) are kept. ###
  for test_name of @
    continue if test_name.startsWith '_'
    delete @[ test_name ] if ( include.indexOf test_name ) is -1
  return null
############################################################################################################
unless module.parent?
  # Entry point when this file is run directly (not require'd): choose which
  # tests to run, then start them via `@_main`.
  # debug '0980', JSON.stringify ( Object.keys @ ), null, ' '
  # Roster of all known tests; uncomment entries (and re-enable `@_prune()`
  # below) to restrict a run to a subset.
  include = [
    # "write without error (1)"
    # "write without error (2)"
    # "read without error"
    # "read keys without error (1)"
    # "read keys without error (2)"
    # "read keys without error (3)"
    # "read keys without error (4)"
    # "create_facetstream throws with wrong arguments"
    # "read POS facets"
    # "read POS phrases (1)"
    # "read POS phrases (2)"
    # "read SPO phrases"
    # "sorting (1)"
    # "sorting (2)"
    # "H2 codec `encode` throws on anything but a list"
    # "sort texts with H2 codec (1)"
    # "sort texts with H2 codec (2)"
    # "sort numbers with H2 codec (1)"
    # "sort mixed values with H2 codec"
    # "sort lists of mixed values with H2 codec"
    # "sort routes with values (1)"
    # "sort routes with values (2)"
    # "read sample data"
    # "read and write keys with lists"
    # "encode keys with list elements"
    # "read and write phrases with unanalyzed lists"
    # "read partial POS phrases"
    # "read single phrases (1)"
    # "read single phrases (2)"
    # "read single phrases (3)"
    # "read single phrases (4)"
    # "writing phrases with non-unique keys fails"
    # "reminders"
    # "invalid key not accepted (1)"
    # "invalid key not accepted (2)"
    # "catching errors (2)"
    # "catching errors (1)"
    # "building PODs from SPO phrases"
    # "read phrases in lockstep"
    # "has_any yields existence of key"
    # "$write rejects duplicate S/P pairs"
    # "codec accepts long keys"
    # "write private types (1)"
    # "write private types (2)"
    # "write private types (3)"
    # "bloom filter serialization without writes"
    # "use non-string subjects in phrases"
    # '$write rejects duplicate S/P pairs'
    # 'codec accepts long keys'
    # 'write private types (1)'
    # 'use non-string subjects in phrases (1)'
    # 'use non-string subjects in phrases (2)'
    # 'use non-string subjects in phrases (3)'
    # 'use non-string subjects in phrases (4)'
    # 'binary indexing'
    'n-ary indexing (1)'
    'n-ary indexing (2)'
    # "Pinyin Unicode Sorting"
    # "ensure `Buffer.compare` gives same sorting as LevelDB"
    ]
  # @_prune()
  @_main()
  # @[ "XXX" ] null, -> help "(done)"
  # @[ "YYY" ] null, -> help "(done)"
  # @[ "ZZZ" ] null, -> help "(done)"
  # debug '©P9AOR', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'null' ] ).toString 16
  # debug '©xxmIp', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'false' ] ).toString 16
  # debug '©ZeY26', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'true' ] ).toString 16
  # debug '©WgER9', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'date' ] ).toString 16
  # debug '©UmpjJ', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'ninfinity' ] ).toString 16
  # debug '©Url0K', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'nnumber' ] ).toString 16
  # debug '©nFIIi', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'pnumber' ] ).toString 16
  # debug '©LZ58R', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'pinfinity' ] ).toString 16
  # debug '©MYxda', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'text' ] ).toString 16
# NOTE(review): stray extraction artifact ("| 135494") — not CoffeeScript; commented out so the file parses
############################################################################################################
njs_path = require 'path'
# njs_fs = require 'fs'
join = njs_path.join
#...........................................................................................................
# CND supplies loggers, formatting and misc helpers used throughout the tests.
CND = require 'cnd'
rpr = CND.rpr
badge = 'HOLLERITH/tests'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
# `step`/`suspend` provide generator-based async flow control for the tests.
suspend = require 'coffeenode-suspend'
step = suspend.step
after = suspend.after
# eventually = suspend.eventually
### TAINT experimentally using `later` in place of `setImmediate` ###
later = suspend.immediately
#...........................................................................................................
test = require 'guy-test'
#...........................................................................................................
D = require 'pipedreams'
$ = D.remit.bind D
$async = D.remit_async.bind D
#...........................................................................................................
HOLLERITH = require './main'
# Shared handle to the test DB; opened by `@_main` below.
db = null
#...........................................................................................................
levelup = require 'level'
leveldown = require 'leveldown'
CODEC = require 'hollerith-codec'
#...........................................................................................................
ƒ = CND.format_number
# #-----------------------------------------------------------------------------------------------------------
# @_sort_list = ( list ) ->
#   @_encode_list list
#   list.sort Buffer.compare
#   @_decode_list list
#   return list
#===========================================================================================================
# HELPERS
#-----------------------------------------------------------------------------------------------------------
show_keys_and_key_bfrs = ( keys, key_bfrs ) ->
  ### Debugging aid: print each key next to the space-separated hex dump of
  its encoded buffer, as a two-column table. ###
  hexify = ( buffer ) ->
    pairs = ( pair for pair in ( buffer.toString 'hex' ).split /(..)/ when pair isnt '' )
    pairs.join ' '
  #.........................................................................................................
  hex_texts = ( hexify key_bfr for key_bfr in key_bfrs )
  rows = []
  for key, key_idx in keys
    # Make embedded NUL escapes visible as '∇' in the printed key.
    key_txt = ( rpr key ).replace /\\u0000/g, '∇'
    rows.push { 'str': key_txt, 'bfr': hex_texts[ key_idx ], }
  help '\n' + CND.columnify rows, { paddingChr: ' ', }
  return null
#-----------------------------------------------------------------------------------------------------------
show_db_entries = ( handler ) ->
  ### Debugging aid: stream every non-meta entry of the global test DB,
  collect the facets, and print them with `HOLLERITH.DUMP.rpr_of_facets`.
  Calls `handler()` once the stream ends. ###
  input = db[ '%self' ].createReadStream()
  input
    .pipe D.$show()
    .pipe $ ( { key, value, }, send ) => send [ key, value, ]
    .pipe $ ( [ key, value, ], send ) => send [ key, value, ] unless HOLLERITH._is_meta db, key
    .pipe $ ( [ key, value, ], send ) =>
      # debug '©RluhF', ( HOLLERITH.CODEC.decode key ), ( JSON.parse value )
      send [ key, value, ]
    .pipe D.$collect()
    .pipe $ ( facets, send ) =>
      help '\n' + HOLLERITH.DUMP.rpr_of_facets db, facets
      # buffer = new Buffer JSON.stringify [ '开', '彡' ]
      # debug '©GJfL6', HOLLERITH.CODEC.rpr_of_buffer null, buffer
    .pipe D.$on_end => handler()
#-----------------------------------------------------------------------------------------------------------
get_new_db_name = ->
  ### Hand out a fresh `/tmp` LevelDB path on every call (monotonic counter). ###
  get_new_db_name.idx = get_new_db_name.idx + 1
  return "/tmp/hollerith2-testdb-#{get_new_db_name.idx}"
get_new_db_name.idx = 0
#-----------------------------------------------------------------------------------------------------------
read_all_keys = ( db, handler ) ->
  ### Collect every key of `db` into a list and pass it to `handler null, keys`.
  NOTE(review): relies on the stream's 'end' event firing only after all data
  has been pushed — the sink below accumulates but never calls `send`; verify
  against pipedreams semantics. ###
  Z = []
  input = db.createKeyStream()
  input.on 'end', -> handler null, Z
  input
    .pipe $ ( data, send ) => Z.push data
#-----------------------------------------------------------------------------------------------------------
clear_leveldb = ( leveldb, handler ) ->
  ### Erase a LevelDB instance in place: close it, destroy its on-disk data
  (via `leveldown.destroy`), then re-open it at the same location. Calls
  `handler null` when done. ###
  step ( resume ) =>
    route = leveldb[ 'location' ]
    yield leveldb.close resume
    whisper "closed LevelDB"
    yield leveldown.destroy route, resume
    whisper "destroyed LevelDB"
    yield leveldb.open resume
    whisper "re-opened LevelDB"
    # help "erased and re-opened LevelDB at #{route}"
    handler null
#-----------------------------------------------------------------------------------------------------------
@_main = ( handler ) ->
  ### Open (or create) the shared test DB under `dbs/tests` and start the
  registered tests with a 2.5 s per-test timeout. ###
  db = HOLLERITH.new_db ( join __dirname, '..', 'dbs/tests' ), { size: 500, }
  test @, 'timeout': 2500
#-----------------------------------------------------------------------------------------------------------
@_feed_test_data = ( db, probes_idx, settings, handler ) ->
  ### Clear `db`, then stream test dataset number `probes_idx` into it via
  `HOLLERITH.$write`. `settings` is optional and forwarded to `$write`;
  `handler` is called with `null` once all data has been written. Dataset -1
  is generated on the fly (1001 synthetic triples); dataset 1 holds URL-style
  keys that are decoded with `HOLLERITH.key_from_url`; datasets 0, 2..5 are
  plain S/P/O triples taken from `@_feed_test_data.probes`. ###
  switch arity = arguments.length
    when 3
      handler = settings
      settings = null
    when 4
      null
    else
      throw new Error "expected 3 or 4 arguments, got #{arity}"
  #.........................................................................................................
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    whisper "writing test dataset ##{probes_idx} with settings #{rpr settings}"
    input = D.create_throughstream()
    #.......................................................................................................
    switch probes_idx
      #-----------------------------------------------------------------------------------------------------
      when -1
        # settings =
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            handler null
            end()
        #...................................................................................................
        for n in [ 0 .. 1000 ]
          ### NOTE(review): the original key literal was lost (scrubbed in the
          repo history); this synthetic S/P/O triple restores a runnable probe
          generator — verify against an earlier revision. ###
          key = [ "glyph-#{n}", 'probe/idx', n, ]
          input.write key
          yield later resume
        input.end()
      #-----------------------------------------------------------------------------------------------------
      when 0, 2, 3, 4, 5
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            handler null
            end()
        #...................................................................................................
        for probe in @_feed_test_data.probes[ probes_idx ]
          # key = HOLLERITH.new_so_key db, probe...
          # debug '©WV0j2', probe
          input.write probe
          yield later resume
        input.end()
      #-----------------------------------------------------------------------------------------------------
      when 1
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            end()
            handler null
        #...................................................................................................
        for url_key in @_feed_test_data.probes[ probes_idx ]
          # Dataset 1 entries are URL-style keys; decode them before writing.
          key = HOLLERITH.key_from_url db, url_key
          input.write key
          yield later resume
        input.end()
      #-------------------------------------------------------------------------------------------------------
      else return handler new Error "illegal probes index #{rpr probes_idx}"
  #.........................................................................................................
  return null
#-----------------------------------------------------------------------------------------------------------
### Canned probe datasets, indexed by `probes_idx` in `@_feed_test_data`.
NOTE(review): several subject slots below are empty strings (in the
'factor/strokeclass/wbf' and 'rank/cjt' rows); they look like glyphs lost to
an encoding mishap — verify against an earlier revision. ###
@_feed_test_data.probes = []
#...........................................................................................................
### probes_idx == 0 ###
@_feed_test_data.probes.push [
  [ '𧷟1', 'guide/lineup/length', 1, ]
  [ '𧷟2', 'guide/lineup/length', 2, ]
  [ '𧷟3', 'guide/lineup/length', 3, ]
  [ '𧷟4', 'guide/lineup/length', 4, ]
  [ '𧷟', 'guide/lineup/length', 5, ]
  [ '𧷟6', 'guide/lineup/length', 6, ]
  [ '𧷟', 'cp/cid', 163295, ]
  [ '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝', ], ]
  [ '𧷟', 'rank/cjt', 5432, ]
  [ '八', 'factor/strokeclass/wbf', '34', ]
  [ '刀', 'factor/strokeclass/wbf', '5(12)3', ]
  [ '宀', 'factor/strokeclass/wbf', '44', ]
  [ '', 'factor/strokeclass/wbf', '12', ]
  [ '貝', 'factor/strokeclass/wbf', '25(12)', ]
  [ '八', 'rank/cjt', 12541, ]
  [ '刀', 'rank/cjt', 12542, ]
  [ '宀', 'rank/cjt', 12543, ]
  [ '', 'rank/cjt', 12544, ]
  [ '貝', 'rank/cjt', 12545, ]
  ]
#...........................................................................................................
### probes_idx == 1 ###
# URL-style keys; decoded via `HOLLERITH.key_from_url` before writing.
@_feed_test_data.probes.push [
  'so|glyph:劬|cp/fncr:u-cjk/52ac|0'
  'so|glyph:邭|cp/fncr:u-cjk/90ad|0'
  'so|glyph:𠴦|cp/fncr:u-cjk-xb/20d26|0'
  'so|glyph:𤿯|cp/fncr:u-cjk-xb/24fef|0'
  'so|glyph:𧑴|cp/fncr:u-cjk-xb/27474|0'
  'so|glyph:𨒡|cp/fncr:u-cjk-xb/284a1|0'
  'so|glyph:𪚧|cp/fncr:u-cjk-xb/2a6a7|0'
  'so|glyph:𪚫|cp/fncr:u-cjk-xb/2a6ab|0'
  'so|glyph:𤿯|strokeorder:352513553254|0'
  'so|glyph:𠴦|strokeorder:3525141121|0'
  'so|glyph:𨒡|strokeorder:35251454|0'
  'so|glyph:邭|strokeorder:3525152|0'
  'so|glyph:𪚫|strokeorder:352515251115115113541|0'
  'so|glyph:𪚧|strokeorder:35251525112511511|0'
  'so|glyph:𧑴|strokeorder:352515251214251214|0'
  'so|glyph:劬|strokeorder:3525153|0'
  ]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 2 ###
@_feed_test_data.probes.push [
  [ '丁', 'strokecount', 2, ]
  [ '三', 'strokecount', 3, ]
  [ '夫', 'strokecount', 5, ]
  [ '國', 'strokecount', 11, ]
  [ '形', 'strokecount', 7, ]
  [ '丁', 'componentcount', 1, ]
  [ '三', 'componentcount', 1, ]
  [ '夫', 'componentcount', 1, ]
  [ '國', 'componentcount', 4, ]
  [ '形', 'componentcount', 2, ]
  [ '丁', 'components', [ '丁', ], ]
  [ '三', 'components', [ '三', ], ]
  [ '夫', 'components', [ '夫', ], ]
  [ '國', 'components', [ '囗', '戈', '口', '一', ], ]
  [ '形', 'components', [ '开', '彡', ], ]
  ]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 3 ###
@_feed_test_data.probes.push [
  [ '丁', 'isa', [ 'glyph', 'guide', ] ]
  [ '三', 'isa', [ 'glyph', 'guide', ] ]
  [ '夫', 'isa', [ 'glyph', 'guide', ] ]
  [ '國', 'isa', [ 'glyph', ] ]
  [ '形', 'isa', [ 'glyph', ] ]
  [ 'glyph:丁', 'strokeorder/count', 2, ]
  [ 'glyph:三', 'strokeorder/count', 3, ]
  [ 'glyph:夫', 'strokeorder/count', 5, ]
  [ 'glyph:國', 'strokeorder/count', 11, ]
  [ 'glyph:形', 'strokeorder/count', 7, ]
  [ 'glyph:丁', 'guide/count', 1, ]
  [ 'glyph:三', 'guide/count', 1, ]
  [ 'glyph:夫', 'guide/count', 1, ]
  [ 'glyph:國', 'guide/count', 4, ]
  [ 'glyph:形', 'guide/count', 2, ]
  [ 'glyph:丁', 'guide/lineup', [ '丁', ], ]
  [ 'glyph:三', 'guide/lineup', [ '三', ], ]
  [ 'glyph:夫', 'guide/lineup', [ '夫', ], ]
  [ 'glyph:國', 'guide/lineup', [ '囗', '戈', '口', '一', ], ]
  [ 'glyph:形', 'guide/lineup', [ '开', '彡', ], ]
  ]
#...........................................................................................................
### probes_idx == 4 ###
# Same as dataset 0, plus a few extra predicates for prefix-boundary tests.
@_feed_test_data.probes.push [
  [ '𧷟1', 'guide/lineup/length', 1, ]
  [ '𧷟2', 'guide/lineup/length', 2, ]
  [ '𧷟3', 'guide/lineup/length', 3, ]
  [ '𧷟4', 'guide/lineup/length', 4, ]
  [ '𧷟', 'guide/lineup/length', 5, ]
  [ '𧷟6', 'guide/lineup/length', 6, ]
  [ '𧷟', 'cp/cid', 163295, ]
  [ '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝', ], ]
  [ '𧷟', 'rank/cjt', 5432, ]
  [ '八', 'factor/strokeclass/wbf', '34', ]
  [ '刀', 'factor/strokeclass/wbf', '5(12)3', ]
  [ '宀', 'factor/strokeclass/wbf', '44', ]
  [ '', 'factor/strokeclass/wbf', '12', ]
  [ '貝', 'factor/strokeclass/wbf', '25(12)', ]
  [ '八', 'rank/cjt', 12541, ]
  [ '刀', 'rank/cjt', 12542, ]
  [ '宀', 'rank/cjt', 12543, ]
  [ '', 'rank/cjt', 12544, ]
  [ '貝', 'rank/cjt', 12545, ]
  [ '𧷟1', 'a', 42 ]
  [ '𧷟1', 'ab', 42 ]
  [ '𧷟1', 'guide', 'xxx' ]
  [ '𧷟1', 'guide/', 'yyy' ]
  [ '𧷟1', 'z', 42 ]
  ]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 5 ###
# Reduced dataset plus one non-string (route-typed) subject.
@_feed_test_data.probes.push [
  [ '丁', 'strokecount', 2, ]
  # [ '三', 'strokecount', 3, ]
  # [ '夫', 'strokecount', 5, ]
  # [ '國', 'strokecount', 11, ]
  # [ '形', 'strokecount', 7, ]
  [ '丁', 'componentcount', 1, ]
  # [ '三', 'componentcount', 1, ]
  # [ '夫', 'componentcount', 1, ]
  # [ '國', 'componentcount', 4, ]
  # [ '形', 'componentcount', 2, ]
  [ '丁', 'components', [ '丁', ], ]
  # [ '三', 'components', [ '三', ], ]
  # [ '夫', 'components', [ '夫', ], ]
  # [ '國', 'components', [ '囗', '戈', '口', '一', ], ]
  # [ '形', 'components', [ '开', '彡', ], ]
  # [ { type: 'route', value: '/foo/bar', }, 'mtime', new Date '2011-10-10T14:48:00Z', ]
  [ { type: 'route', value: '/foo/bar', }, 'mtime', 123456789, ]
  ]
# pos|guide/kwic/sortcode
# # [
# # "1027~~~~,00","0156~~~~,01,0509~~~~,02,0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,"
# # "0156~~~~,01","0509~~~~,02,0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,"
# # "0509~~~~,02","0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,0156~~~~,01,"
# # "0000~~~~,03","--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,0156~~~~,01,0509~~~~,02,"
# # ]
# 0087~~~~,00,0291~~~~,01,0555~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦖈|0
# 0087~~~~,00,0291~~~~,01,0823x2h-,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|聗|0
# 0087~~~~,00,0291~~~~,01,1023~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𤋕|0
# 0087~~~~,00,0294~~~~,01,0060~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦖔|0
# 0087~~~~,00,0294~~~~,01,0555~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦗆|0
# 0087~~~~,00,0295~~~~,01,0802~~~~,02,0958~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𥪻|0
# 0087~~~~,00,0312~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦔲|0
# 0087~~~~,00,0314~~~~,01,1173~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕀|0
# 0087~~~~,00,0319~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕇|0
# 0087~~~~,00,0355~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕆|0
# 0087~~~~,00,0373~~~~,01,0284~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕧|0
#-----------------------------------------------------------------------------------------------------------
@[ "write without error (1)" ] = ( T, done ) ->
  ### Smoke test: canned dataset #0 must write cleanly with batching enabled. ###
  step ( resume ) =>
    yield @_feed_test_data db, 0, { batch: 10, }, resume
    done()
#-----------------------------------------------------------------------------------------------------------
@[ "write without error (2)" ] = ( T, done ) ->
  ### Smoke test: the generated dataset (#-1, 1001 entries) must write cleanly
  with batching enabled. ###
  step ( resume ) =>
    yield @_feed_test_data db, -1, { batch: 10, }, resume
    done()
#-----------------------------------------------------------------------------------------------------------
@[ "read without error" ] = ( T, done ) ->
  ### Smoke test: feed dataset #0, then consume the whole facet stream; only
  completion (not content) is asserted. Fix: the end handler previously read
  `end; done()` — a no-op expression statement — where every sibling test
  calls `end(); done()` (cf. "read POS facets" / "read POS phrases (1)"). ###
  probes_idx = 0
  idx = -1
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    # done()
    input = HOLLERITH.create_facetstream db
    input
      # .pipe HOLLERITH.$url_from_key db
      .pipe $ ( [ key, value, ], send ) =>
        idx += +1
        # T.eq key, matchers[ idx ]
      .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (1)" ] = ( T, done ) ->
  ### Write ten [ 'x', idx, 'x' ] keys straight into the underlying LevelDB,
  then range-scan with `_query_from_prefix` and check exactly one key (the one
  with idx == 4) comes back. ###
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    ### TAINT awaiting better solution ###
    # NOTE(review): despite its name, `NULL` encodes the number 1.
    NULL = HOLLERITH._encode_value db, 1
    for idx in [ 0 ... 10 ]
      key_bfr = HOLLERITH._encode_key db, [ 'x', idx, 'x', ]
      db[ '%self' ].put key_bfr, NULL
    #.......................................................................................................
    probe_idx = 4
    count = 0
    query = HOLLERITH._query_from_prefix db, [ 'x', probe_idx, ]
    # debug '©ETONp', HOLLERITH.CODEC.rpr_of_buffer key_bfr
    input = db[ '%self' ].createReadStream query
    input
      .pipe $ ( { key, value, }, send ) =>
        count += 1
        T.eq ( HOLLERITH._decode_key db, key )[ 1 ], probe_idx
      .pipe D.$on_end ( end ) =>
        T.eq count, 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (2)" ] = ( T, done ) ->
  ### Same setup as (1), but reads back through the higher-level
  `create_facetstream` with a `prefix` query instead of a raw LevelDB scan. ###
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    ### TAINT awaiting better solution ###
    # NOTE(review): despite its name, `NULL` encodes the number 1.
    NULL = HOLLERITH._encode_value db, 1
    for idx in [ 0 ... 10 ]
      db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), NULL
    #.......................................................................................................
    probe_idx = 4
    count = 0
    prefix = [ 'x', probe_idx, ]
    input = HOLLERITH.create_facetstream db, { prefix, }
    input
      .pipe $ ( facet, send ) =>
        count += 1
        [ key, value, ] = facet
        T.eq key[ 1 ], probe_idx
      .pipe D.$on_end ( end ) =>
        T.eq count, 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (3)" ] = ( T, done ) ->
  ### Range scan with explicit gte/lte bounds built from encoded keys: indices
  3, 4 and 5 (inclusive on both ends) must come back, in order. ###
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    ### TAINT awaiting better solution ###
    # NOTE(review): despite its name, `NULL` encodes the number 1.
    NULL = HOLLERITH._encode_value db, 1
    for idx in [ 0 ... 10 ]
      db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), NULL
    #.......................................................................................................
    probe_idx = 3
    count = 0
    delta = 2
    lo = [ 'x', probe_idx, ]
    hi = [ 'x', probe_idx + delta, ]
    # `lte` is taken from a prefix query so the upper bound covers all keys
    # starting with `hi`, not just the exact key.
    query = { gte: ( HOLLERITH._encode_key db, lo ), lte: ( HOLLERITH._query_from_prefix db, hi )[ 'lte' ], }
    input = db[ '%self' ].createReadStream query
    input
      .pipe $ ( { key, value, }, send ) =>
        count += 1
        T.eq ( HOLLERITH._decode_key db, key )[ 1 ], probe_idx + count - 1
      .pipe D.$on_end ( end ) =>
        T.eq count, delta + 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (4)" ] = ( T, done ) ->
  ### Same range as (3), but expressed with `lo`/`hi` lists passed to
  `create_facetstream` rather than hand-built gte/lte bounds. ###
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    for idx in [ 0 ... 10 ]
      db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), HOLLERITH._encode_value db, 1
    #.......................................................................................................
    probe_idx = 3
    count = 0
    delta = 2
    lo = [ 'x', probe_idx, ]
    hi = [ 'x', probe_idx + delta, ]
    input = HOLLERITH.create_facetstream db, { lo, hi, }
    input
      .pipe $ ( [ key, value, ], send ) =>
        count += 1
        T.eq key[ 1 ], probe_idx + count - 1
      .pipe D.$on_end ( end ) =>
        T.eq count, delta + 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "create_facetstream throws with wrong arguments" ] = ( T, done ) ->
  ### Specifying `hi` without `lo` must be rejected with a descriptive error. ###
  expected_message = "illegal to specify `hi` but not `lo`"
  T.throws expected_message, -> HOLLERITH.create_facetstream db, { hi: [ 'xxx', ], }
  done()
#-----------------------------------------------------------------------------------------------------------
@[ "read POS facets" ] = ( T, done ) ->
  ### Range-read 'pos' facets between two guide/lineup/length object values
  (2..4 inclusive) and compare the decoded keys against `key_matchers`. ###
  probes_idx = 0
  idx = -1
  #.........................................................................................................
  key_matchers = [
    [ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
    [ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
    [ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
    ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    lo = [ 'pos', 'guide/lineup/length', 2, ]
    hi = [ 'pos', 'guide/lineup/length', 4, ]
    # input = HOLLERITH.create_keystream db, lo
    input = HOLLERITH.create_facetstream db, { lo, hi, }
    input
      # .pipe HOLLERITH.$url_from_key db
      .pipe $ ( [ key, value, ], send ) =>
        idx += +1
        phrase = HOLLERITH.as_phrase db, key, value
        T.eq key, key_matchers[ idx ]
      .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
@[ "read POS phrases (1)" ] = ( T, done ) ->
  ### Like "read POS facets", but reads decoded phrases via
  `create_phrasestream` instead of raw key/value facets. ###
  probes_idx = 0
  idx = -1
  #.........................................................................................................
  matchers = [
    [ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
    [ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
    [ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
    ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    lo = [ 'pos', 'guide/lineup/length', 2, ]
    hi = [ 'pos', 'guide/lineup/length', 4, ]
    input = HOLLERITH.create_phrasestream db, { lo, hi, }
    input
      .pipe $ ( phrase, send ) =>
        idx += +1
        T.eq phrase, matchers[ idx ]
      .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
@[ "read POS phrases (2)" ] = ( T, done ) ->
  ### Prefix scan over [ 'pos', 'guide/uchr/has' ]: each list element of the
  `guide/uchr/has` fact must appear as its own indexed phrase (trailing
  integer = list position), and the total count must match. ###
  probes_idx = 0
  idx = -1
  count = 0
  #.........................................................................................................
  # NOTE(review): one matcher has an empty-string object — probably a glyph
  # lost to an encoding mishap in the probe data; verify.
  matchers = [
    [ 'pos', 'guide/uchr/has', '八', '𧷟', 0, ]
    [ 'pos', 'guide/uchr/has', '刀', '𧷟', 1, ]
    [ 'pos', 'guide/uchr/has', '宀', '𧷟', 2, ]
    [ 'pos', 'guide/uchr/has', '貝', '𧷟', 4, ]
    [ 'pos', 'guide/uchr/has', '', '𧷟', 3, ]
    ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    prefix = [ 'pos', 'guide/uchr/has', ]
    input = HOLLERITH.create_phrasestream db, { prefix, }
    settings = { indexed: no, }
    input
      .pipe $ ( phrase, send ) =>
        debug '©DsAfY', rpr phrase
        count += +1
        idx += +1
        T.eq phrase, matchers[ idx ]
      .pipe D.$on_end ( end ) =>
        T.eq count, matchers.length
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "read SPO phrases" ] = ( T, done ) ->
  ### Prefix scan over [ 'spo', '𧷟' ]: all four facts about that subject must
  come back as SPO phrases, in predicate order, with the list-valued fact
  intact (not exploded into per-element phrases). ###
  debug '©Rsoxb', db[ '%self' ].isOpen()
  probes_idx = 0
  idx = -1
  count = 0
  #.........................................................................................................
  matchers = [
    [ 'spo', '𧷟', 'cp/cid', 163295 ]
    [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
    [ 'spo', '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝' ] ]
    [ 'spo', '𧷟', 'rank/cjt', 5432 ]
    ]
  #.........................................................................................................
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    prefix = [ 'spo', '𧷟', ]
    input = HOLLERITH.create_phrasestream db, { prefix, }
    input
      .pipe $ ( phrase, send ) =>
        debug '©DsAfY', rpr phrase
        count += +1
        idx += +1
        T.eq phrase, matchers[ idx ]
      .pipe D.$on_end ( end ) =>
        T.eq count, matchers.length
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
@[ "sorting (1)" ] = ( T, done ) ->
  ### Verify that LevelDB (via leveldown with binary key encoding) sorts
  UTF-8-encoded texts bytewise: a key precedes its extensions, and a 0x00
  extension sorts before any other extension byte. Fix: the keys were read
  back twice in a row (duplicated `read_all_keys` line) — the redundant
  second read has been removed. ###
  step ( resume ) =>
    settings =
      db: leveldown
      keyEncoding: 'binary'
    leveldb = levelup '/tmp/hollerith2-test', settings
    yield clear_leveldb leveldb, resume
    probes = [
      'a'
      'ab'
      'abc'
      'abc\x00'
      'abc\x00a'
      'abca'
      'abcb'
      'abcc'
      'abcd'
      'abcde'
      'abcdef'
      'abcdefg' ]
    # Expected key order: the UTF-8 bytes of `probes`, sorted bytewise.
    matchers = [
      new Buffer [ 0x61, ]
      new Buffer [ 0x61, 0x62, ]
      new Buffer [ 0x61, 0x62, 0x63, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x00, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x00, 0x61, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x61, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x62, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x63, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, ]
      new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, ] ]
    # Shuffle before insertion so the read-back order is LevelDB's doing.
    CND.shuffle probes
    for probe in probes
      probe_bfr = new Buffer probe, 'utf-8'
      yield leveldb.put probe_bfr, '1', resume
    probe_bfrs = yield read_all_keys leveldb, resume
    # debug '©RXPvv', '\n' + rpr probe_bfrs
    for probe_bfr, probe_idx in probe_bfrs
      matcher = matchers[ probe_idx ]
      ### TAINT looks like `T.eq buffer1, buffer2` doesn't work---sometimes... ###
      # T.eq probe_bfr, matcher
      T.ok probe_bfr.equals matcher
    leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sorting (2)" ] = ( T, done ) ->
### This test is here because there seemed to occur some strange ordering issues when
using memdown instead of leveldown ###
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
new Buffer [ 0x00, ]
new Buffer [ 0x01, ]
new Buffer [ 0x02, ]
new Buffer [ 0x03, ]
new Buffer [ 0xf9, ]
new Buffer [ 0xfa, ]
new Buffer [ 0xfb, ]
new Buffer [ 0xfc, ]
new Buffer [ 0xfd, ]
]
matchers = ( probe for probe in probes )
CND.shuffle probes
for probe in probes
yield leveldb.put probe, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
for probe_bfr, probe_idx in probe_bfrs
matcher = matchers[ probe_idx ]
# debug '©15060', probe_idx, probe_bfr, matcher
### TAINT looks like `T.eq buffer1, buffer2` doesn't work---sometimes... ###
T.ok probe_bfr.equals matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "H2 codec `encode` throws on anything but a list" ] = ( T, done ) ->
T.throws "expected a list, got a text", ( -> CODEC.encode 'unaccaptable' )
T.throws "expected a list, got a number", ( -> CODEC.encode 42 )
T.throws "expected a list, got a boolean", ( -> CODEC.encode true )
T.throws "expected a list, got a boolean", ( -> CODEC.encode false )
T.throws /^expected a list, got a (?:js)?undefined$/, ( -> CODEC.encode() )
done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort texts with H2 codec (1)" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
'a'
'ab'
'abc'
'abc\x00'
'abc\x00a'
'abca'
'abca\x00'
'abcb'
'abcc'
'abcd'
'abcde'
'abcdef'
'abcdefg'
]
matchers = ( [ probe, ] for probe in probes )
CND.shuffle probes
for probe in probes
yield leveldb.put ( CODEC.encode [ probe, ] ), '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
show_keys_and_key_bfrs probes, probe_bfrs
for probe, probe_idx in probes
matcher = matchers[ probe_idx ]
T.eq probe, matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort texts with H2 codec (2)" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
''
' '
'a'
'abc'
'一'
'一二'
'一二三'
'三'
'二'
'𠀀'
'𠀀\x00'
'𠀀a'
'𪜀'
'𫝀'
String.fromCodePoint 0x10ffff
]
matchers = ( [ probe, ] for probe in probes )
CND.shuffle probes
for probe in probes
probe_bfr = CODEC.encode [ probe, ]
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
# debug '©Fd5iw', probe_bfrs
probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
show_keys_and_key_bfrs probes, probe_bfrs
for probe, probe_idx in probes
matcher = matchers[ probe_idx ]
T.eq probe, matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort numbers with H2 codec (1)" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes_and_descriptions = [
[ -Infinity, "-Infinity" ]
[ -Number.MAX_VALUE, "-Number.MAX_VALUE" ]
[ Number.MIN_SAFE_INTEGER, "Number.MIN_SAFE_INTEGER" ]
[ -123456789, "-123456789" ]
[ -3, "-3" ]
[ -2, "-2" ]
[ -1.5, "-1.5" ]
[ -1, "-1" ]
[ -Number.EPSILON, "-Number.EPSILON" ]
[ -Number.MIN_VALUE, "-Number.MIN_VALUE" ]
[ 0, "0" ]
[ +Number.MIN_VALUE, "+Number.MIN_VALUE" ]
[ +Number.EPSILON, "+Number.EPSILON" ]
[ +1, "+1" ]
[ +1.5, "+1.5" ]
[ +2, "+2" ]
[ +3, "+3" ]
[ +123456789, "+123456789" ]
[ Number.MAX_SAFE_INTEGER, "Number.MAX_SAFE_INTEGER" ]
[ Number.MAX_VALUE, "Number.MAX_VALUE" ]
[ +Infinity, "+Infinity" ]
]
# probes_and_descriptions.sort ( a, b ) ->
# return +1 if a[ 0 ] > b[ 0 ]
# return -1 if a[ 0 ] < b[ 0 ]
# return 0
matchers = ( [ pad[ 0 ], ] for pad in probes_and_descriptions )
# descriptions = ( [ pad[ 1 ], ] for pad in probes_and_descriptions )
for pad in probes_and_descriptions
urge pad
CND.shuffle probes_and_descriptions
for [ probe, _, ] in probes_and_descriptions
probe_bfr = CODEC.encode [ probe, ]
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
show_keys_and_key_bfrs probes, probe_bfrs
for probe, probe_idx in probes
matcher = matchers[ probe_idx ]
T.eq probe, matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort mixed values with H2 codec" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
null
false
true
CODEC[ 'sentinels' ][ 'firstdate' ]
new Date 0
new Date 8e11
new Date()
CODEC[ 'sentinels' ][ 'lastdate' ]
1234
Infinity
''
'一'
'三'
'二'
'𠀀'
'𠀀\x00'
String.fromCodePoint 0x10ffff
]
matchers = ( [ probe, ] for probe in probes )
CND.shuffle probes
for probe in probes
debug '©oMXJZ', probe
probe_bfr = CODEC.encode [ probe, ]
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
# debug '©Fd5iw', probe_bfrs
probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
show_keys_and_key_bfrs probes, probe_bfrs
for probe, probe_idx in probes
matcher = matchers[ probe_idx ]
T.eq probe, matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort lists of mixed values with H2 codec" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
[ "", '', ]
[ "1234", 1234, ]
[ "Infinity", Infinity, ]
[ "String.fromCodePoint 0x10ffff", String.fromCodePoint 0x10ffff ]
[ "false", false, ]
[ "new Date 0", new Date 0, ]
[ "new Date 8e11", new Date 8e11, ]
[ "new Date()", new Date(), ]
[ "null", null, ]
[ "true", true, ]
[ "一", '一', ]
[ "三", '三', ]
[ "二", '二', ]
[ "𠀀", '𠀀', ]
[ "𠀀\x00", '𠀀\x00', ]
]
matchers = ( probe for probe in probes )
CND.shuffle probes
for probe in probes
debug '©oMXJZ', probe
probe_bfr = CODEC.encode probe
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
# debug '©Fd5iw', probe_bfrs
probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
show_keys_and_key_bfrs probes, probe_bfrs
for probe, probe_idx in probes
matcher = matchers[ probe_idx ]
T.eq probe, matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "ensure `Buffer.compare` gives same sorting as LevelDB" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
# { x: 1234.5678 }
[ "", '', ]
[ "1234", 1234, ]
[ "Infinity", Infinity, ]
[ "String.fromCodePoint 0x10ffff", String.fromCodePoint 0x10ffff ]
[ "false", false, ]
[ "new Date 0", new Date 0, ]
[ "new Date 8e11", new Date 8e11, ]
[ "new Date()", new Date(), ]
[ "null", null, ]
[ "true", true, ]
[ "一", '一', ]
[ "三", '三', ]
[ "二", '二', ]
[ "𠀀", '𠀀', ]
[ "𠀀\x00", '𠀀\x00', ]
]
CND.shuffle probes
for probe in probes
probe_bfr = CODEC.encode probe
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
last_probe_bfr = null
for probe_bfr in probe_bfrs
if last_probe_bfr?
T.eq ( Buffer.compare last_probe_bfr, probe_bfr ), -1
last_probe_bfr = probe_bfr
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort routes with values (1)" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
[ 'pos', 'strokeorder', '352513553254', '𤿯', ]
[ 'pos', 'strokeorder', '3525141121', '𠴦', ]
[ 'pos', 'strokeorder', '35251454', '𨒡', ]
[ 'pos', 'strokeorder', '3525152', '邭', ]
[ 'pos', 'strokeorder', '352515251115115113541', '𪚫', ]
[ 'pos', 'strokeorder', '35251525112511511', '𪚧', ]
[ 'pos', 'strokeorder', '352515251214251214', '𧑴', ]
[ 'pos', 'strokeorder', '3525153', '劬', ]
[ 'pos', 'strokeorder', '3525153\x00', '劬', ]
[ 'pos', 'strokeorder\x00', '352513553254', '𤿯', ]
]
matchers = ( probe for probe in probes )
CND.shuffle probes
for probe in probes
probe_bfr = CODEC.encode probe
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
# debug '©Fd5iw', probe_bfrs
probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
show_keys_and_key_bfrs probes, probe_bfrs
for probe, probe_idx in probes
matcher = matchers[ probe_idx ]
T.eq probe, matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "sort routes with values (2)" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
[ 'a', null, ]
[ 'a', false, ]
[ 'a', true, ]
[ 'a', new Date(), ]
[ 'a', -Infinity, ]
[ 'a', +1234, ]
[ 'a', +Infinity, ]
[ 'a', 'b', ]
[ 'a', 'b\x00', ]
[ 'a\x00', +1234, ]
[ 'a\x00', 'b', ]
[ 'aa', +1234, ]
[ 'aa', 'b', ]
[ 'aa', 'b\x00', ]
]
matchers = ( probe for probe in probes )
CND.shuffle probes
for probe in probes
probe_bfr = CODEC.encode probe
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
# debug '©Fd5iw', probe_bfrs
probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
show_keys_and_key_bfrs probes, probe_bfrs
for probe, probe_idx in probes
matcher = matchers[ probe_idx ]
T.eq probe, matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "read sample data" ] = ( T, done ) ->
probes_idx = 2
idx = -1
step ( resume ) =>
debug '©bUJhI', 'XX'
yield @_feed_test_data db, probes_idx, resume
debug '©PRzA5', 'XX'
input = db[ '%self' ].createReadStream()
input
.pipe D.$show()
.pipe $ ( { key, value, }, send ) => send [ key, value, ]
.pipe $ ( [ key, value, ], send ) => send [ key, value, ] unless HOLLERITH._is_meta db, key
.pipe $ ( [ key, value, ], send ) =>
# debug '©RluhF', ( HOLLERITH.CODEC.decode key ), ( JSON.parse value )
send [ key, value, ]
.pipe D.$collect()
.pipe $ ( facets, send ) =>
# debug '©54IKt', facets
help '\n' + HOLLERITH.DUMP.rpr_of_facets db, facets
buffer = new Buffer JSON.stringify [ '开', '彡' ]
debug '©GJfL6', HOLLERITH.CODEC.rpr_of_buffer buffer
.pipe D.$on_end => done()
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@[ "read and write keys with lists" ] = ( T, done ) ->
probes_idx = 0
idx = -1
count = 0
probes = [
[ 'a', 1, ]
[ 'a', [], ]
[ 'a', [ 1, ], ]
[ 'a', [ true, ], ]
[ 'a', [ 'x', 'y', 'b', ], ]
[ 'a', [ 120, 1 / 3, ], ]
[ 'a', [ 'x', ], ]
]
matchers = ( probe for probe in probes )
#.........................................................................................................
for probe, probe_idx in probes
buffer = HOLLERITH.CODEC.encode probe
result = HOLLERITH.CODEC.decode buffer
T.eq result, matchers[ probe_idx ]
#.........................................................................................................
done()
#-----------------------------------------------------------------------------------------------------------
@[ "encode keys with list elements" ] = ( T, done ) ->
probes = [
[ 'foo', 'bar', ]
[ 'foo', [ 'bar', ], ]
[ [], 'bar', ]
[ 'foo', [], ]
[ [ 'foo', ], 'bar', ]
[ [ 42, ], 'bar', ]
[ 'foo', [ 42, ] ]
]
for probe in probes
T.eq probe, HOLLERITH.CODEC.decode HOLLERITH.CODEC.encode probe
done()
#-----------------------------------------------------------------------------------------------------------
@[ "read and write phrases with unanalyzed lists" ] = ( T, done ) ->
# ### !!!!!!!!!!!!!!!!!!!!!! ###
# warn "skipped"
# return done()
# ### !!!!!!!!!!!!!!!!!!!!!! ###
idx = -1
count = 0
#.........................................................................................................
probes = [
[ 'probe#00', 'some-predicate', [], ]
[ 'probe#01', 'some-predicate', [ -1 ], ]
[ 'probe#02', 'some-predicate', [ 0 ], ]
[ 'probe#03', 'some-predicate', [ 1 ], ]
[ 'probe#04', 'some-predicate', [ 2 ], ]
[ 'probe#05', 'some-predicate', [ 2, -1, ], ]
[ 'probe#06', 'some-predicate', [ 2, 0, ], ]
[ 'probe#07', 'some-predicate', [ 2, 1, ], ]
[ 'probe#08', 'some-predicate', [ 2, 1, 0 ], ]
[ 'probe#09', 'some-predicate', [ 2, 2, ], ]
[ 'probe#10', 'some-predicate', [ 2, [ 2, ], ], ]
[ 'probe#11', 'some-predicate', [ 3 ], ]
]
#.........................................................................................................
write_probes = ( handler ) =>
step ( resume ) =>
yield HOLLERITH.clear db, resume
input = D.create_throughstream()
input
# .pipe ( [ sbj, prd, obj, ], send ) =>
# if prd is 'some-predicate' # always the case in this example
# obj
.pipe HOLLERITH.$write db, solids: [ 'some-predicate', ]
.pipe D.$on_end =>
urge "test data written"
handler()
#.....................................................................................................
input.write probe for probe in probes
input.end()
#.........................................................................................................
step ( resume ) =>
#.......................................................................................................
yield write_probes resume
input = HOLLERITH.create_phrasestream db
debug '©FphJK', input[ '%meta' ]
input
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
# debug '©Sc5FG', phrase
# T.eq phrase, matchers[ idx ]
.pipe D.$on_end =>
# T.eq count, matchers.length
done()
#-----------------------------------------------------------------------------------------------------------
@[ "read partial POS phrases" ] = ( T, done ) ->
# ### !!!!!!!!!!!!!!!!!!!!!! ###
# warn "skipped"
# return done()
# ### !!!!!!!!!!!!!!!!!!!!!! ###
probes_idx = 4
idx = -1
count = 0
#.........................................................................................................
matchers = [
[ 'pos', 'guide', 'xxx', '𧷟1' ]
[ 'pos', 'guide/', 'yyy', '𧷟1' ]
[ 'pos', 'guide/lineup/length', 1, '𧷟1', ]
[ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
[ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
[ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
[ 'pos', 'guide/lineup/length', 5, '𧷟', ]
[ 'pos', 'guide/lineup/length', 6, '𧷟6', ]
[ 'pos', 'guide/uchr/has', '八', '𧷟', 0 ]
[ 'pos', 'guide/uchr/has', '刀', '𧷟', 1 ]
[ 'pos', 'guide/uchr/has', '宀', '𧷟', 2 ]
[ 'pos', 'guide/uchr/has', '貝', '𧷟', 4 ]
[ 'pos', 'guide/uchr/has', '', '𧷟', 3 ]
]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
# prefix = [ 'pos', 'guide', ]
prefix = [ 'pos', 'guide', ]
input = HOLLERITH.create_phrasestream db, { prefix, star: '*', }
# input = HOLLERITH.create_phrasestream db, { prefix, }
debug '©FphJK', input[ '%meta' ]
settings = { indexed: no, }
input
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
debug '©Sc5FG', phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end =>
T.eq count, matchers.length
done()
#-----------------------------------------------------------------------------------------------------------
@[ "read single phrases (1)" ] = ( T, done ) ->
probes_idx = 4
matcher = [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
# prefix = [ 'pos', 'guide', ]
prefix = [ 'spo', '𧷟', 'guide/lineup/length', ]
query = { prefix, star: '*', }
input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
throw error if error?
debug '©61ENl', phrase
T.eq phrase, matcher
done()
#-----------------------------------------------------------------------------------------------------------
@[ "read single phrases (2)" ] = ( T, done ) ->
probes_idx = 4
matcher = [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
prefix = [ 'spo', '𧷟', 'guide/lineup/length', ]
query = { prefix, star: '*', fallback: 'not to be used', }
input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
throw error if error?
debug '©61ENl', phrase
T.eq phrase, matcher
done()
#-----------------------------------------------------------------------------------------------------------
@[ "read single phrases (3)" ] = ( T, done ) ->
probes_idx = 4
matcher = "expected 1 phrase, got 0"
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
prefix = [ 'spo', '中', 'guide/lineup/length', ]
query = { prefix, star: '*', }
input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
throw new Error "expected error" unless error?
T.eq error[ 'message' ], matcher
done()
#-----------------------------------------------------------------------------------------------------------
@[ "read single phrases (4)" ] = ( T, done ) ->
probes_idx = 4
matcher = "this entry is missing"
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
prefix = [ 'spo', '中', 'guide/lineup/length', ]
query = { prefix, star: '*', fallback: matcher, }
input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
throw error if error?
T.eq phrase, matcher
done()
#-----------------------------------------------------------------------------------------------------------
@[ "writing phrases with non-unique keys fails" ] = ( T, done ) ->
alert """test case "writing phrases with non-unique keys fails" to be written"""
done()
#-----------------------------------------------------------------------------------------------------------
@[ "reminders" ] = ( T, done ) ->
alert "H.$write() must test for repeated keys"
done()
#-----------------------------------------------------------------------------------------------------------
@[ "invalid key not accepted (1)" ] = ( T, done ) ->
domain = ( require 'domain' ).create()
domain.on 'error', ( error ) ->
# debug '©AOSmn', JSON.stringify error[ 'message' ]
T.eq error[ 'message' ], "invalid SPO key, must be list: 'xxx'"
later done
domain.run ->
input = D.create_throughstream()
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
# T.fail "should throw error"
later done
input.write 'xxx'
input.end()
#-----------------------------------------------------------------------------------------------------------
@[ "invalid key not accepted (2)" ] = ( T, done ) ->
domain = ( require 'domain' ).create()
domain.on 'error', ( error ) ->
# debug '©AOSmn', JSON.stringify error[ 'message' ]
T.eq error[ 'message' ], "invalid SPO key, must be of length 3: [ 'foo' ]"
done()
domain.run ->
input = D.create_throughstream()
input.pipe HOLLERITH.$write db
input.write [ 'foo', ]
#-----------------------------------------------------------------------------------------------------------
@[ "catching errors (2)" ] = ( T, done ) ->
run = ( method, handler ) ->
domain = ( require 'domain' ).create()
domain.on 'error', ( error ) ->
handler error
domain.run ->
method()
#.........................................................................................................
f = ->
input = D.create_throughstream()
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
later done
input.write [ 'foo', 'bar', 'baz', ]
input.end()
run f, ( error ) ->
debug '©WaXJV', JSON.stringify error[ 'message' ]
T.eq true, false
done()
#-----------------------------------------------------------------------------------------------------------
@[ "catching errors (1)" ] = ( T, done ) ->
#.........................................................................................................
d = D.run ->
input = D.create_throughstream()
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
later done
input.write [ 'foo', 'bar', 'baz', 'gnu', ]
input.end()
, ( error ) ->
T.eq error[ 'message' ], "invalid SPO key, must be of length 3: [ 'foo', 'bar', 'baz', 'gnu' ]"
later done
#-----------------------------------------------------------------------------------------------------------
@[ "catching errors (2)" ] = ( T, done ) ->
message = "should not produce error"
#.........................................................................................................
d = D.run ->
input = D.create_throughstream()
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
T.succeed message
later done
input.write [ 'foo', 'bar', 'baz', ]
input.end()
, ( error ) ->
T.fail message
later done
#-----------------------------------------------------------------------------------------------------------
@[ "building PODs from SPO phrases" ] = ( T, done ) ->
probes_idx = 4
idx = -1
count = 0
# #.........................................................................................................
# matchers = [
# [ 'spo', '𧷟', 'cp/cid', 163295 ]
# [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
# [ 'spo', '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝' ] ]
# [ 'spo', '𧷟', 'rank/cjt', 5432 ]
# ]
#.........................................................................................................
$shorten_spo = ->
return $ ( phrase, send ) =>
unless ( CND.isa_list phrase ) and phrase[ 0 ] is 'spo'
return send.error new Error "not an SPO phrase: #{rpr phrase}"
spo = phrase[ 1 .. ]
### TAINT repeated validation? ###
HOLLERITH.validate_spo spo
send spo
#.........................................................................................................
$consolidate = ->
last_sbj = null
pod = null
return $ ( spo, send, end ) =>
if spo?
### TAINT repeated validation? ###
HOLLERITH.validate_spo spo
[ sbj, prd, obj, ] = spo
#...................................................................................................
if sbj is last_sbj
pod[ prd ] = obj
#...................................................................................................
else
if pod?
### TAINT implicit key `pod` ###
send [ last_sbj, 'pod', pod, ]
pod = '%sbj': sbj
pod[ prd ] = obj
last_sbj = sbj
#...................................................................................................
# send spo
#.....................................................................................................
if end?
send [ last_sbj, 'pod', pod, ] if last_sbj?
end()
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
prefix = [ 'spo', ]
input = HOLLERITH.create_phrasestream db, { prefix, }
input
.pipe $shorten_spo()
.pipe $consolidate()
.pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end done
###
#-----------------------------------------------------------------------------------------------------------
@[ "keep ordering and completeness in asynchronous streams" ] = ( T, T_done ) ->
step ( resume ) =>
idx = 0
input_A = D.create_throughstream()
#.......................................................................................................
input_B = input_A
.pipe D.$stop_time "keep ordering and completeness in asynchronous streams"
.pipe $async ( data, done ) ->
dt = CND.random_number 0.5, 1.5
# debug '©WscFi', data, dt
after dt, =>
warn "send #{rpr data}"
done data
.pipe $ ( data, send ) ->
help "read #{rpr data}"
T.eq data, idx
idx += +1
send data
.pipe D.$on_end =>
T_done()
#.......................................................................................................
write = ->
for n in [ 0 .. 10 ]
# help "write #{n}"
input_A.write n
yield after 0.1, resume
input_A.end()
#.......................................................................................................
write()
###
#-----------------------------------------------------------------------------------------------------------
@[ "read phrases in lockstep" ] = ( T, done ) ->
probes_idx = 2
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
input_1 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'strokecount' ], }
input_2 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'componentcount' ], }
input_3 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'components' ], }
input_1
.pipe D.$lockstep input_2, fallback: null
.pipe D.$lockstep input_3, fallback: null
.pipe $ ( data, send ) => help JSON.stringify data; send data
.pipe D.$on_end done
#-----------------------------------------------------------------------------------------------------------
@[ "has_any yields existence of key" ] = ( T, done ) ->
probes_idx = 2
probes_and_matchers = [
[ [ 'spo', '形', 'strokecount', ], true, ]
[ [ 'spo', '丁', 'componentcount', ], true, ]
[ [ 'spo', '三', 'componentcount', ], true, ]
[ [ 'spo', '夫', 'componentcount', ], true, ]
[ [ 'spo', '國', 'componentcount', ], true, ]
[ [ 'spo', '形', 'componentcount', ], true, ]
[ [ 'spo', '丁', 'components', ], true, ]
[ [ 'spo', '丁', 'xxxx', ], false, ]
[ [ 'spo', '丁', ], true, ]
[ [ 'spo', ], true, ]
[ [ 'xxx', ], false, ]
]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
for [ probe, matcher, ] in probes_and_matchers
T.eq matcher, yield HOLLERITH.has_any db, { prefix: probe, }, resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "$write rejects duplicate S/P pairs" ] = ( T, done ) ->
probes_idx = 2
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
#.......................................................................................................
try_writing = ->
input = D.create_throughstream()
#.....................................................................................................
input
.pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
T.fail "should never be called"
done()
#.....................................................................................................
input.write [ '形', 'strokecount', 1234, ]
input.end()
#.......................................................................................................
D.run try_writing, ( error ) ->
T.eq "S/P pair already in DB: [ '形', 'strokecount' ]", error[ 'message' ]
done()
#-----------------------------------------------------------------------------------------------------------
@[ "codec accepts long keys" ] = ( T, done ) ->
probes_idx = 2
probes = []
long_text = ( new Array 1025 ).join '#'
# probes.push [ 'foo', long_text, [ long_text, long_text, long_text, long_text, long_text, ], ]
# probes.push [ 'foo', [ long_text, long_text, long_text, long_text, long_text, ],
# [ long_text, long_text, long_text, long_text, long_text, ], ]
# probes.push [ 'foo', [ long_text, long_text, long_text, long_text, long_text, ], ]
probes.push [ 'foo', [ long_text, long_text, long_text, long_text, ], 42, ]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
#.......................................................................................................
try_writing = ->
input = D.create_throughstream()
#.....................................................................................................
input
# .pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
T.eq 1, 1
done()
#.....................................................................................................
for probe in probes
input.write probe
# yield later resume
input.end()
#.......................................................................................................
D.run try_writing, ( error ) ->
T.fail "should not throw error"
warn error
done()
#-----------------------------------------------------------------------------------------------------------
@[ "write private types (1)" ] = ( T, done ) ->
probes_idx = 5
idx = -1
count = 0
#.........................................................................................................
matchers = [
["pos","componentcount",1,"丁"]
["pos","components","丁","丁",0]
["pos","mtime",123456789,{"type":"route","value":"/foo/bar"}]
["pos","strokecount",2,"丁"]
["spo","丁","componentcount",1]
["spo","丁","components",["丁"]]
["spo","丁","strokecount",2]
["spo",{"type":"route","value":"/foo/bar"},"mtime",123456789]
]
#.........................................................................................................
write_data = ( handler ) =>
input = D.create_throughstream()
input
# .pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end -> handler()
#.......................................................................................................
for probe in @_feed_test_data.probes[ probes_idx ]
input.write probe
input.end()
#.........................................................................................................
read_data = ( handler ) ->
#.......................................................................................................
input = HOLLERITH.create_phrasestream db
input
# .pipe D.$show()
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
debug '©Sc5FG', JSON.stringify phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield HOLLERITH.clear db, resume
yield write_data resume
yield read_data resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "write private types (2)" ] = ( T, done ) ->
probes_idx = 5
idx = -1
count = 0
#.........................................................................................................
encoder = ( type, value ) ->
debug '©XXX-encoder', type, rpr value
return value.split '/' if type is 'route'
throw new Error "unknown private type #{rpr type}"
#.........................................................................................................
xdb_route = join __dirname, '..', 'dbs/tests-with-private-types'
#.........................................................................................................
xdb_settings =
size: 500
encoder: encoder
#.........................................................................................................
xdb = HOLLERITH.new_db xdb_route, xdb_settings
#.........................................................................................................
matchers = [
["pos","componentcount",1,"丁"]
["pos","components","丁","丁",0]
["pos","mtime",123456789,{"type":"route","value":["","foo","bar"]}]
["pos","strokecount",2,"丁"]
["spo","丁","componentcount",1]
["spo","丁","components",["丁"]]
["spo","丁","strokecount",2]
["spo",{"type":"route","value":["","foo","bar"]},"mtime",123456789]
]
#.........................................................................................................
write_data = ( handler ) =>
input = D.create_throughstream()
input
# .pipe D.$show()
.pipe HOLLERITH.$write xdb
.pipe D.$on_end -> handler()
#.......................................................................................................
for probe in @_feed_test_data.probes[ probes_idx ]
input.write probe
input.end()
#.........................................................................................................
read_data = ( handler ) ->
#.......................................................................................................
input = HOLLERITH.create_phrasestream xdb
input
# .pipe D.$show()
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
debug '©Sc5FG', JSON.stringify phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield HOLLERITH.clear xdb, resume
yield write_data resume
yield read_data resume
yield xdb[ '%self' ].close resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "write private types (3)" ] = ( T, done ) ->
probes_idx = 5
idx = -1
count = 0
#.........................................................................................................
encoder = ( type, value ) ->
# debug '©XXX-encoder', type, rpr value
return value.split '/' if type is 'route'
throw new Error "unknown private type #{rpr type}"
#.........................................................................................................
decoder = ( type, value ) ->
# debug '©XXX-decoder', type, rpr value
return value.join '/' if type is 'route'
throw new Error "unknown private type #{rpr type}"
#.........................................................................................................
xdb_route = join __dirname, '..', 'dbs/tests-with-private-types'
#.........................................................................................................
xdb_settings =
size: 500
encoder: encoder
decoder: decoder
#.........................................................................................................
xdb = HOLLERITH.new_db xdb_route, xdb_settings
#.........................................................................................................
matchers = [
["pos","componentcount",1,"丁"]
["pos","components","丁","丁",0]
["pos","mtime",123456789,"/foo/bar"]
["pos","strokecount",2,"丁"]
["spo","丁","componentcount",1]
["spo","丁","components",["丁"]]
["spo","丁","strokecount",2]
["spo","/foo/bar","mtime",123456789]
]
#.........................................................................................................
write_data = ( handler ) =>
input = D.create_throughstream()
input
# .pipe D.$show()
.pipe HOLLERITH.$write xdb
.pipe D.$on_end -> handler()
#.......................................................................................................
for probe in @_feed_test_data.probes[ probes_idx ]
input.write probe
input.end()
#.........................................................................................................
read_data = ( handler ) ->
#.......................................................................................................
input = HOLLERITH.create_phrasestream xdb
input
# .pipe D.$show()
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
urge '©Sc5FG', JSON.stringify phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield HOLLERITH.clear xdb, resume
yield write_data resume
yield read_data resume
yield xdb[ '%self' ].close resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "bloom filter serialization without writes" ] = ( T, done ) ->
#.........................................................................................................
# step ( resume ) =>
xdb = HOLLERITH.new_db get_new_db_name()
input = HOLLERITH.create_phrasestream xdb
input.pause()
input.pipe HOLLERITH.$write xdb
input.resume()
input.end()
T.ok true
done()
#-----------------------------------------------------------------------------------------------------------
@[ "Pinyin Unicode Sorting" ] = ( T, done ) ->
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ '01', 'reading', 'ā', ]
input.write [ '02', 'reading', 'ɑ̄', ]
input.write [ '03', 'reading', 'ē', ]
input.write [ '04', 'reading', 'ī', ]
input.write [ '05', 'reading', 'ō', ]
input.write [ '06', 'reading', 'ū', ]
input.write [ '07', 'reading', 'ǖ', ]
input.write [ '08', 'reading', 'Ā', ]
input.write [ '09', 'reading', 'Ē', ]
input.write [ '10', 'reading', 'Ī', ]
input.write [ '11', 'reading', 'Ō', ]
input.write [ '12', 'reading', 'Ū', ]
input.write [ '13', 'reading', 'Ǖ', ]
input.write [ '14', 'reading', 'á', ]
input.write [ '15', 'reading', 'ɑ́', ]
input.write [ '16', 'reading', 'é', ]
input.write [ '17', 'reading', 'í', ]
input.write [ '18', 'reading', 'ó', ]
input.write [ '19', 'reading', 'ú', ]
input.write [ '20', 'reading', 'ǘ', ]
input.write [ '21', 'reading', 'Á', ]
input.write [ '22', 'reading', 'É', ]
input.write [ '23', 'reading', 'Í', ]
input.write [ '24', 'reading', 'Ó', ]
input.write [ '25', 'reading', 'Ú', ]
input.write [ '26', 'reading', 'Ǘ', ]
input.write [ '27', 'reading', 'ǎ', ]
input.write [ '28', 'reading', 'ɑ̌', ]
input.write [ '29', 'reading', 'ě', ]
input.write [ '30', 'reading', 'ǐ', ]
input.write [ '31', 'reading', 'ǒ', ]
input.write [ '32', 'reading', 'ǔ', ]
input.write [ '33', 'reading', 'ǚ', ]
input.write [ '34', 'reading', 'Ǎ', ]
input.write [ '35', 'reading', 'Ě', ]
input.write [ '36', 'reading', 'Ǐ', ]
input.write [ '37', 'reading', 'Ǒ', ]
input.write [ '38', 'reading', 'Ǔ', ]
input.write [ '39', 'reading', 'Ǚ', ]
input.write [ '40', 'reading', 'à', ]
input.write [ '41', 'reading', 'ɑ̀', ]
input.write [ '42', 'reading', 'è', ]
input.write [ '43', 'reading', 'ì', ]
input.write [ '44', 'reading', 'ò', ]
input.write [ '45', 'reading', 'ù', ]
input.write [ '46', 'reading', 'ǜ', ]
input.write [ '47', 'reading', 'À', ]
input.write [ '48', 'reading', 'È', ]
input.write [ '49', 'reading', 'Ì', ]
input.write [ '50', 'reading', 'Ò', ]
input.write [ '51', 'reading', 'Ù', ]
input.write [ '52', 'reading', 'Ǜ', ]
input.write [ '53', 'reading', 'a', ]
input.write [ '54', 'reading', 'ɑ', ]
input.write [ '55', 'reading', 'e', ]
input.write [ '56', 'reading', 'i', ]
input.write [ '57', 'reading', 'o', ]
input.write [ '58', 'reading', 'u', ]
input.write [ '59', 'reading', 'ü', ]
input.write [ '60', 'reading', 'A', ]
input.write [ '61', 'reading', 'Ɑ', ]
input.write [ '62', 'reading', 'E', ]
input.write [ '63', 'reading', 'I', ]
input.write [ '64', 'reading', 'O', ]
input.write [ '65', 'reading', 'U', ]
input.write [ '66', 'reading', 'Ü', ]
#.......................................................................................................
input.end()
#.........................................................................................................
show = ( handler ) ->
query = { prefix: [ 'pos', ], star: '*', }
input = HOLLERITH.create_phrasestream db, query
input
.pipe do =>
collector = []
return $ ( phrase, send, end ) =>
if phrase?
[ _, _, letter, _, ] = phrase
collector.push letter
if end?
urge collector = collector.join ''
T.eq collector, 'AEIOUaeiouÀÁÈÉÌÍÒÓÙÚÜàáèéìíòóùúüĀāĒēĚěĪīŌōŪūǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜɑɑ̀ɑ́ɑ̄ɑ̌Ɑ'
end()
.pipe D.$observe ( phrase ) =>
info JSON.stringify phrase
.pipe D.$on_end ->
handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
# yield feed_test_data db, probes_idx, resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "use non-string subjects in phrases (1)" ] = ( T, done ) ->
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ '千', 'guide/kwic/v3/sortcode', [ [ [ '0686f---', null ], '千', [], [] ] ], ]
input.write [ '于', 'guide/kwic/v3/sortcode', [ [ [ '0019f---', null ], '于', [], [] ] ], ]
input.write [ '干', 'guide/kwic/v3/sortcode', [ [ [ '0020f---', null ], '干', [], [] ] ], ]
#.......................................................................................................
### Three phrases to register '千 looks similar to both 于 and 干': ###
input.write [ '千', 'shape/similarity', [ '于', '干', ], ]
input.write [ '于', 'shape/similarity', [ '干', '千', ], ]
input.write [ '干', 'shape/similarity', [ '千', '于', ], ]
### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0686f---', null ], '千', [], [] ] ], ]
input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0019f---', null ], '于', [], [] ] ], ]
input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0020f---', null ], '干', [], [] ] ], ]
#.......................................................................................................
### Two sub-factorial renderings of 千 as 亻一 and 丿十: ###
input.write [ '亻', 'guide/kwic/v3/sortcode', [ [ [ '0774f---', null ], '亻', [], [] ] ], ]
input.write [ '一', 'guide/kwic/v3/sortcode', [ [ [ '0000f---', null ], '一', [], [] ] ], ]
input.write [ '丿', 'guide/kwic/v3/sortcode', [ [ [ '0645f---', null ], '丿', [], [] ] ], ]
input.write [ '十', 'guide/kwic/v3/sortcode', [ [ [ '0104f---', null ], '十', [], [] ] ], ]
input.write [
[ '千', 'guide/lineup/uchr', '亻一', ], 'guide/kwic/v3/sortcode', [
[ [ '0774f---', '0000f---', null, ], [ '亻', [ '一', ], [] ], ]
[ [ '0000f---', null, '0774f---', ], [ '一', [], [ '亻', ] ], ]
] ]
input.write [
[ '千', 'guide/lineup/uchr', '丿十', ], 'guide/kwic/v3/sortcode', [
[ [ '0645f---', '0104f---', null, ], [ '丿', [ '十', ], [] ], ]
[ [ '0104f---', null, '0645f---', ], [ '十', [], [ '丿', ] ], ]
] ]
#.......................................................................................................
input.end()
#.........................................................................................................
show = ( handler ) ->
input = HOLLERITH.create_phrasestream db
input
.pipe D.$observe ( phrase ) =>
info JSON.stringify phrase
.pipe D.$on_end ->
handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
# yield feed_test_data db, probes_idx, resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "use non-string subjects in phrases (2)" ] = ( T, done ) ->
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ '千', 'reading/py/base', [ 'qian', ], ]
input.write [ '于', 'reading/py/base', [ 'yu', ], ]
input.write [ '干', 'reading/py/base', [ 'gan', ], ]
#.......................................................................................................
### Three phrases to register '千 looks similar to both 于 and 干': ###
input.write [ '千', 'shape/similarity', [ '于', '干', ], ]
input.write [ '于', 'shape/similarity', [ '干', '千', ], ]
input.write [ '干', 'shape/similarity', [ '千', '于', ], ]
### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
input.write [ [ '千', 'shape/similarity', [ '于', '干', ], 0, ], 'reading/py/base', [ 'qian', ], ]
input.write [ [ '于', 'shape/similarity', [ '千', '干', ], 0, ], 'reading/py/base', [ 'yu', ], ]
input.write [ [ '干', 'shape/similarity', [ '千', '于', ], 0, ], 'reading/py/base', [ 'gan', ], ]
#.......................................................................................................
input.write [ [ '千', 'reading/py/base', [ 'qian', ], 0, ], 'shape/similarity', [ '于', '干', ], ]
input.write [ [ '于', 'reading/py/base', [ 'yu', ], 0, ], 'shape/similarity', [ '千', '干', ], ]
input.write [ [ '干', 'reading/py/base', [ 'gan', ], 0, ], 'shape/similarity', [ '千', '于', ], ]
#.......................................................................................................
input.end()
#.........................................................................................................
show = ( handler ) ->
input = HOLLERITH.create_phrasestream db
input
.pipe D.$observe ( phrase ) =>
info JSON.stringify phrase
.pipe D.$on_end ->
handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
# yield feed_test_data db, probes_idx, resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "use non-string subjects in phrases (3)" ] = ( T, done ) ->
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
### Readings for 3 glyphs: ###
input.write [ [ '千', ], 'reading/py/base', [ 'qian', ], ]
input.write [ [ '于', ], 'reading/py/base', [ 'yu', ], ]
input.write [ [ '干', ], 'reading/py/base', [ 'gan', ], ]
#.......................................................................................................
### Three phrases to register '千 looks similar to both 于 and 干': ###
input.write [ [ '千', ], 'shape/similarity', [ '于', '干', ], ]
input.write [ [ '于', ], 'shape/similarity', [ '干', '千', ], ]
input.write [ [ '干', ], 'shape/similarity', [ '千', '于', ], ]
#.......................................................................................................
### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
### (1) these will lead from reading to similarity, as in
`["pos","reading/py/base","gan",["干","shape/similarity",["千","于"]],0]`, meaning these phrases
are suitable for building a dictionary organzed by Pinyin readings with cross-references
to similar characters: ###
input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'reading/py/base', [ 'qian', ], ]
input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'reading/py/base', [ 'yu', ], ]
input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'reading/py/base', [ 'gan', ], ]
#.......................................................................................................
### (2) these will lead from similarity to reading, as in
`["pos","shape/similarity","于",["千","reading/py/base",["qian"]],0]` ###
input.write [ [ '千', 'reading/py/base', [ 'qian', ], ], 'shape/similarity', [ '于', '干', ], ]
input.write [ [ '于', 'reading/py/base', [ 'yu', ], ], 'shape/similarity', [ '千', '干', ], ]
input.write [ [ '干', 'reading/py/base', [ 'gan', ], ], 'shape/similarity', [ '千', '于', ], ]
#.......................................................................................................
input.end()
#.........................................................................................................
show = ( handler ) ->
input = HOLLERITH.create_phrasestream db
input
.pipe D.$observe ( phrase ) =>
info JSON.stringify phrase
.pipe D.$on_end ->
handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
# yield feed_test_data db, probes_idx, resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "use non-string subjects in phrases (4)" ] = ( T, done ) ->
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
### Readings for 3 glyphs: ###
input.write [ [ '千', ], 'reading/py/base', [ 'qian', ], ]
input.write [ [ '于', ], 'reading/py/base', [ 'yu', 'foo', ], ]
input.write [ [ '干', ], 'reading/py/base', [ 'gan', ], ]
input.write [ [ '人', ], 'reading/py/base', [ 'ren', ], ]
#.......................................................................................................
### Three phrases to register '千 looks similar to both 于 and 干': ###
# input.write [ [ '千', ], 'shape/similarity', [ '于', '干', ], ]
# input.write [ [ '于', ], 'shape/similarity', [ '干', '千', ], ]
# input.write [ [ '干', ], 'shape/similarity', [ '千', '于', ], ]
#.......................................................................................................
### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
### (1) these will lead from reading to similarity, as in
`["pos","reading/py/base","gan",["干","shape/similarity",["千","于"]],0]`, meaning these phrases
are suitable for building a dictionary organzed by Pinyin readings with cross-references
to similar characters: ###
# input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'reading/py/base', 'qian', ]
# input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'reading/py/base', 'yu', ]
# input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'reading/py/base', 'gan', ]
input.write [ [ '千', 'shape/similarity', '于', ], 'reading/py/base', 'qian', ]
input.write [ [ '千', 'shape/similarity', '干', ], 'reading/py/base', 'qian', ]
input.write [ [ '于', 'shape/similarity', '千', ], 'reading/py/base', 'yu', ]
input.write [ [ '于', 'shape/similarity', '干', ], 'reading/py/base', 'yu', ]
input.write [ [ '干', 'shape/similarity', '千', ], 'reading/py/base', 'gan', ]
input.write [ [ '干', 'shape/similarity', '于', ], 'reading/py/base', 'gan', ]
input.write [ [ '于', 'shape/similarity', '千', 1, ], 'reading/py/base', 'foo', ]
input.write [ [ '于', 'shape/similarity', '干', 2, ], 'reading/py/base', 'foo', ]
#.......................................................................................................
# ### (2) these will lead from similarity to reading, as in
# `["pos","shape/similarity","于",["千","reading/py/base",["qian"]],0]`. These phrases carry the same
# information as the corresponding ones in `use non-string subjects in phrases (3)`, above,
# but here the referenced similarity phrases have singular objects; consequently, subject / predicate
# pairs may be repeated, which is why introducing an index is mandatory. As such, the index
# need not be a number or for meaningful series—it only needs to be unique within the respective
# group: ###
# input.write [ [ '千', 'reading/py/base', [ 'qian', ], 0, ], 'shape/similarity', '于', ]
# input.write [ [ '千', 'reading/py/base', [ 'qian', ], 1, ], 'shape/similarity', '干', ]
# input.write [ [ '于', 'reading/py/base', [ 'yu', ], 0, ], 'shape/similarity', '千', ]
# input.write [ [ '于', 'reading/py/base', [ 'yu', ], 1, ], 'shape/similarity', '干', ]
# input.write [ [ '干', 'reading/py/base', [ 'gan', ], 0, ], 'shape/similarity', '千', ]
# input.write [ [ '干', 'reading/py/base', [ 'gan', ], 1, ], 'shape/similarity', '于', ]
#.......................................................................................................
input.end()
#.........................................................................................................
show = ( handler ) ->
query = { prefix: [ 'pos', ], star: '*', }
input = HOLLERITH.create_phrasestream db #, query
input
.pipe D.$observe ( phrase ) =>
info JSON.stringify phrase
.pipe D.$on_end ->
handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
# yield feed_test_data db, probes_idx, resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "binary indexing" ] = ( T, done ) ->
#.........................................................................................................
$index = ( from_predicate, to_predicate, settings = {} ) =>
from_is_plural = settings[ 'from' ] is 'plural'
to_is_plural = settings[ 'to' ] is 'plural'
from_cache = {}
to_cache = {}
#.......................................................................................................
new_index_phrase = ( tsbj, tprd, tobj, fprd, fobj, tsbj_is_list, idx = 0 ) =>
return [ [ tsbj..., tprd, idx, tobj, ], fprd, fobj, ] if tsbj_is_list
return [ [ tsbj, tprd, idx, tobj, ], fprd, fobj, ]
#.......................................................................................................
link = ( from_phrase, to_phrase ) =>
[ fsbj, fprd, fobj, ] = from_phrase
[ tsbj, tprd, tobj, ] = to_phrase
tsbj_is_list = CND.isa_list tsbj
#.....................................................................................................
unless from_is_plural or to_is_plural
# fs ts
return [ new_index_phrase tsbj, tprd, tobj, fprd, fobj, tsbj_is_list ]
#.....................................................................................................
idx = -1
R = []
if from_is_plural
# fp tp
if to_is_plural
for sub_fobj in fobj
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, sub_fobj, tsbj_is_list, idx
else
# fp ts
for sub_fobj in fobj
idx += +1
R.push new_index_phrase tsbj, tprd, tobj, fprd, sub_fobj, tsbj_is_list, idx
else
# fs tp
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, fobj, tsbj_is_list, idx
#.....................................................................................................
return R
#.......................................................................................................
return $ ( phrase, send ) =>
send phrase
[ sbj, prd, obj, ] = phrase
#.....................................................................................................
switch prd
#...................................................................................................
when from_predicate
sbj_txt = JSON.stringify sbj
if ( to_phrase = to_cache[ sbj_txt ] )?
delete to_cache[ sbj_txt ]
send index_phrase for index_phrase in link phrase, to_phrase
else
from_cache[ sbj_txt ] = phrase
#...................................................................................................
when to_predicate
sbj_txt = JSON.stringify sbj
if ( from_phrase = from_cache[ sbj_txt ] )?
delete from_cache[ sbj_txt ]
send index_phrase for index_phrase in link from_phrase, phrase
else
to_cache[ sbj_txt ] = phrase
#.....................................................................................................
return null
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe $index 'reading', 'variant', { from: 'plural', to: 'plural', }
.pipe $index 'reading', 'similarity', { from: 'plural', to: 'plural', }
.pipe $index 'reading', 'strokeorder', { from: 'plural', to: 'singular', }
.pipe $index 'strokeorder', 'reading', { from: 'singular', to: 'plural', }
.pipe $index 'strokeorder', 'usagecode', { from: 'singular', to: 'singular', }
# .pipe $index 'strokeorder', 'variant', { from: 'singular', to: 'plural', }
# .pipe $index 'strokeorder', 'similarity', { from: 'singular', to: 'plural', }
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
input.write [ [ '千', ], 'strokeorder', '312', ]
input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
input.write [ [ '<NAME>', ], 'strokeorder', '32312', ]
input.write [ [ '<NAME>', ], 'usagecode', 'CJKTHm', ]
input.write [ [ '<NAME>', ], 'reading', [ 'qian', ], ]
input.write [ [ '<NAME>', ], 'strokeorder', '122125112125221134515454', ]
input.write [ [ '<NAME>', ], 'usagecode', 'KTHm', ]
input.write [ [ '<NAME>', ], 'reading', [ 'qian', ], ]
#.......................................................................................................
# input.write [ ["千","variant",0,"仟"],"strokeorder","312"]
# input.write [ ["千","variant",1,"韆"],"strokeorder","312"]
# input.write [ ["千","variant",0,"仟",'usagecode','CJKTHm'],"strokeorder","312"]
# input.write [ ["千","variant",1,"韆",'usagecode','KTHm'],"strokeorder","312"]
#.......................................................................................................
# input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
# input.write [ [ '于', ], 'variant', [ '於', '亐', ], ]
# input.write [ [ '干', ], 'variant', [ '乾', '幹', '榦', '亁', '乹', ], ]
# input.write [ [ '人', ], 'variant', [ '亻', '𠔽', ], ]
# input.write [ [ '仁', ], 'variant', [ '忈', ], ]
# #.......................................................................................................
# input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '于', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '干', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '人', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '<NAME>', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
# input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
# input.write [ [ '於', ], 'usagecode', 'cJKTHM', ]
# input.write [ [ '亐', ], 'usagecode', 'K', ]
# input.write [ [ '乾', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '幹', ], 'usagecode', 'JKTHM', ]
# input.write [ [ '榦', ], 'usagecode', 'THm', ]
# input.write [ [ '亻', ], 'usagecode', 'p', ]
# #.......................................................................................................
# input.write [ [ '千', ], 'reading', [ 'qian', ], ]
# input.write [ [ '于', ], 'reading', [ 'yu', 'foo', 'bar', ], ]
# input.write [ [ '干', ], 'reading', [ 'gan', 'ほす', ], ]
# input.write [ [ '人', ], 'reading', [ 'ren', ], ]
# input.write [ [ '<NAME>', ], 'reading', [ 'ren', ], ]
# input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
# input.write [ [ '于', ], 'similarity', [ '干', '千', ], ]
# input.write [ [ '干', ], 'similarity', [ '千', '于', ], ]
# #.......................................................................................................
# input.write [ [ '千', ], 'strokeorder', '312', ]
# input.write [ [ '于', ], 'strokeorder', '112', ]
# input.write [ [ '干', ], 'strokeorder', '112', ]
# input.write [ [ '人', ], 'strokeorder', '34', ]
# input.write [ [ '<NAME>', ], 'strokeorder', '3211', ]
# input.write [ [ '<NAME>', ], 'strokeorder', '32312', ]
# input.write [ [ '<NAME>', ], 'strokeorder', '122125112125221134515454', ]
# input.write [ [ '於', ], 'strokeorder', '41353444', ]
# input.write [ [ '<NAME>', ], 'strokeorder', '115', ]
# input.write [ [ '<NAME>', ], 'strokeorder', '12251112315', ]
# input.write [ [ '幹', ], 'strokeorder', '1225111231112', ]
# input.write [ [ '<NAME>', ], 'strokeorder', '12251112341234', ]
# input.write [ [ '亻', ], 'strokeorder', '32', ]
#.......................................................................................................
input.end()
#.........................................................................................................
matchers = [
["pos","reading","bar",["千"],2]
["pos","reading","bar",["千","similarity",4,"于"]]
["pos","reading","bar",["千","similarity",5,"干"]]
["pos","reading","bar",["千","strokeorder",2,"312"]]
["pos","reading","bar",["千","variant",4,"仟"]]
["pos","reading","bar",["千","variant",5,"韆"]]
["pos","reading","foo",["千"],1]
["pos","reading","foo",["千","similarity",2,"于"]]
["pos","reading","foo",["千","similarity",3,"干"]]
["pos","reading","foo",["千","strokeorder",1,"312"]]
["pos","reading","foo",["千","variant",2,"仟"]]
["pos","reading","foo",["千","variant",3,"韆"]]
["pos","reading","qian",["仟"],0]
["pos","reading","qian",["仟","strokeorder",0,"32312"]]
["pos","reading","qian",["千"],0]
["pos","reading","qian",["千","similarity",0,"于"]]
["pos","reading","qian",["千","similarity",1,"干"]]
["pos","reading","qian",["千","strokeorder",0,"312"]]
["pos","reading","qian",["千","variant",0,"仟"]]
["pos","reading","qian",["千","variant",1,"韆"]]
["pos","reading","qian",["韆"],0]
["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
["pos","similarity","于",["千"],0]
["pos","similarity","干",["千"],1]
["pos","strokeorder","122125112125221134515454",["韆"]]
["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
["pos","strokeorder","122125112125221134515454",["韆","usagecode",0,"KTHm"]]
["pos","strokeorder","312",["千"]]
["pos","strokeorder","312",["千","reading",0,"qian"]]
["pos","strokeorder","312",["千","reading",1,"foo"]]
["pos","strokeorder","312",["千","reading",2,"bar"]]
["pos","strokeorder","312",["千","usagecode",0,"CJKTHM"]]
["pos","strokeorder","32312",["仟"]]
["pos","strokeorder","32312",["仟","reading",0,"qian"]]
["pos","strokeorder","32312",["仟","usagecode",0,"CJKTHm"]]
["pos","usagecode","CJKTHM",["千"]]
["pos","usagecode","CJKTHm",["仟"]]
["pos","usagecode","KTHm",["韆"]]
["pos","variant","仟",["千"],0]
["pos","variant","韆",["千"],1]
]
#.........................................................................................................
show = ( handler ) ->
query = { prefix: [ 'pos', ], star: '*', }
# query = { prefix: [ 'pos', 'strokeorder', '312', ], star: '*', }
input = HOLLERITH.create_phrasestream db, query
input
.pipe D.$observe ( phrase ) => info JSON.stringify phrase
#.....................................................................................................
.pipe do =>
idx = -1
return D.$observe ( phrase ) =>
idx += +1
T.eq phrase, matchers[ idx ]
#.....................................................................................................
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "n-ary indexing (1)" ] = ( T, done ) ->
  ### Prototype of n-ary indexing built locally (the library version is exercised in
  "n-ary indexing (2)"): `$index` watches the phrase stream and, once a subject has been
  seen with *both* predicates of a two-step index description, emits derived index phrases
  that link the 'to'-predicate with the 'from'-predicate. All phrases are written to the DB
  and read back in sort order against `matchers`. ###
  #.........................................................................................................
  $index = ( descriptions ) =>
    predicates = []
    predicate_count = 0
    arities = []
    phrases = []
    phrase_counts = {}
    #.......................................................................................................
    for predicate, arity of descriptions
      predicate_count += +1
      unless arity in [ 'singular', 'plural', ]
        throw new Error "expected 'singular' or 'plural' for arity, got #{rpr arity}"
      predicates.push predicate
      phrases.push {}
      arities.push arity
    #.......................................................................................................
    ### FIX: was `predicate_count.length`, which is always `undefined` on a number, so neither
    guard could ever fire; compare the count itself. ###
    if predicate_count < 2
      throw new Error "expected at least two predicate descriptions, got #{predicates.length}"
    if predicate_count > 2
      throw new Error "indexes with more than 2 steps not supported yet"
    #.......................................................................................................
    # Build a single index phrase; `idx` disambiguates multiple values of a plural predicate.
    new_index_phrase = ( tsbj, tprd, tobj, fprd, fobj, tsbj_is_list, idx = 0 ) =>
      return [ [ tsbj..., tprd, idx, tobj, ], fprd, fobj, ] if tsbj_is_list
      return [ [ tsbj, tprd, idx, tobj, ], fprd, fobj, ]
    #.......................................................................................................
    # Given the pair of collected phrases for one subject, produce all derived index phrases,
    # taking the singular/plural arity of each step into account.
    link = ( phrases ) =>
      throw new Error "indexes with anything but 2 steps not supported yet" if phrases.length != 2
      [ from_phrase, to_phrase, ] = phrases
      [ fsbj, fprd, fobj, ] = from_phrase
      [ tsbj, tprd, tobj, ] = to_phrase
      tsbj_is_list = CND.isa_list tsbj
      from_is_plural = arities[ 0 ] is 'plural'
      to_is_plural = arities[ 1 ] is 'plural'
      #.....................................................................................................
      unless from_is_plural or to_is_plural
        return [ new_index_phrase tsbj, tprd, tobj, fprd, fobj, tsbj_is_list ]
      #.....................................................................................................
      idx = -1
      R = []
      if from_is_plural
        if to_is_plural
          # Cartesian product: every 'from' value against every 'to' value.
          for sub_fobj in fobj
            for sub_tobj in tobj
              idx += +1
              R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, sub_fobj, tsbj_is_list, idx
        else
          for sub_fobj in fobj
            idx += +1
            R.push new_index_phrase tsbj, tprd, tobj, fprd, sub_fobj, tsbj_is_list, idx
      else
        for sub_tobj in tobj
          idx += +1
          R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, fobj, tsbj_is_list, idx
      #.....................................................................................................
      return R
    #.......................................................................................................
    return $ ( phrase, send ) =>
      send phrase
      [ sbj, prd, obj, ] = phrase
      return unless ( prd_idx = predicates.indexOf prd ) >= 0
      sbj_txt = JSON.stringify sbj
      # NOTE `phrases` is a list, but here it is additionally keyed by the subject's JSON text;
      # JS arrays accept string-keyed properties, so both uses coexist.
      phrase_target = phrases[ sbj_txt ]?= []
      phrase_target[ prd_idx ] = phrase
      phrase_counts[ sbj_txt ] = ( phrase_counts[ sbj_txt ] ? 0 ) + 1
      return null if phrase_counts[ sbj_txt ] < predicate_count
      #.....................................................................................................
      send index_phrase for index_phrase in link phrases[ sbj_txt ]
      return null
  #.........................................................................................................
  # Pipe the sample phrases through all six index transforms, then into the DB.
  write_data = ( handler ) ->
    input = D.create_throughstream()
    #.......................................................................................................
    input
      .pipe $index 'reading': 'plural', 'similarity': 'plural'
      .pipe $index 'reading': 'plural', 'variant': 'plural'
      .pipe $index 'reading': 'plural', 'strokeorder': 'singular'
      .pipe $index 'strokeorder': 'singular', 'reading': 'plural'
      .pipe $index 'strokeorder': 'singular', 'variant': 'plural'
      .pipe $index 'strokeorder': 'singular', 'similarity': 'plural'
      .pipe HOLLERITH.$write db
      .pipe D.$on_end ->
        handler()
    #.......................................................................................................
    ### FIX: the subject of the last three phrases had been lost to redaction ('<NAME>'); restored
    as '韆' to match both the expectations in `matchers` below and the identical write sequence in
    "n-ary indexing (2)". ###
    input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
    input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
    input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
    input.write [ [ '千', ], 'strokeorder', '312', ]
    input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
    input.write [ [ '仟', ], 'strokeorder', '32312', ]
    input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
    input.write [ [ '仟', ], 'reading', [ 'qian', ], ]
    input.write [ [ '韆', ], 'strokeorder', '122125112125221134515454', ]
    input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
    input.write [ [ '韆', ], 'reading', [ 'qian', ], ]
    #.......................................................................................................
    input.end()
  #.........................................................................................................
  # Expected phrases, in DB sort order.
  matchers = [
    ["pos","reading","bar",["千"],2]
    ["pos","reading","bar",["千","similarity",4,"于"]]
    ["pos","reading","bar",["千","similarity",5,"干"]]
    ["pos","reading","bar",["千","strokeorder",2,"312"]]
    ["pos","reading","bar",["千","variant",4,"仟"]]
    ["pos","reading","bar",["千","variant",5,"韆"]]
    ["pos","reading","foo",["千"],1]
    ["pos","reading","foo",["千","similarity",2,"于"]]
    ["pos","reading","foo",["千","similarity",3,"干"]]
    ["pos","reading","foo",["千","strokeorder",1,"312"]]
    ["pos","reading","foo",["千","variant",2,"仟"]]
    ["pos","reading","foo",["千","variant",3,"韆"]]
    ["pos","reading","qian",["仟"],0]
    ["pos","reading","qian",["仟","strokeorder",0,"32312"]]
    ["pos","reading","qian",["千"],0]
    ["pos","reading","qian",["千","similarity",0,"于"]]
    ["pos","reading","qian",["千","similarity",1,"干"]]
    ["pos","reading","qian",["千","strokeorder",0,"312"]]
    ["pos","reading","qian",["千","variant",0,"仟"]]
    ["pos","reading","qian",["千","variant",1,"韆"]]
    ["pos","reading","qian",["韆"],0]
    ["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
    ["pos","similarity","于",["千"],0]
    ["pos","similarity","干",["千"],1]
    ["pos","strokeorder","122125112125221134515454",["韆"]]
    ["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
    ["pos","strokeorder","312",["千"]]
    ["pos","strokeorder","312",["千","reading",0,"qian"]]
    ["pos","strokeorder","312",["千","reading",1,"foo"]]
    ["pos","strokeorder","312",["千","reading",2,"bar"]]
    ["pos","strokeorder","312",["千","similarity",0,"于"]]
    ["pos","strokeorder","312",["千","similarity",1,"干"]]
    ["pos","strokeorder","312",["千","variant",0,"仟"]]
    ["pos","strokeorder","312",["千","variant",1,"韆"]]
    ["pos","strokeorder","32312",["仟"]]
    ["pos","strokeorder","32312",["仟","reading",0,"qian"]]
    ["pos","usagecode","CJKTHM",["千"]]
    ["pos","usagecode","CJKTHm",["仟"]]
    ["pos","usagecode","KTHm",["韆"]]
    ["pos","variant","仟",["千"],0]
    ["pos","variant","韆",["千"],1]
    ]
  #.........................................................................................................
  # Stream all 'pos' phrases back out of the DB and compare each against `matchers` in order.
  show = ( handler ) ->
    query = { prefix: [ 'pos', ], star: '*', }
    input = HOLLERITH.create_phrasestream db, query
    input
      .pipe D.$observe ( phrase ) => info JSON.stringify phrase
      #.....................................................................................................
      .pipe do =>
        idx = -1
        return D.$observe ( phrase ) =>
          idx += +1
          T.eq phrase, matchers[ idx ]
      #.....................................................................................................
      .pipe D.$on_end -> handler()
  #.........................................................................................................
  step ( resume ) =>
    yield clear_leveldb db[ '%self' ], resume
    yield write_data resume
    yield show resume
    done()
#-----------------------------------------------------------------------------------------------------------
@[ "n-ary indexing (2)" ] = ( T, done ) ->
  ### Same dataset and expected index phrases as "n-ary indexing (1)", but the derived index
  phrases are produced by the library's own `HOLLERITH.$index` transform instead of a local
  prototype. ###
  #.........................................................................................................
  # Pipe the sample phrases through all six index transforms, then into the DB.
  write_data = ( handler ) ->
    input = D.create_throughstream()
    #.......................................................................................................
    input
      .pipe HOLLERITH.$index 'reading': 'plural', 'similarity': 'plural'
      .pipe HOLLERITH.$index 'reading': 'plural', 'variant': 'plural'
      .pipe HOLLERITH.$index 'reading': 'plural', 'strokeorder': 'singular'
      .pipe HOLLERITH.$index 'strokeorder': 'singular', 'reading': 'plural'
      .pipe HOLLERITH.$index 'strokeorder': 'singular', 'variant': 'plural'
      .pipe HOLLERITH.$index 'strokeorder': 'singular', 'similarity': 'plural'
      .pipe HOLLERITH.$write db
      .pipe D.$on_end ->
        handler()
    #.......................................................................................................
    input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
    input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
    input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
    input.write [ [ '千', ], 'strokeorder', '312', ]
    input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
    input.write [ [ '仟', ], 'strokeorder', '32312', ]
    input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
    input.write [ [ '仟', ], 'reading', [ 'qian', ], ]
    input.write [ [ '韆', ], 'strokeorder', '122125112125221134515454', ]
    input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
    input.write [ [ '韆', ], 'reading', [ 'qian', ], ]
    #.......................................................................................................
    input.end()
  #.........................................................................................................
  # Expected phrases, in DB sort order.
  matchers = [
    ["pos","reading","bar",["千"],2]
    ["pos","reading","bar",["千","similarity",4,"于"]]
    ["pos","reading","bar",["千","similarity",5,"干"]]
    ["pos","reading","bar",["千","strokeorder",2,"312"]]
    ["pos","reading","bar",["千","variant",4,"仟"]]
    ["pos","reading","bar",["千","variant",5,"韆"]]
    ["pos","reading","foo",["千"],1]
    ["pos","reading","foo",["千","similarity",2,"于"]]
    ["pos","reading","foo",["千","similarity",3,"干"]]
    ["pos","reading","foo",["千","strokeorder",1,"312"]]
    ["pos","reading","foo",["千","variant",2,"仟"]]
    ["pos","reading","foo",["千","variant",3,"韆"]]
    ["pos","reading","qian",["仟"],0]
    ["pos","reading","qian",["仟","strokeorder",0,"32312"]]
    ["pos","reading","qian",["千"],0]
    ["pos","reading","qian",["千","similarity",0,"于"]]
    ["pos","reading","qian",["千","similarity",1,"干"]]
    ["pos","reading","qian",["千","strokeorder",0,"312"]]
    ["pos","reading","qian",["千","variant",0,"仟"]]
    ["pos","reading","qian",["千","variant",1,"韆"]]
    ["pos","reading","qian",["韆"],0]
    ["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
    ["pos","similarity","于",["千"],0]
    ["pos","similarity","干",["千"],1]
    ["pos","strokeorder","122125112125221134515454",["韆"]]
    ["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
    ["pos","strokeorder","312",["千"]]
    ["pos","strokeorder","312",["千","reading",0,"qian"]]
    ["pos","strokeorder","312",["千","reading",1,"foo"]]
    ["pos","strokeorder","312",["千","reading",2,"bar"]]
    ["pos","strokeorder","312",["千","similarity",0,"于"]]
    ["pos","strokeorder","312",["千","similarity",1,"干"]]
    ["pos","strokeorder","312",["千","variant",0,"仟"]]
    ["pos","strokeorder","312",["千","variant",1,"韆"]]
    ["pos","strokeorder","32312",["仟"]]
    ["pos","strokeorder","32312",["仟","reading",0,"qian"]]
    ["pos","usagecode","CJKTHM",["千"]]
    ["pos","usagecode","CJKTHm",["仟"]]
    ["pos","usagecode","KTHm",["韆"]]
    ["pos","variant","仟",["千"],0]
    ["pos","variant","韆",["千"],1]
    ]
  #.........................................................................................................
  # Stream all 'pos' phrases back out of the DB and compare each against `matchers` in order.
  show = ( handler ) ->
    query = { prefix: [ 'pos', ], star: '*', }
    input = HOLLERITH.create_phrasestream db, query
    input
      .pipe D.$observe ( phrase ) => info JSON.stringify phrase
      #.....................................................................................................
      .pipe do =>
        idx = -1
        return D.$observe ( phrase ) =>
          idx += +1
          T.eq phrase, matchers[ idx ]
      #.....................................................................................................
      .pipe D.$on_end -> handler()
  #.........................................................................................................
  # Erase the DB, write, then verify — strictly in that order.
  step ( resume ) =>
    yield clear_leveldb db[ '%self' ], resume
    yield write_data resume
    yield show resume
    done()
#-----------------------------------------------------------------------------------------------------------
@_prune = ->
  # Delete every registered test whose name is not whitelisted in the file-level
  # `include` list; names starting with '_' (private members) are always kept.
  for key of @
    continue if key.startsWith '_'
    delete @[ key ] if ( include.indexOf key ) is -1
  return null
############################################################################################################
unless module.parent?
  # Run the suite only when this file is executed directly (not `require`d).
  # The `include` list names the tests to keep when `@_prune()` is enabled; all other
  # entries here are deliberately commented out.
  # debug '0980', JSON.stringify ( Object.keys @ ), null, ' '
  include = [
    # "write without error (1)"
    # "write without error (2)"
    # "read without error"
    # "read keys without error (1)"
    # "read keys without error (2)"
    # "read keys without error (3)"
    # "read keys without error (4)"
    # "create_facetstream throws with wrong arguments"
    # "read POS facets"
    # "read POS phrases (1)"
    # "read POS phrases (2)"
    # "read SPO phrases"
    # "sorting (1)"
    # "sorting (2)"
    # "H2 codec `encode` throws on anything but a list"
    # "sort texts with H2 codec (1)"
    # "sort texts with H2 codec (2)"
    # "sort numbers with H2 codec (1)"
    # "sort mixed values with H2 codec"
    # "sort lists of mixed values with H2 codec"
    # "sort routes with values (1)"
    # "sort routes with values (2)"
    # "read sample data"
    # "read and write keys with lists"
    # "encode keys with list elements"
    # "read and write phrases with unanalyzed lists"
    # "read partial POS phrases"
    # "read single phrases (1)"
    # "read single phrases (2)"
    # "read single phrases (3)"
    # "read single phrases (4)"
    # "writing phrases with non-unique keys fails"
    # "reminders"
    # "invalid key not accepted (1)"
    # "invalid key not accepted (2)"
    # "catching errors (2)"
    # "catching errors (1)"
    # "building PODs from SPO phrases"
    # "read phrases in lockstep"
    # "has_any yields existence of key"
    # "$write rejects duplicate S/P pairs"
    # "codec accepts long keys"
    # "write private types (1)"
    # "write private types (2)"
    # "write private types (3)"
    # "bloom filter serialization without writes"
    # "use non-string subjects in phrases"
    # '$write rejects duplicate S/P pairs'
    # 'codec accepts long keys'
    # 'write private types (1)'
    # 'use non-string subjects in phrases (1)'
    # 'use non-string subjects in phrases (2)'
    # 'use non-string subjects in phrases (3)'
    # 'use non-string subjects in phrases (4)'
    # 'binary indexing'
    'n-ary indexing (1)'
    'n-ary indexing (2)'
    # "Pinyin Unicode Sorting"
    # "ensure `Buffer.compare` gives same sorting as LevelDB"
    ]
  # @_prune()
  @_main()
  # @[ "XXX" ] null, -> help "(done)"
  # @[ "YYY" ] null, -> help "(done)"
  # @[ "ZZZ" ] null, -> help "(done)"
  # debug '©P9AOR', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'null' ] ).toString 16
  # debug '©xxmIp', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'false' ] ).toString 16
  # debug '©ZeY26', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'true' ] ).toString 16
  # debug '©WgER9', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'date' ] ).toString 16
  # debug '©UmpjJ', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'ninfinity' ] ).toString 16
  # debug '©Url0K', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'nnumber' ] ).toString 16
  # debug '©nFIIi', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'pnumber' ] ).toString 16
  # debug '©LZ58R', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'pinfinity' ] ).toString 16
  # debug '©MYxda', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'text' ] ).toString 16
| true |
############################################################################################################
njs_path = require 'path'
# njs_fs = require 'fs'
join = njs_path.join
#...........................................................................................................
CND = require 'cnd'
rpr = CND.rpr
badge = 'HOLLERITH/tests'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
#...........................................................................................................
suspend = require 'coffeenode-suspend'
step = suspend.step
after = suspend.after
# eventually = suspend.eventually
### TAINT experimentally using `later` in place of `setImmediate` ###
later = suspend.immediately
#...........................................................................................................
test = require 'guy-test'
#...........................................................................................................
D = require 'pipedreams'
$ = D.remit.bind D
$async = D.remit_async.bind D
#...........................................................................................................
HOLLERITH = require './main'
db = null
#...........................................................................................................
levelup = require 'level'
leveldown = require 'leveldown'
CODEC = require 'hollerith-codec'
#...........................................................................................................
ƒ = CND.format_number
# #-----------------------------------------------------------------------------------------------------------
# @_sort_list = ( list ) ->
# @_encode_list list
# list.sort Buffer.compare
# @_decode_list list
# return list
#===========================================================================================================
# HELPERS
#-----------------------------------------------------------------------------------------------------------
show_keys_and_key_bfrs = ( keys, key_bfrs ) ->
  # Log a two-column table pairing each key's text representation with the hex bytes of
  # its encoded buffer; escaped NUL bytes in the text form are rendered as '∇'.
  hexify = ( buffer ) ->
    pairs = ( chunk for chunk in ( buffer.toString 'hex' ).split /(..)/ when chunk isnt '' )
    return pairs.join ' '
  #.........................................................................................................
  hex_reprs = ( hexify bfr for bfr in key_bfrs )
  rows = []
  for key, key_idx in keys
    key_txt = ( rpr key ).replace /\\u0000/g, '∇'
    rows.push { 'str': key_txt, 'bfr': hex_reprs[ key_idx ], }
  help '\n' + CND.columnify rows, { paddingChr: ' ', }
  return null
#-----------------------------------------------------------------------------------------------------------
show_db_entries = ( handler ) ->
  # Debugging helper: stream every raw entry of the test DB, drop meta entries, collect the
  # rest, log a pretty-printed facet listing, then invoke `handler`.
  input = db[ '%self' ].createReadStream()
  input
    .pipe D.$show()
    .pipe $ ( { key, value, }, send ) => send [ key, value, ]
    # Skip bookkeeping entries (bloom filter etc.) that are not user phrases.
    .pipe $ ( [ key, value, ], send ) => send [ key, value, ] unless HOLLERITH._is_meta db, key
    .pipe $ ( [ key, value, ], send ) =>
      # debug '©RluhF', ( HOLLERITH.CODEC.decode key ), ( JSON.parse value )
      send [ key, value, ]
    .pipe D.$collect()
    .pipe $ ( facets, send ) =>
      help '\n' + HOLLERITH.DUMP.rpr_of_facets db, facets
      # buffer = new Buffer JSON.stringify [ '开', '彡' ]
      # debug '©GJfL6', HOLLERITH.CODEC.rpr_of_buffer null, buffer
    .pipe D.$on_end => handler()
#-----------------------------------------------------------------------------------------------------------
get_new_db_name = ->
  # Return a fresh, unique scratch-DB route on each call; the running counter is kept
  # as a property on the function itself.
  get_new_db_name.idx = get_new_db_name.idx + 1
  "/tmp/hollerith2-testdb-#{get_new_db_name.idx}"
get_new_db_name.idx = 0
#-----------------------------------------------------------------------------------------------------------
read_all_keys = ( db, handler ) ->
  # Collect every key of `db` into a list, then call `handler null, keys` once the
  # key stream has ended.
  keys = []
  stream = db.createKeyStream()
  stream.on 'end', -> handler null, keys
  stream.pipe $ ( key, send ) => keys.push key
#-----------------------------------------------------------------------------------------------------------
clear_leveldb = ( leveldb, handler ) ->
  # Fully erase a LevelDB instance: close it, destroy its on-disk files, then re-open it.
  # The close → destroy → open order is deliberate — NOTE(review): `leveldown.destroy`
  # presumably requires the DB to be closed first; confirm against the leveldown docs.
  step ( resume ) =>
    route = leveldb[ 'location' ]
    yield leveldb.close resume
    whisper "closed LevelDB"
    yield leveldown.destroy route, resume
    whisper "destroyed LevelDB"
    yield leveldb.open resume
    whisper "re-opened LevelDB"
    # help "erased and re-opened LevelDB at #{route}"
    handler null
#-----------------------------------------------------------------------------------------------------------
@_main = ( handler ) ->
  # Open (or create) the shared on-disk test DB, then hand all registered test cases
  # to the test runner with a 2.5 s per-test timeout.
  route = join __dirname, '..', 'dbs/tests'
  db = HOLLERITH.new_db route, { size: 500, }
  test @, 'timeout': 2500
#-----------------------------------------------------------------------------------------------------------
@_feed_test_data = ( db, probes_idx, settings, handler ) ->
  ### Clear `db`, then stream test dataset number `probes_idx` (see
  `@_feed_test_data.probes` below) through `HOLLERITH.$write`. `settings` is optional and
  is passed through to the writer; `handler` is called once all phrases are written. ###
  switch arity = arguments.length
    when 3
      # Called without `settings`: shift arguments.
      handler = settings
      settings = null
    when 4
      null
    else
      throw new Error "expected 3 or 4 arguments, got #{arity}"
  #.........................................................................................................
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    whisper "writing test dataset ##{probes_idx} with settings #{rpr settings}"
    input = D.create_throughstream()
    #.......................................................................................................
    switch probes_idx
      #-----------------------------------------------------------------------------------------------------
      when -1
        # Synthetic bulk dataset: 1001 generated phrases, used only as a write smoke test.
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            handler null
            end()
        #...................................................................................................
        ### NOTE(review): the original phrase literal on the next line was lost to redaction;
        any well-formed [ sbj, prd, obj, ] triple serves here, since this dataset is never
        read back against expectations. ###
        for n in [ 0 .. 1000 ]
          key = [ "probe-#{n}", 'n', n, ]
          input.write key
          yield later resume
        input.end()
      #-----------------------------------------------------------------------------------------------------
      when 0, 2, 3, 4, 5
        # Phrase-list datasets: write each probe verbatim.
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            handler null
            end()
        #...................................................................................................
        for probe in @_feed_test_data.probes[ probes_idx ]
          # key = HOLLERITH.new_so_key db, probe...
          # debug '©WV0j2', probe
          input.write probe
          yield later resume
        input.end()
      #-----------------------------------------------------------------------------------------------------
      when 1
        # URL-style dataset: each probe is a 'so|…' key string and must be parsed first.
        input
          .pipe HOLLERITH.$write db, settings
          # .pipe D.$show()
          .pipe D.$on_end ( end ) =>
            whisper "test data written"
            end()
            handler null
        #...................................................................................................
        for url_key in @_feed_test_data.probes[ probes_idx ]
          ### FIX: the module reference here had been destroyed by redaction; restored to
          `HOLLERITH`, the only module in scope providing key helpers. ###
          key = HOLLERITH.key_from_url db, url_key
          input.write key
          yield later resume
        input.end()
      #-------------------------------------------------------------------------------------------------------
      else return handler new Error "illegal probes index #{rpr probes_idx}"
    #.........................................................................................................
    return null
#-----------------------------------------------------------------------------------------------------------
# Test datasets, selected by `probes_idx` in `@_feed_test_data` above.
@_feed_test_data.probes = []
#...........................................................................................................
### probes_idx == 0 ###
@_feed_test_data.probes.push [
  [ '𧷟1', 'guide/lineup/length', 1, ]
  [ '𧷟2', 'guide/lineup/length', 2, ]
  [ '𧷟3', 'guide/lineup/length', 3, ]
  [ '𧷟4', 'guide/lineup/length', 4, ]
  [ '𧷟', 'guide/lineup/length', 5, ]
  [ '𧷟6', 'guide/lineup/length', 6, ]
  [ '𧷟', 'cp/cid', 163295, ]
  [ '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝', ], ]
  [ '𧷟', 'rank/cjt', 5432, ]
  [ '八', 'factor/strokeclass/wbf', '34', ]
  [ '刀', 'factor/strokeclass/wbf', '5(12)3', ]
  [ '宀', 'factor/strokeclass/wbf', '44', ]
  [ '', 'factor/strokeclass/wbf', '12', ]
  [ '貝', 'factor/strokeclass/wbf', '25(12)', ]
  [ '八', 'rank/cjt', 12541, ]
  [ '刀', 'rank/cjt', 12542, ]
  [ '宀', 'rank/cjt', 12543, ]
  [ '', 'rank/cjt', 12544, ]
  [ '貝', 'rank/cjt', 12545, ]
  ]
#...........................................................................................................
### probes_idx == 1 ###
# URL-style 'so|…' key strings, parsed with `HOLLERITH.key_from_url`.
@_feed_test_data.probes.push [
  'so|glyph:劬|cp/fncr:u-cjk/52ac|0'
  'so|glyph:邭|cp/fncr:u-cjk/90ad|0'
  'so|glyph:𠴦|cp/fncr:u-cjk-xb/20d26|0'
  'so|glyph:𤿯|cp/fncr:u-cjk-xb/24fef|0'
  'so|glyph:𧑴|cp/fncr:u-cjk-xb/27474|0'
  'so|glyph:𨒡|cp/fncr:u-cjk-xb/284a1|0'
  'so|glyph:𪚧|cp/fncr:u-cjk-xb/2a6a7|0'
  'so|glyph:𪚫|cp/fncr:u-cjk-xb/2a6ab|0'
  'so|glyph:𤿯|strokeorder:352513553254|0'
  'so|glyph:𠴦|strokeorder:3525141121|0'
  'so|glyph:𨒡|strokeorder:35251454|0'
  'so|glyph:邭|strokeorder:3525152|0'
  'so|glyph:𪚫|strokeorder:352515251115115113541|0'
  'so|glyph:𪚧|strokeorder:35251525112511511|0'
  'so|glyph:𧑴|strokeorder:352515251214251214|0'
  'so|glyph:劬|strokeorder:3525153|0'
  ]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 2 ###
@_feed_test_data.probes.push [
  [ '丁', 'strokecount', 2, ]
  [ '三', 'strokecount', 3, ]
  [ '夫', 'strokecount', 5, ]
  [ '國', 'strokecount', 11, ]
  [ '形', 'strokecount', 7, ]
  [ '丁', 'componentcount', 1, ]
  [ '三', 'componentcount', 1, ]
  [ '夫', 'componentcount', 1, ]
  [ '國', 'componentcount', 4, ]
  [ '形', 'componentcount', 2, ]
  [ '丁', 'components', [ '丁', ], ]
  [ '三', 'components', [ '三', ], ]
  [ '夫', 'components', [ '夫', ], ]
  [ '國', 'components', [ '囗', '戈', '口', '一', ], ]
  [ '形', 'components', [ '开', '彡', ], ]
  ]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 3 ###
@_feed_test_data.probes.push [
  [ '丁', 'isa', [ 'glyph', 'guide', ] ]
  [ '三', 'isa', [ 'glyph', 'guide', ] ]
  [ '夫', 'isa', [ 'glyph', 'guide', ] ]
  [ '國', 'isa', [ 'glyph', ] ]
  [ '形', 'isa', [ 'glyph', ] ]
  [ 'glyph:丁', 'strokeorder/count', 2, ]
  [ 'glyph:三', 'strokeorder/count', 3, ]
  [ 'glyph:夫', 'strokeorder/count', 5, ]
  [ 'glyph:國', 'strokeorder/count', 11, ]
  [ 'glyph:形', 'strokeorder/count', 7, ]
  [ 'glyph:丁', 'guide/count', 1, ]
  [ 'glyph:三', 'guide/count', 1, ]
  [ 'glyph:夫', 'guide/count', 1, ]
  [ 'glyph:國', 'guide/count', 4, ]
  [ 'glyph:形', 'guide/count', 2, ]
  [ 'glyph:丁', 'guide/lineup', [ '丁', ], ]
  [ 'glyph:三', 'guide/lineup', [ '三', ], ]
  [ 'glyph:夫', 'guide/lineup', [ '夫', ], ]
  [ 'glyph:國', 'guide/lineup', [ '囗', '戈', '口', '一', ], ]
  [ 'glyph:形', 'guide/lineup', [ '开', '彡', ], ]
  ]
#...........................................................................................................
### probes_idx == 4 ###
# Same as dataset 0, with a few extra phrases for prefix-boundary tests.
@_feed_test_data.probes.push [
  [ '𧷟1', 'guide/lineup/length', 1, ]
  [ '𧷟2', 'guide/lineup/length', 2, ]
  [ '𧷟3', 'guide/lineup/length', 3, ]
  [ '𧷟4', 'guide/lineup/length', 4, ]
  [ '𧷟', 'guide/lineup/length', 5, ]
  [ '𧷟6', 'guide/lineup/length', 6, ]
  [ '𧷟', 'cp/cid', 163295, ]
  [ '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝', ], ]
  [ '𧷟', 'rank/cjt', 5432, ]
  [ '八', 'factor/strokeclass/wbf', '34', ]
  [ '刀', 'factor/strokeclass/wbf', '5(12)3', ]
  [ '宀', 'factor/strokeclass/wbf', '44', ]
  [ '', 'factor/strokeclass/wbf', '12', ]
  [ '貝', 'factor/strokeclass/wbf', '25(12)', ]
  [ '八', 'rank/cjt', 12541, ]
  [ '刀', 'rank/cjt', 12542, ]
  [ '宀', 'rank/cjt', 12543, ]
  [ '', 'rank/cjt', 12544, ]
  [ '貝', 'rank/cjt', 12545, ]
  [ '𧷟1', 'a', 42 ]
  [ '𧷟1', 'ab', 42 ]
  [ '𧷟1', 'guide', 'xxx' ]
  [ '𧷟1', 'guide/', 'yyy' ]
  [ '𧷟1', 'z', 42 ]
  ]
#-----------------------------------------------------------------------------------------------------------
### probes_idx == 5 ###
@_feed_test_data.probes.push [
  [ '丁', 'strokecount', 2, ]
  # [ '三', 'strokecount', 3, ]
  # [ '夫', 'strokecount', 5, ]
  # [ '國', 'strokecount', 11, ]
  # [ '形', 'strokecount', 7, ]
  [ '丁', 'componentcount', 1, ]
  # [ '三', 'componentcount', 1, ]
  # [ '夫', 'componentcount', 1, ]
  # [ '國', 'componentcount', 4, ]
  # [ '形', 'componentcount', 2, ]
  [ '丁', 'components', [ '丁', ], ]
  # [ '三', 'components', [ '三', ], ]
  # [ '夫', 'components', [ '夫', ], ]
  # [ '國', 'components', [ '囗', '戈', '口', '一', ], ]
  # [ '形', 'components', [ '开', '彡', ], ]
  # [ { type: 'route', value: '/foo/bar', }, 'mtime', new Date '2011-10-10T14:48:00Z', ]
  [ { type: 'route', value: '/foo/bar', }, 'mtime', 123456789, ]
  ]
# pos|guide/kwic/sortcode
# # [
# # "1027~~~~,00","0156~~~~,01,0509~~~~,02,0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,"
# # "0156~~~~,01","0509~~~~,02,0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,"
# # "0509~~~~,02","0000~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,0156~~~~,01,"
# # "0000~~~~,03","--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,1027~~~~,00,0156~~~~,01,0509~~~~,02,"
# # ]
# 0087~~~~,00,0291~~~~,01,0555~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦖈|0
# 0087~~~~,00,0291~~~~,01,0823x2h-,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|聗|0
# 0087~~~~,00,0291~~~~,01,1023~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𤋕|0
# 0087~~~~,00,0294~~~~,01,0060~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦖔|0
# 0087~~~~,00,0294~~~~,01,0555~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦗆|0
# 0087~~~~,00,0295~~~~,01,0802~~~~,02,0958~~~~,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𥪻|0
# 0087~~~~,00,0312~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦔲|0
# 0087~~~~,00,0314~~~~,01,1173~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕀|0
# 0087~~~~,00,0319~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕇|0
# 0087~~~~,00,0355~~~~,01,--------,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕆|0
# 0087~~~~,00,0373~~~~,01,0284~~~~,02,--------,03,--------,04,--------,05,--------,06,--------,07,--------,08,--------,09,--------,10,--------,11,--------,12,|𦕧|0
#-----------------------------------------------------------------------------------------------------------
@[ "write without error (1)" ] = ( T, done ) ->
  # Smoke test: write phrase dataset #0 in batches of 10; succeeding without an
  # exception is the assertion. (FIX: removed unused local `idx`.)
  probes_idx = 0
  write_settings =
    batch: 10
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, write_settings, resume
    done()
#-----------------------------------------------------------------------------------------------------------
@[ "write without error (2)" ] = ( T, done ) ->
  # Smoke test: bulk-write the synthetic dataset (#-1, 1001 generated phrases) in batches
  # of 10; succeeding without an exception is the assertion. (FIX: removed unused local `idx`.)
  probes_idx = -1
  write_settings =
    batch: 10
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, write_settings, resume
    done()
#-----------------------------------------------------------------------------------------------------------
@[ "read without error" ] = ( T, done ) ->
  # Write dataset #0, then stream every facet back out of the DB; completing the read
  # stream without an exception counts as success (values are not checked here).
  probes_idx = 0
  facet_idx = -1
  step ( resume ) =>
    yield @_feed_test_data db, probes_idx, resume
    facets = HOLLERITH.create_facetstream db
    facets
      .pipe $ ( [ key, value, ], send ) =>
        facet_idx += 1
      .pipe D.$on_end ( end ) => end; done()
#-----------------------------------------------------------------------------------------------------------
@[ "read keys without error (1)" ] = ( T, done ) ->
  # Write ten keys [ 'x', 0..9, 'x' ] directly, then read back with a raw prefix query for
  # [ 'x', 4 ] and assert exactly one matching entry is found.
  step ( resume ) =>
    yield HOLLERITH.clear db, resume
    ### TAINT awaiting better solution ###
    NULL = HOLLERITH._encode_value db, 1
    # NOTE(review): these puts are fire-and-forget (no callback/yield) before the read below;
    # presumably LevelUP orders them ahead of the read stream — confirm.
    for idx in [ 0 ... 10 ]
      key_bfr = HOLLERITH._encode_key db, [ 'x', idx, 'x', ]
      db[ '%self' ].put key_bfr, NULL
    #.......................................................................................................
    probe_idx = 4
    count = 0
    query = HOLLERITH._query_from_prefix db, [ 'x', probe_idx, ]
    # debug '©ETONp', HOLLERITH.CODEC.rpr_of_buffer key_bfr
    input = db[ '%self' ].createReadStream query
    input
      .pipe $ ( { key, value, }, send ) =>
        count += 1
        T.eq ( HOLLERITH._decode_key db, key )[ 1 ], probe_idx
      .pipe D.$on_end ( end ) =>
        T.eq count, 1
        end()
        done()
#-----------------------------------------------------------------------------------------------------------
  # Same scenario as (1), but reads through the higher-level `create_facetstream`
  # API with a `prefix` query instead of a raw LevelDB read stream.
  @[ "read keys without error (2)" ] = ( T, done ) ->
    step ( resume ) =>
      yield HOLLERITH.clear db, resume
      ### TAINT awaiting better solution ###
      NULL = HOLLERITH._encode_value db, 1
      for idx in [ 0 ... 10 ]
        db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), NULL
      #.......................................................................................................
      probe_idx = 4
      count = 0
      prefix = [ 'x', probe_idx, ]
      input = HOLLERITH.create_facetstream db, { prefix, }
      input
        .pipe $ ( facet, send ) =>
          count += 1
          [ key, value, ] = facet
          T.eq key[ 1 ], probe_idx
        .pipe D.$on_end ( end ) =>
          T.eq count, 1
          end()
          done()
#-----------------------------------------------------------------------------------------------------------
  # Range query: keys from `[ 'x', 3 ]` (inclusive) up to the upper bound derived from
  # the prefix `[ 'x', 5 ]` must yield `delta + 1` consecutive keys.
  @[ "read keys without error (3)" ] = ( T, done ) ->
    step ( resume ) =>
      yield HOLLERITH.clear db, resume
      ### TAINT awaiting better solution ###
      NULL = HOLLERITH._encode_value db, 1
      for idx in [ 0 ... 10 ]
        db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), NULL
      #.......................................................................................................
      probe_idx = 3
      count = 0
      delta = 2
      lo = [ 'x', probe_idx, ]
      hi = [ 'x', probe_idx + delta, ]
      # Combine an exact `gte` lower bound with the `lte` bound of a prefix query.
      query = { gte: ( HOLLERITH._encode_key db, lo ), lte: ( HOLLERITH._query_from_prefix db, hi )[ 'lte' ], }
      input = db[ '%self' ].createReadStream query
      input
        .pipe $ ( { key, value, }, send ) =>
          count += 1
          T.eq ( HOLLERITH._decode_key db, key )[ 1 ], probe_idx + count - 1
        .pipe D.$on_end ( end ) =>
          T.eq count, delta + 1
          end()
          done()
#-----------------------------------------------------------------------------------------------------------
  # Same range scenario as (3), but via `create_facetstream` with `lo` / `hi` bounds.
  @[ "read keys without error (4)" ] = ( T, done ) ->
    step ( resume ) =>
      yield HOLLERITH.clear db, resume
      for idx in [ 0 ... 10 ]
        db[ '%self' ].put ( HOLLERITH._encode_key db, [ 'x', idx, 'x', ] ), HOLLERITH._encode_value db, 1
      #.......................................................................................................
      probe_idx = 3
      count = 0
      delta = 2
      lo = [ 'x', probe_idx, ]
      hi = [ 'x', probe_idx + delta, ]
      input = HOLLERITH.create_facetstream db, { lo, hi, }
      input
        .pipe $ ( [ key, value, ], send ) =>
          count += 1
          T.eq key[ 1 ], probe_idx + count - 1
        .pipe D.$on_end ( end ) =>
          T.eq count, delta + 1
          end()
          done()
#-----------------------------------------------------------------------------------------------------------
@[ "create_facetstream throws with wrong arguments" ] = ( T, done ) ->
message = "illegal to specify `hi` but not `lo`"
T.throws message, ( -> HOLLERITH.create_facetstream db, hi: [ 'xxx', ] )
done()
#-----------------------------------------------------------------------------------------------------------
  # Reads POS facets for `guide/lineup/length` in the value range [ 2 .. 4 ] and
  # matches each key against the expected list.
  @[ "read POS facets" ] = ( T, done ) ->
    probes_idx = 0
    idx = -1
    #.........................................................................................................
    key_matchers = [
      [ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
      [ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
      [ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
      ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      lo = [ 'pos', 'guide/lineup/length', 2, ]
      hi = [ 'pos', 'guide/lineup/length', 4, ]
      # input = HOLLERITH.create_keystream db, lo
      input = HOLLERITH.create_facetstream db, { lo, hi, }
      input
        # .pipe HOLLERITH.$url_from_key db
        .pipe $ ( [ key, value, ], send ) =>
          idx += +1
          # NOTE(review): `phrase` is computed but unused — presumably a leftover from
          # an earlier version of this test.
          phrase = HOLLERITH.as_phrase db, key, value
          T.eq key, key_matchers[ idx ]
        .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
  # Same range as "read POS facets", but via `create_phrasestream`, matching whole phrases.
  @[ "read POS phrases (1)" ] = ( T, done ) ->
    probes_idx = 0
    idx = -1
    #.........................................................................................................
    matchers = [
      [ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
      [ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
      [ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
      ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      lo = [ 'pos', 'guide/lineup/length', 2, ]
      hi = [ 'pos', 'guide/lineup/length', 4, ]
      input = HOLLERITH.create_phrasestream db, { lo, hi, }
      input
        .pipe $ ( phrase, send ) =>
          idx += +1
          T.eq phrase, matchers[ idx ]
        .pipe D.$on_end ( end ) => end(); done()
#-----------------------------------------------------------------------------------------------------------
@[ "read POS phrases (2)" ] = ( T, done ) ->
probes_idx = 0
idx = -1
count = 0
#.........................................................................................................
matchers = [
[ 'pos', 'guide/uchr/has', '八', '𧷟', 0, ]
[ 'pos', 'guide/uchr/has', '刀', '𧷟', 1, ]
[ 'pos', 'guide/uchr/has', '宀', '𧷟', 2, ]
[ 'pos', 'guide/uchr/has', '貝', '𧷟', 4, ]
[ 'pos', 'guide/uchr/has', '', '𧷟', 3, ]
]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
prefix = [ 'pos', 'guide/uchr/has', ]
input = HOLLERITH.create_phrasestream db, { prefix, }
settings = { indexed: no, }
input
.pipe $ ( phrase, send ) =>
debug '©DsAfY', rpr phrase
count += +1
idx += +1
T.eq phrase, matchers[ idx ]
.pipe D.$on_end ( end ) =>
T.eq count, matchers.length
end()
done()
#-----------------------------------------------------------------------------------------------------------
  # Reads all SPO phrases for subject '𧷟' by prefix; the solid list value must come
  # back intact alongside the scalar objects.
  @[ "read SPO phrases" ] = ( T, done ) ->
    debug '©Rsoxb', db[ '%self' ].isOpen()
    probes_idx = 0
    idx = -1
    count = 0
    #.........................................................................................................
    matchers = [
      [ 'spo', '𧷟', 'cp/cid', 163295 ]
      [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
      [ 'spo', '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝' ] ]
      [ 'spo', '𧷟', 'rank/cjt', 5432 ]
      ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', '𧷟', ]
      input = HOLLERITH.create_phrasestream db, { prefix, }
      input
        .pipe $ ( phrase, send ) =>
          debug '©DsAfY', rpr phrase
          count += +1
          idx += +1
          T.eq phrase, matchers[ idx ]
        .pipe D.$on_end ( end ) =>
          T.eq count, matchers.length
          end()
          done()
#-----------------------------------------------------------------------------------------------------------
@[ "sorting (1)" ] = ( T, done ) ->
step ( resume ) =>
settings =
db: leveldown
keyEncoding: 'binary'
leveldb = levelup '/tmp/hollerith2-test', settings
yield clear_leveldb leveldb, resume
probes = [
'a'
'ab'
'abc'
'abc\x00'
'abc\x00a'
'abca'
'abcb'
'abcc'
'abcd'
'abcde'
'abcdef'
'abcdefg' ]
matchers = [
new Buffer [ 0x61, ]
new Buffer [ 0x61, 0x62, ]
new Buffer [ 0x61, 0x62, 0x63, ]
new Buffer [ 0x61, 0x62, 0x63, 0x00, ]
new Buffer [ 0x61, 0x62, 0x63, 0x00, 0x61, ]
new Buffer [ 0x61, 0x62, 0x63, 0x61, ]
new Buffer [ 0x61, 0x62, 0x63, 0x62, ]
new Buffer [ 0x61, 0x62, 0x63, 0x63, ]
new Buffer [ 0x61, 0x62, 0x63, 0x64, ]
new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, ]
new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, ]
new Buffer [ 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, ] ]
CND.shuffle probes
for probe in probes
probe_bfr = new Buffer probe, 'utf-8'
yield leveldb.put probe_bfr, '1', resume
probe_bfrs = yield read_all_keys leveldb, resume
probe_bfrs = yield read_all_keys leveldb, resume
# debug '©RXPvv', '\n' + rpr probe_bfrs
for probe_bfr, probe_idx in probe_bfrs
matcher = matchers[ probe_idx ]
### TAINT looks like `T.eq buffer1, buffer2` doesn't work---sometimes... ###
# T.eq probe_bfr, matcher
T.ok probe_bfr.equals matcher
leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Verifies byte ordering of single-byte buffer keys, including high bytes >= 0xf9.
  @[ "sorting (2)" ] = ( T, done ) ->
    ### This test is here because there seemed to occur some strange ordering issues when
    using memdown instead of leveldown ###
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        new Buffer [ 0x00, ]
        new Buffer [ 0x01, ]
        new Buffer [ 0x02, ]
        new Buffer [ 0x03, ]
        new Buffer [ 0xf9, ]
        new Buffer [ 0xfa, ]
        new Buffer [ 0xfb, ]
        new Buffer [ 0xfc, ]
        new Buffer [ 0xfd, ]
        ]
      # `matchers` keeps the original (sorted) order; shuffling `probes` afterwards
      # only affects insertion order, not the expected read-back order.
      matchers = ( probe for probe in probes )
      CND.shuffle probes
      for probe in probes
        yield leveldb.put probe, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      for probe_bfr, probe_idx in probe_bfrs
        matcher = matchers[ probe_idx ]
        # debug '©15060', probe_idx, probe_bfr, matcher
        ### TAINT looks like `T.eq buffer1, buffer2` doesn't work---sometimes... ###
        T.ok probe_bfr.equals matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
@[ "H2 codec `encode` throws on anything but a list" ] = ( T, done ) ->
T.throws "expected a list, got a text", ( -> CODEC.encode 'unaccaptable' )
T.throws "expected a list, got a number", ( -> CODEC.encode 42 )
T.throws "expected a list, got a boolean", ( -> CODEC.encode true )
T.throws "expected a list, got a boolean", ( -> CODEC.encode false )
T.throws /^expected a list, got a (?:js)?undefined$/, ( -> CODEC.encode() )
done()
#-----------------------------------------------------------------------------------------------------------
  # Texts encoded with the H2 codec must sort in LevelDB the same way the raw texts do.
  @[ "sort texts with H2 codec (1)" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        'a'
        'ab'
        'abc'
        'abc\x00'
        'abc\x00a'
        'abca'
        'abca\x00'
        'abcb'
        'abcc'
        'abcd'
        'abcde'
        'abcdef'
        'abcdefg'
        ]
      matchers = ( [ probe, ] for probe in probes )
      CND.shuffle probes
      for probe in probes
        yield leveldb.put ( CODEC.encode [ probe, ] ), '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Unicode texts (BMP, CJK, astral-plane code points up to U+10FFFF) must keep their
  # code-point order after H2 encoding when sorted bytewise by LevelDB.
  @[ "sort texts with H2 codec (2)" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        ''
        ' '
        'a'
        'abc'
        '一'
        '一二'
        '一二三'
        '三'
        '二'
        '𠀀'
        '𠀀\x00'
        '𠀀a'
        '𪜀'
        '𫝀'
        String.fromCodePoint 0x10ffff
        ]
      matchers = ( [ probe, ] for probe in probes )
      CND.shuffle probes
      for probe in probes
        probe_bfr = CODEC.encode [ probe, ]
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Numbers across the full double range (±Infinity, MIN_VALUE, EPSILON, safe-integer
  # bounds) must sort numerically after H2 encoding.
  @[ "sort numbers with H2 codec (1)" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes_and_descriptions = [
        [ -Infinity, "-Infinity" ]
        [ -Number.MAX_VALUE, "-Number.MAX_VALUE" ]
        [ Number.MIN_SAFE_INTEGER, "Number.MIN_SAFE_INTEGER" ]
        [ -123456789, "-123456789" ]
        [ -3, "-3" ]
        [ -2, "-2" ]
        [ -1.5, "-1.5" ]
        [ -1, "-1" ]
        [ -Number.EPSILON, "-Number.EPSILON" ]
        [ -Number.MIN_VALUE, "-Number.MIN_VALUE" ]
        [ 0, "0" ]
        [ +Number.MIN_VALUE, "+Number.MIN_VALUE" ]
        [ +Number.EPSILON, "+Number.EPSILON" ]
        [ +1, "+1" ]
        [ +1.5, "+1.5" ]
        [ +2, "+2" ]
        [ +3, "+3" ]
        [ +123456789, "+123456789" ]
        [ Number.MAX_SAFE_INTEGER, "Number.MAX_SAFE_INTEGER" ]
        [ Number.MAX_VALUE, "Number.MAX_VALUE" ]
        [ +Infinity, "+Infinity" ]
        ]
      # probes_and_descriptions.sort ( a, b ) ->
      #   return +1 if a[ 0 ] > b[ 0 ]
      #   return -1 if a[ 0 ] < b[ 0 ]
      #   return 0
      matchers = ( [ pad[ 0 ], ] for pad in probes_and_descriptions )
      # descriptions = ( [ pad[ 1 ], ] for pad in probes_and_descriptions )
      for pad in probes_and_descriptions
        urge pad
      CND.shuffle probes_and_descriptions
      for [ probe, _, ] in probes_and_descriptions
        probe_bfr = CODEC.encode [ probe, ]
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Heterogeneous values (null, booleans, dates incl. sentinel dates, numbers, texts)
  # must sort according to the codec's global type ordering.
  @[ "sort mixed values with H2 codec" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        null
        false
        true
        CODEC[ 'sentinels' ][ 'firstdate' ]
        new Date 0
        new Date 8e11
        new Date()
        CODEC[ 'sentinels' ][ 'lastdate' ]
        1234
        Infinity
        ''
        '一'
        '三'
        '二'
        '𠀀'
        '𠀀\x00'
        String.fromCodePoint 0x10ffff
        ]
      matchers = ( [ probe, ] for probe in probes )
      CND.shuffle probes
      for probe in probes
        debug '©oMXJZ', probe
        probe_bfr = CODEC.encode [ probe, ]
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Two-element lists `[ description, value ]` must sort by the leading description text.
  @[ "sort lists of mixed values with H2 codec" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        [ "", '', ]
        [ "1234", 1234, ]
        [ "Infinity", Infinity, ]
        [ "String.fromCodePoint 0x10ffff", String.fromCodePoint 0x10ffff ]
        [ "false", false, ]
        [ "new Date 0", new Date 0, ]
        [ "new Date 8e11", new Date 8e11, ]
        [ "new Date()", new Date(), ]
        [ "null", null, ]
        [ "true", true, ]
        [ "一", '一', ]
        [ "三", '三', ]
        [ "二", '二', ]
        [ "𠀀", '𠀀', ]
        [ "𠀀\x00", '𠀀\x00', ]
        ]
      matchers = ( probe for probe in probes )
      CND.shuffle probes
      for probe in probes
        debug '©oMXJZ', probe
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # The codec's byte encoding must sort identically under Node's `Buffer.compare`
  # and under LevelDB's key ordering.
  @[ "ensure `Buffer.compare` gives same sorting as LevelDB" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        # { x: 1234.5678 }
        [ "", '', ]
        [ "1234", 1234, ]
        [ "Infinity", Infinity, ]
        [ "String.fromCodePoint 0x10ffff", String.fromCodePoint 0x10ffff ]
        [ "false", false, ]
        [ "new Date 0", new Date 0, ]
        [ "new Date 8e11", new Date 8e11, ]
        [ "new Date()", new Date(), ]
        [ "null", null, ]
        [ "true", true, ]
        [ "一", '一', ]
        [ "三", '三', ]
        [ "二", '二', ]
        [ "𠀀", '𠀀', ]
        [ "𠀀\x00", '𠀀\x00', ]
        ]
      CND.shuffle probes
      for probe in probes
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      last_probe_bfr = null
      for probe_bfr in probe_bfrs
        if last_probe_bfr?
          # Each key as returned by LevelDB must compare strictly less than its successor.
          T.eq ( Buffer.compare last_probe_bfr, probe_bfr ), -1
        last_probe_bfr = probe_bfr
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Stroke-order route keys, including trailing-NUL variants, must keep lexicographic order.
  @[ "sort routes with values (1)" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        [ 'pos', 'strokeorder', '352513553254', '𤿯', ]
        [ 'pos', 'strokeorder', '3525141121', '𠴦', ]
        [ 'pos', 'strokeorder', '35251454', '𨒡', ]
        [ 'pos', 'strokeorder', '3525152', '邭', ]
        [ 'pos', 'strokeorder', '352515251115115113541', '𪚫', ]
        [ 'pos', 'strokeorder', '35251525112511511', '𪚧', ]
        [ 'pos', 'strokeorder', '352515251214251214', '𧑴', ]
        [ 'pos', 'strokeorder', '3525153', '劬', ]
        [ 'pos', 'strokeorder', '3525153\x00', '劬', ]
        [ 'pos', 'strokeorder\x00', '352513553254', '𤿯', ]
        ]
      matchers = ( probe for probe in probes )
      CND.shuffle probes
      for probe in probes
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Mixed-type trailing elements under equal leading elements must sort by type, then value.
  @[ "sort routes with values (2)" ] = ( T, done ) ->
    step ( resume ) =>
      settings =
        db: leveldown
        keyEncoding: 'binary'
      leveldb = levelup '/tmp/hollerith2-test', settings
      yield clear_leveldb leveldb, resume
      probes = [
        [ 'a', null, ]
        [ 'a', false, ]
        [ 'a', true, ]
        [ 'a', new Date(), ]
        [ 'a', -Infinity, ]
        [ 'a', +1234, ]
        [ 'a', +Infinity, ]
        [ 'a', 'b', ]
        [ 'a', 'b\x00', ]
        [ 'a\x00', +1234, ]
        [ 'a\x00', 'b', ]
        [ 'aa', +1234, ]
        [ 'aa', 'b', ]
        [ 'aa', 'b\x00', ]
        ]
      matchers = ( probe for probe in probes )
      CND.shuffle probes
      for probe in probes
        probe_bfr = CODEC.encode probe
        yield leveldb.put probe_bfr, '1', resume
      probe_bfrs = yield read_all_keys leveldb, resume
      # debug '©Fd5iw', probe_bfrs
      probes = ( CODEC.decode probe_bfr for probe_bfr in probe_bfrs )
      show_keys_and_key_bfrs probes, probe_bfrs
      for probe, probe_idx in probes
        matcher = matchers[ probe_idx ]
        T.eq probe, matcher
      leveldb.close -> done()
#-----------------------------------------------------------------------------------------------------------
  # Dumps the entire DB (minus meta entries) after feeding probe set 2; mostly a
  # visual / debug test — it asserts nothing beyond clean stream termination.
  @[ "read sample data" ] = ( T, done ) ->
    probes_idx = 2
    # NOTE(review): `idx` is never used in this test.
    idx = -1
    step ( resume ) =>
      debug '©bUJhI', 'XX'
      yield @_feed_test_data db, probes_idx, resume
      debug '©PRzA5', 'XX'
      input = db[ '%self' ].createReadStream()
      input
        .pipe D.$show()
        .pipe $ ( { key, value, }, send ) => send [ key, value, ]
        .pipe $ ( [ key, value, ], send ) => send [ key, value, ] unless HOLLERITH._is_meta db, key
        .pipe $ ( [ key, value, ], send ) =>
          # debug '©RluhF', ( HOLLERITH.CODEC.decode key ), ( JSON.parse value )
          send [ key, value, ]
        .pipe D.$collect()
        .pipe $ ( facets, send ) =>
          # debug '©54IKt', facets
          help '\n' + HOLLERITH.DUMP.rpr_of_facets db, facets
          buffer = new Buffer JSON.stringify [ '开', '彡' ]
          debug '©GJfL6', HOLLERITH.CODEC.rpr_of_buffer buffer
        .pipe D.$on_end => done()
    #.........................................................................................................
    return null
#-----------------------------------------------------------------------------------------------------------
@[ "read and write keys with lists" ] = ( T, done ) ->
probes_idx = 0
idx = -1
count = 0
probes = [
[ 'a', 1, ]
[ 'a', [], ]
[ 'a', [ 1, ], ]
[ 'a', [ true, ], ]
[ 'a', [ 'x', 'y', 'b', ], ]
[ 'a', [ 120, 1 / 3, ], ]
[ 'a', [ 'x', ], ]
]
matchers = ( probe for probe in probes )
#.........................................................................................................
for probe, probe_idx in probes
buffer = HOLLERITH.CODEC.encode probe
result = HOLLERITH.CODEC.decode buffer
T.eq result, matchers[ probe_idx ]
#.........................................................................................................
done()
#-----------------------------------------------------------------------------------------------------------
@[ "encode keys with list elements" ] = ( T, done ) ->
probes = [
[ 'foo', 'bar', ]
[ 'foo', [ 'bar', ], ]
[ [], 'bar', ]
[ 'foo', [], ]
[ [ 'foo', ], 'bar', ]
[ [ 42, ], 'bar', ]
[ 'foo', [ 42, ] ]
]
for probe in probes
T.eq probe, HOLLERITH.CODEC.decode HOLLERITH.CODEC.encode probe
done()
#-----------------------------------------------------------------------------------------------------------
  # Writes phrases whose objects are declared "solid" (stored unanalyzed) via the
  # `solids` setting of `$write`, then streams them back; the content assertions are
  # currently commented out, so only clean completion is verified.
  @[ "read and write phrases with unanalyzed lists" ] = ( T, done ) ->
    # ### !!!!!!!!!!!!!!!!!!!!!! ###
    # warn "skipped"
    # return done()
    # ### !!!!!!!!!!!!!!!!!!!!!! ###
    idx = -1
    count = 0
    #.........................................................................................................
    probes = [
      [ 'probe#00', 'some-predicate', [], ]
      [ 'probe#01', 'some-predicate', [ -1 ], ]
      [ 'probe#02', 'some-predicate', [ 0 ], ]
      [ 'probe#03', 'some-predicate', [ 1 ], ]
      [ 'probe#04', 'some-predicate', [ 2 ], ]
      [ 'probe#05', 'some-predicate', [ 2, -1, ], ]
      [ 'probe#06', 'some-predicate', [ 2, 0, ], ]
      [ 'probe#07', 'some-predicate', [ 2, 1, ], ]
      [ 'probe#08', 'some-predicate', [ 2, 1, 0 ], ]
      [ 'probe#09', 'some-predicate', [ 2, 2, ], ]
      [ 'probe#10', 'some-predicate', [ 2, [ 2, ], ], ]
      [ 'probe#11', 'some-predicate', [ 3 ], ]
      ]
    #.........................................................................................................
    # Clears the DB, writes all probes through `$write`, then calls `handler`.
    write_probes = ( handler ) =>
      step ( resume ) =>
        yield HOLLERITH.clear db, resume
        input = D.create_throughstream()
        input
          # .pipe ( [ sbj, prd, obj, ], send ) =>
          #   if prd is 'some-predicate' # always the case in this example
          #     obj
          .pipe HOLLERITH.$write db, solids: [ 'some-predicate', ]
          .pipe D.$on_end =>
            urge "test data written"
            handler()
        #.....................................................................................................
        input.write probe for probe in probes
        input.end()
    #.........................................................................................................
    step ( resume ) =>
      #.......................................................................................................
      yield write_probes resume
      input = HOLLERITH.create_phrasestream db
      debug '©FphJK', input[ '%meta' ]
      input
        .pipe $ ( phrase, send ) =>
          count += +1
          idx += +1
          # debug '©Sc5FG', phrase
          # T.eq phrase, matchers[ idx ]
        .pipe D.$on_end =>
          # T.eq count, matchers.length
          done()
#-----------------------------------------------------------------------------------------------------------
@[ "read partial POS phrases" ] = ( T, done ) ->
# ### !!!!!!!!!!!!!!!!!!!!!! ###
# warn "skipped"
# return done()
# ### !!!!!!!!!!!!!!!!!!!!!! ###
probes_idx = 4
idx = -1
count = 0
#.........................................................................................................
matchers = [
[ 'pos', 'guide', 'xxx', '𧷟1' ]
[ 'pos', 'guide/', 'yyy', '𧷟1' ]
[ 'pos', 'guide/lineup/length', 1, '𧷟1', ]
[ 'pos', 'guide/lineup/length', 2, '𧷟2', ]
[ 'pos', 'guide/lineup/length', 3, '𧷟3', ]
[ 'pos', 'guide/lineup/length', 4, '𧷟4', ]
[ 'pos', 'guide/lineup/length', 5, '𧷟', ]
[ 'pos', 'guide/lineup/length', 6, '𧷟6', ]
[ 'pos', 'guide/uchr/has', '八', '𧷟', 0 ]
[ 'pos', 'guide/uchr/has', '刀', '𧷟', 1 ]
[ 'pos', 'guide/uchr/has', '宀', '𧷟', 2 ]
[ 'pos', 'guide/uchr/has', '貝', '𧷟', 4 ]
[ 'pos', 'guide/uchr/has', '', '𧷟', 3 ]
]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
# prefix = [ 'pos', 'guide', ]
prefix = [ 'pos', 'guide', ]
input = HOLLERITH.create_phrasestream db, { prefix, star: '*', }
# input = HOLLERITH.create_phrasestream db, { prefix, }
debug '©FphJK', input[ '%meta' ]
settings = { indexed: no, }
input
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
debug '©Sc5FG', phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end =>
T.eq count, matchers.length
done()
#-----------------------------------------------------------------------------------------------------------
@[ "read single phrases (1)" ] = ( T, done ) ->
probes_idx = 4
matcher = [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
# prefix = [ 'pos', 'guide', ]
prefix = [ 'spo', '𧷟', 'guide/lineup/length', ]
query = { prefix, star: '*', }
input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
throw error if error?
debug '©61ENl', phrase
T.eq phrase, matcher
done()
#-----------------------------------------------------------------------------------------------------------
  # Like (1), but passes a `fallback` that must be ignored because a phrase exists.
  @[ "read single phrases (2)" ] = ( T, done ) ->
    probes_idx = 4
    matcher = [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', '𧷟', 'guide/lineup/length', ]
      query = { prefix, star: '*', fallback: 'not to be used', }
      input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
        throw error if error?
        debug '©61ENl', phrase
        T.eq phrase, matcher
        done()
#-----------------------------------------------------------------------------------------------------------
  # With no matching phrase and no fallback, `read_one_phrase` must report an error.
  @[ "read single phrases (3)" ] = ( T, done ) ->
    probes_idx = 4
    matcher = "expected 1 phrase, got 0"
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', '中', 'guide/lineup/length', ]
      query = { prefix, star: '*', }
      input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
        throw new Error "expected error" unless error?
        T.eq error[ 'message' ], matcher
        done()
#-----------------------------------------------------------------------------------------------------------
  # With no matching phrase but a `fallback` given, the fallback value must be returned.
  @[ "read single phrases (4)" ] = ( T, done ) ->
    probes_idx = 4
    matcher = "this entry is missing"
    #.........................................................................................................
    step ( resume ) =>
      yield @_feed_test_data db, probes_idx, resume
      prefix = [ 'spo', '中', 'guide/lineup/length', ]
      query = { prefix, star: '*', fallback: matcher, }
      input = HOLLERITH.read_one_phrase db, query, ( error, phrase ) ->
        throw error if error?
        T.eq phrase, matcher
        done()
#-----------------------------------------------------------------------------------------------------------
  # Placeholder: the real test still needs to be written; currently only emits a reminder.
  @[ "writing phrases with non-unique keys fails" ] = ( T, done ) ->
    alert """test case "writing phrases with non-unique keys fails" to be written"""
    done()
#-----------------------------------------------------------------------------------------------------------
  # Reminder stub for pending `$write` validation work; not a real test.
  @[ "reminders" ] = ( T, done ) ->
    alert "H.$write() must test for repeated keys"
    done()
#-----------------------------------------------------------------------------------------------------------
  # Writing a non-list key must raise; the error is caught via a Node `domain`.
  @[ "invalid key not accepted (1)" ] = ( T, done ) ->
    domain = ( require 'domain' ).create()
    domain.on 'error', ( error ) ->
      # debug '©AOSmn', JSON.stringify error[ 'message' ]
      T.eq error[ 'message' ], "invalid SPO key, must be list: 'xxx'"
      later done
    domain.run ->
      input = D.create_throughstream()
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          # T.fail "should throw error"
          # NOTE(review): both the error path and this end path call `later done`;
          # presumably only one of them fires per run — TODO confirm.
          later done
      input.write 'xxx'
      input.end()
#-----------------------------------------------------------------------------------------------------------
  # Writing a list key of wrong arity must raise with a descriptive message.
  @[ "invalid key not accepted (2)" ] = ( T, done ) ->
    domain = ( require 'domain' ).create()
    domain.on 'error', ( error ) ->
      # debug '©AOSmn', JSON.stringify error[ 'message' ]
      T.eq error[ 'message' ], "invalid SPO key, must be of length 3: [ 'foo' ]"
      done()
    domain.run ->
      input = D.create_throughstream()
      input.pipe HOLLERITH.$write db
      input.write [ 'foo', ]
#-----------------------------------------------------------------------------------------------------------
@[ "catching errors (2)" ] = ( T, done ) ->
run = ( method, handler ) ->
domain = ( require 'domain' ).create()
domain.on 'error', ( error ) ->
handler error
domain.run ->
method()
#.........................................................................................................
f = ->
input = D.create_throughstream()
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
later done
input.write [ 'foo', 'bar', 'baz', ]
input.end()
run f, ( error ) ->
debug '©WaXJV', JSON.stringify error[ 'message' ]
T.eq true, false
done()
#-----------------------------------------------------------------------------------------------------------
  # A four-element key must be rejected; the error surfaces through `D.run`'s error handler.
  @[ "catching errors (1)" ] = ( T, done ) ->
    #.........................................................................................................
    d = D.run ->
      input = D.create_throughstream()
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          later done
      input.write [ 'foo', 'bar', 'baz', 'gnu', ]
      input.end()
    , ( error ) ->
      T.eq error[ 'message' ], "invalid SPO key, must be of length 3: [ 'foo', 'bar', 'baz', 'gnu' ]"
      later done
#-----------------------------------------------------------------------------------------------------------
  # A well-formed three-element key must pass through `$write` without any error.
  @[ "catching errors (2)" ] = ( T, done ) ->
    message = "should not produce error"
    #.........................................................................................................
    d = D.run ->
      input = D.create_throughstream()
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          T.succeed message
          later done
      input.write [ 'foo', 'bar', 'baz', ]
      input.end()
    , ( error ) ->
      T.fail message
      later done
#-----------------------------------------------------------------------------------------------------------
@[ "building PODs from SPO phrases" ] = ( T, done ) ->
probes_idx = 4
idx = -1
count = 0
# #.........................................................................................................
# matchers = [
# [ 'spo', '𧷟', 'cp/cid', 163295 ]
# [ 'spo', '𧷟', 'guide/lineup/length', 5 ]
# [ 'spo', '𧷟', 'guide/uchr/has', [ '八', '刀', '宀', '', '貝' ] ]
# [ 'spo', '𧷟', 'rank/cjt', 5432 ]
# ]
#.........................................................................................................
$shorten_spo = ->
return $ ( phrase, send ) =>
unless ( CND.isa_list phrase ) and phrase[ 0 ] is 'spo'
return send.error new Error "not an SPO phrase: #{rpr phrase}"
spo = phrase[ 1 .. ]
### TAINT repeated validation? ###
HOLLERITH.validate_spo spo
send spo
#.........................................................................................................
$consolidate = ->
last_sbj = null
pod = null
return $ ( spo, send, end ) =>
if spo?
### TAINT repeated validation? ###
HOLLERITH.validate_spo spo
[ sbj, prd, obj, ] = spo
#...................................................................................................
if sbj is last_sbj
pod[ prd ] = obj
#...................................................................................................
else
if pod?
### TAINT implicit key `pod` ###
send [ last_sbj, 'pod', pod, ]
pod = '%sbj': sbj
pod[ prd ] = obj
last_sbj = sbj
#...................................................................................................
# send spo
#.....................................................................................................
if end?
send [ last_sbj, 'pod', pod, ] if last_sbj?
end()
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
prefix = [ 'spo', ]
input = HOLLERITH.create_phrasestream db, { prefix, }
input
.pipe $shorten_spo()
.pipe $consolidate()
.pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end done
###
#-----------------------------------------------------------------------------------------------------------
@[ "keep ordering and completeness in asynchronous streams" ] = ( T, T_done ) ->
step ( resume ) =>
idx = 0
input_A = D.create_throughstream()
#.......................................................................................................
input_B = input_A
.pipe D.$stop_time "keep ordering and completeness in asynchronous streams"
.pipe $async ( data, done ) ->
dt = CND.random_number 0.5, 1.5
# debug '©WscFi', data, dt
after dt, =>
warn "send #{rpr data}"
done data
.pipe $ ( data, send ) ->
help "read #{rpr data}"
T.eq data, idx
idx += +1
send data
.pipe D.$on_end =>
T_done()
#.......................................................................................................
write = ->
for n in [ 0 .. 10 ]
# help "write #{n}"
input_A.write n
yield after 0.1, resume
input_A.end()
#.......................................................................................................
write()
###
#-----------------------------------------------------------------------------------------------------------
@[ "read phrases in lockstep" ] = ( T, done ) ->
probes_idx = 2
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
input_1 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'strokecount' ], }
input_2 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'componentcount' ], }
input_3 = HOLLERITH.create_phrasestream db, { prefix: [ 'pos', 'components' ], }
input_1
.pipe D.$lockstep input_2, fallback: null
.pipe D.$lockstep input_3, fallback: null
.pipe $ ( data, send ) => help JSON.stringify data; send data
.pipe D.$on_end done
#-----------------------------------------------------------------------------------------------------------
@[ "has_any yields existence of key" ] = ( T, done ) ->
probes_idx = 2
probes_and_matchers = [
[ [ 'spo', '形', 'strokecount', ], true, ]
[ [ 'spo', '丁', 'componentcount', ], true, ]
[ [ 'spo', '三', 'componentcount', ], true, ]
[ [ 'spo', '夫', 'componentcount', ], true, ]
[ [ 'spo', '國', 'componentcount', ], true, ]
[ [ 'spo', '形', 'componentcount', ], true, ]
[ [ 'spo', '丁', 'components', ], true, ]
[ [ 'spo', '丁', 'xxxx', ], false, ]
[ [ 'spo', '丁', ], true, ]
[ [ 'spo', ], true, ]
[ [ 'xxx', ], false, ]
]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
for [ probe, matcher, ] in probes_and_matchers
T.eq matcher, yield HOLLERITH.has_any db, { prefix: probe, }, resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "$write rejects duplicate S/P pairs" ] = ( T, done ) ->
probes_idx = 2
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
#.......................................................................................................
try_writing = ->
input = D.create_throughstream()
#.....................................................................................................
input
.pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
T.fail "should never be called"
done()
#.....................................................................................................
input.write [ '形', 'strokecount', 1234, ]
input.end()
#.......................................................................................................
D.run try_writing, ( error ) ->
T.eq "S/P pair already in DB: [ '形', 'strokecount' ]", error[ 'message' ]
done()
#-----------------------------------------------------------------------------------------------------------
@[ "codec accepts long keys" ] = ( T, done ) ->
probes_idx = 2
probes = []
long_text = ( new Array 1025 ).join '#'
# probes.push [ 'foo', long_text, [ long_text, long_text, long_text, long_text, long_text, ], ]
# probes.push [ 'foo', [ long_text, long_text, long_text, long_text, long_text, ],
# [ long_text, long_text, long_text, long_text, long_text, ], ]
# probes.push [ 'foo', [ long_text, long_text, long_text, long_text, long_text, ], ]
probes.push [ 'foo', [ long_text, long_text, long_text, long_text, ], 42, ]
#.........................................................................................................
step ( resume ) =>
yield @_feed_test_data db, probes_idx, resume
#.......................................................................................................
try_writing = ->
input = D.create_throughstream()
#.....................................................................................................
input
# .pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
T.eq 1, 1
done()
#.....................................................................................................
for probe in probes
input.write probe
# yield later resume
input.end()
#.......................................................................................................
D.run try_writing, ( error ) ->
T.fail "should not throw error"
warn error
done()
#-----------------------------------------------------------------------------------------------------------
@[ "write private types (1)" ] = ( T, done ) ->
probes_idx = 5
idx = -1
count = 0
#.........................................................................................................
matchers = [
["pos","componentcount",1,"丁"]
["pos","components","丁","丁",0]
["pos","mtime",123456789,{"type":"route","value":"/foo/bar"}]
["pos","strokecount",2,"丁"]
["spo","丁","componentcount",1]
["spo","丁","components",["丁"]]
["spo","丁","strokecount",2]
["spo",{"type":"route","value":"/foo/bar"},"mtime",123456789]
]
#.........................................................................................................
write_data = ( handler ) =>
input = D.create_throughstream()
input
# .pipe D.$show()
.pipe HOLLERITH.$write db
.pipe D.$on_end -> handler()
#.......................................................................................................
for probe in @_feed_test_data.probes[ probes_idx ]
input.write probe
input.end()
#.........................................................................................................
read_data = ( handler ) ->
#.......................................................................................................
input = HOLLERITH.create_phrasestream db
input
# .pipe D.$show()
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
debug '©Sc5FG', JSON.stringify phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield HOLLERITH.clear db, resume
yield write_data resume
yield read_data resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "write private types (2)" ] = ( T, done ) ->
probes_idx = 5
idx = -1
count = 0
#.........................................................................................................
encoder = ( type, value ) ->
debug '©XXX-encoder', type, rpr value
return value.split '/' if type is 'route'
throw new Error "unknown private type #{rpr type}"
#.........................................................................................................
xdb_route = join __dirname, '..', 'dbs/tests-with-private-types'
#.........................................................................................................
xdb_settings =
size: 500
encoder: encoder
#.........................................................................................................
xdb = HOLLERITH.new_db xdb_route, xdb_settings
#.........................................................................................................
matchers = [
["pos","componentcount",1,"丁"]
["pos","components","丁","丁",0]
["pos","mtime",123456789,{"type":"route","value":["","foo","bar"]}]
["pos","strokecount",2,"丁"]
["spo","丁","componentcount",1]
["spo","丁","components",["丁"]]
["spo","丁","strokecount",2]
["spo",{"type":"route","value":["","foo","bar"]},"mtime",123456789]
]
#.........................................................................................................
write_data = ( handler ) =>
input = D.create_throughstream()
input
# .pipe D.$show()
.pipe HOLLERITH.$write xdb
.pipe D.$on_end -> handler()
#.......................................................................................................
for probe in @_feed_test_data.probes[ probes_idx ]
input.write probe
input.end()
#.........................................................................................................
read_data = ( handler ) ->
#.......................................................................................................
input = HOLLERITH.create_phrasestream xdb
input
# .pipe D.$show()
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
debug '©Sc5FG', JSON.stringify phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield HOLLERITH.clear xdb, resume
yield write_data resume
yield read_data resume
yield xdb[ '%self' ].close resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "write private types (3)" ] = ( T, done ) ->
probes_idx = 5
idx = -1
count = 0
#.........................................................................................................
encoder = ( type, value ) ->
# debug '©XXX-encoder', type, rpr value
return value.split '/' if type is 'route'
throw new Error "unknown private type #{rpr type}"
#.........................................................................................................
decoder = ( type, value ) ->
# debug '©XXX-decoder', type, rpr value
return value.join '/' if type is 'route'
throw new Error "unknown private type #{rpr type}"
#.........................................................................................................
xdb_route = join __dirname, '..', 'dbs/tests-with-private-types'
#.........................................................................................................
xdb_settings =
size: 500
encoder: encoder
decoder: decoder
#.........................................................................................................
xdb = HOLLERITH.new_db xdb_route, xdb_settings
#.........................................................................................................
matchers = [
["pos","componentcount",1,"丁"]
["pos","components","丁","丁",0]
["pos","mtime",123456789,"/foo/bar"]
["pos","strokecount",2,"丁"]
["spo","丁","componentcount",1]
["spo","丁","components",["丁"]]
["spo","丁","strokecount",2]
["spo","/foo/bar","mtime",123456789]
]
#.........................................................................................................
write_data = ( handler ) =>
input = D.create_throughstream()
input
# .pipe D.$show()
.pipe HOLLERITH.$write xdb
.pipe D.$on_end -> handler()
#.......................................................................................................
for probe in @_feed_test_data.probes[ probes_idx ]
input.write probe
input.end()
#.........................................................................................................
read_data = ( handler ) ->
#.......................................................................................................
input = HOLLERITH.create_phrasestream xdb
input
# .pipe D.$show()
.pipe $ ( phrase, send ) =>
count += +1
idx += +1
urge '©Sc5FG', JSON.stringify phrase
T.eq phrase, matchers[ idx ]
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield HOLLERITH.clear xdb, resume
yield write_data resume
yield read_data resume
yield xdb[ '%self' ].close resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "bloom filter serialization without writes" ] = ( T, done ) ->
#.........................................................................................................
# step ( resume ) =>
xdb = HOLLERITH.new_db get_new_db_name()
input = HOLLERITH.create_phrasestream xdb
input.pause()
input.pipe HOLLERITH.$write xdb
input.resume()
input.end()
T.ok true
done()
#-----------------------------------------------------------------------------------------------------------
  @[ "Pinyin Unicode Sorting" ] = ( T, done ) ->
    ### Writes one `reading` phrase per Pinyin vowel letter — all four tone marks
    plus the bare letters, upper- and lowercase, including `ɑ` (U+0251) variants
    that use combining diacritics — then reads the `pos` index back and checks the
    letters come out in the DB's key sort order. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      # Subjects '01'..'66' record the intended Pinyin presentation order.
      input.write [ '01', 'reading', 'ā', ]
      input.write [ '02', 'reading', 'ɑ̄', ]
      input.write [ '03', 'reading', 'ē', ]
      input.write [ '04', 'reading', 'ī', ]
      input.write [ '05', 'reading', 'ō', ]
      input.write [ '06', 'reading', 'ū', ]
      input.write [ '07', 'reading', 'ǖ', ]
      input.write [ '08', 'reading', 'Ā', ]
      input.write [ '09', 'reading', 'Ē', ]
      input.write [ '10', 'reading', 'Ī', ]
      input.write [ '11', 'reading', 'Ō', ]
      input.write [ '12', 'reading', 'Ū', ]
      input.write [ '13', 'reading', 'Ǖ', ]
      input.write [ '14', 'reading', 'á', ]
      input.write [ '15', 'reading', 'ɑ́', ]
      input.write [ '16', 'reading', 'é', ]
      input.write [ '17', 'reading', 'í', ]
      input.write [ '18', 'reading', 'ó', ]
      input.write [ '19', 'reading', 'ú', ]
      input.write [ '20', 'reading', 'ǘ', ]
      input.write [ '21', 'reading', 'Á', ]
      input.write [ '22', 'reading', 'É', ]
      input.write [ '23', 'reading', 'Í', ]
      input.write [ '24', 'reading', 'Ó', ]
      input.write [ '25', 'reading', 'Ú', ]
      input.write [ '26', 'reading', 'Ǘ', ]
      input.write [ '27', 'reading', 'ǎ', ]
      input.write [ '28', 'reading', 'ɑ̌', ]
      input.write [ '29', 'reading', 'ě', ]
      input.write [ '30', 'reading', 'ǐ', ]
      input.write [ '31', 'reading', 'ǒ', ]
      input.write [ '32', 'reading', 'ǔ', ]
      input.write [ '33', 'reading', 'ǚ', ]
      input.write [ '34', 'reading', 'Ǎ', ]
      input.write [ '35', 'reading', 'Ě', ]
      input.write [ '36', 'reading', 'Ǐ', ]
      input.write [ '37', 'reading', 'Ǒ', ]
      input.write [ '38', 'reading', 'Ǔ', ]
      input.write [ '39', 'reading', 'Ǚ', ]
      input.write [ '40', 'reading', 'à', ]
      input.write [ '41', 'reading', 'ɑ̀', ]
      input.write [ '42', 'reading', 'è', ]
      input.write [ '43', 'reading', 'ì', ]
      input.write [ '44', 'reading', 'ò', ]
      input.write [ '45', 'reading', 'ù', ]
      input.write [ '46', 'reading', 'ǜ', ]
      input.write [ '47', 'reading', 'À', ]
      input.write [ '48', 'reading', 'È', ]
      input.write [ '49', 'reading', 'Ì', ]
      input.write [ '50', 'reading', 'Ò', ]
      input.write [ '51', 'reading', 'Ù', ]
      input.write [ '52', 'reading', 'Ǜ', ]
      input.write [ '53', 'reading', 'a', ]
      input.write [ '54', 'reading', 'ɑ', ]
      input.write [ '55', 'reading', 'e', ]
      input.write [ '56', 'reading', 'i', ]
      input.write [ '57', 'reading', 'o', ]
      input.write [ '58', 'reading', 'u', ]
      input.write [ '59', 'reading', 'ü', ]
      input.write [ '60', 'reading', 'A', ]
      input.write [ '61', 'reading', 'Ɑ', ]
      input.write [ '62', 'reading', 'E', ]
      input.write [ '63', 'reading', 'I', ]
      input.write [ '64', 'reading', 'O', ]
      input.write [ '65', 'reading', 'U', ]
      input.write [ '66', 'reading', 'Ü', ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      query = { prefix: [ 'pos', ], star: '*', }
      input = HOLLERITH.create_phrasestream db, query
      input
        .pipe do =>
          # Collect the object (the letter) of each `pos` phrase; on stream end,
          # join the letters and compare against the expected DB key ordering.
          collector = []
          return $ ( phrase, send, end ) =>
            if phrase?
              [ _, _, letter, _, ] = phrase
              collector.push letter
            if end?
              urge collector = collector.join ''
              # NOTE(review): the expected string looks like plain codepoint order,
              # not Pinyin collation order — confirm against the key codec before
              # editing it.
              T.eq collector, 'AEIOUaeiouÀÁÈÉÌÍÒÓÙÚÜàáèéìíòóùúüĀāĒēĚěĪīŌōŪūǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜɑɑ̀ɑ́ɑ̄ɑ̌Ɑ'
              end()
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
  @[ "use non-string subjects in phrases (1)" ] = ( T, done ) ->
    ### Exercises phrases whose subject is itself a phrase (KWIC sortcode records
    attached to similarity and lineup phrases); only checks that write-then-dump
    completes without error — the output is for visual inspection. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      # Plain string subjects with KWIC sortcode objects:
      input.write [ '千', 'guide/kwic/v3/sortcode', [ [ [ '0686f---', null ], '千', [], [] ] ], ]
      input.write [ '于', 'guide/kwic/v3/sortcode', [ [ [ '0019f---', null ], '于', [], [] ] ], ]
      input.write [ '干', 'guide/kwic/v3/sortcode', [ [ [ '0020f---', null ], '干', [], [] ] ], ]
      #.......................................................................................................
      ### Three phrases to register '千 looks similar to both 于 and 干': ###
      input.write [ '千', 'shape/similarity', [ '于', '干', ], ]
      input.write [ '于', 'shape/similarity', [ '干', '千', ], ]
      input.write [ '干', 'shape/similarity', [ '千', '于', ], ]
      ### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
      input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0686f---', null ], '千', [], [] ] ], ]
      input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0019f---', null ], '于', [], [] ] ], ]
      input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'guide/kwic/v3/sortcode', [ [ [ '0020f---', null ], '干', [], [] ] ], ]
      #.......................................................................................................
      ### Two sub-factorial renderings of 千 as 亻一 and 丿十: ###
      input.write [ '亻', 'guide/kwic/v3/sortcode', [ [ [ '0774f---', null ], '亻', [], [] ] ], ]
      input.write [ '一', 'guide/kwic/v3/sortcode', [ [ [ '0000f---', null ], '一', [], [] ] ], ]
      input.write [ '丿', 'guide/kwic/v3/sortcode', [ [ [ '0645f---', null ], '丿', [], [] ] ], ]
      input.write [ '十', 'guide/kwic/v3/sortcode', [ [ [ '0104f---', null ], '十', [], [] ] ], ]
      input.write [
        [ '千', 'guide/lineup/uchr', '亻一', ], 'guide/kwic/v3/sortcode', [
          [ [ '0774f---', '0000f---', null, ], [ '亻', [ '一', ], [] ], ]
          [ [ '0000f---', null, '0774f---', ], [ '一', [], [ '亻', ] ], ]
          ] ]
      input.write [
        [ '千', 'guide/lineup/uchr', '丿十', ], 'guide/kwic/v3/sortcode', [
          [ [ '0645f---', '0104f---', null, ], [ '丿', [ '十', ], [] ], ]
          [ [ '0104f---', null, '0645f---', ], [ '十', [], [ '丿', ] ], ]
          ] ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # Dump all phrases so a human can eyeball the round-tripped nested subjects.
      input = HOLLERITH.create_phrasestream db
      input
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
@[ "use non-string subjects in phrases (2)" ] = ( T, done ) ->
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ '千', 'reading/py/base', [ 'qian', ], ]
input.write [ '于', 'reading/py/base', [ 'yu', ], ]
input.write [ '干', 'reading/py/base', [ 'gan', ], ]
#.......................................................................................................
### Three phrases to register '千 looks similar to both 于 and 干': ###
input.write [ '千', 'shape/similarity', [ '于', '干', ], ]
input.write [ '于', 'shape/similarity', [ '干', '千', ], ]
input.write [ '干', 'shape/similarity', [ '千', '于', ], ]
### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
input.write [ [ '千', 'shape/similarity', [ '于', '干', ], 0, ], 'reading/py/base', [ 'qian', ], ]
input.write [ [ '于', 'shape/similarity', [ '千', '干', ], 0, ], 'reading/py/base', [ 'yu', ], ]
input.write [ [ '干', 'shape/similarity', [ '千', '于', ], 0, ], 'reading/py/base', [ 'gan', ], ]
#.......................................................................................................
input.write [ [ '千', 'reading/py/base', [ 'qian', ], 0, ], 'shape/similarity', [ '于', '干', ], ]
input.write [ [ '于', 'reading/py/base', [ 'yu', ], 0, ], 'shape/similarity', [ '千', '干', ], ]
input.write [ [ '干', 'reading/py/base', [ 'gan', ], 0, ], 'shape/similarity', [ '千', '于', ], ]
#.......................................................................................................
input.end()
#.........................................................................................................
show = ( handler ) ->
input = HOLLERITH.create_phrasestream db
input
.pipe D.$observe ( phrase ) =>
info JSON.stringify phrase
.pipe D.$on_end ->
handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
# yield feed_test_data db, probes_idx, resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
  @[ "use non-string subjects in phrases (3)" ] = ( T, done ) ->
    ### Like variant (2), but every subject — including the simple ones — is a
    list, and the nested-subject phrases omit the trailing index; only checks that
    write-then-dump completes — the output is for visual inspection. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      ### Readings for 3 glyphs: ###
      input.write [ [ '千', ], 'reading/py/base', [ 'qian', ], ]
      input.write [ [ '于', ], 'reading/py/base', [ 'yu', ], ]
      input.write [ [ '干', ], 'reading/py/base', [ 'gan', ], ]
      #.......................................................................................................
      ### Three phrases to register '千 looks similar to both 于 and 干': ###
      input.write [ [ '千', ], 'shape/similarity', [ '于', '干', ], ]
      input.write [ [ '于', ], 'shape/similarity', [ '干', '千', ], ]
      input.write [ [ '干', ], 'shape/similarity', [ '千', '于', ], ]
      #.......................................................................................................
      ### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
      ### (1) these will lead from reading to similarity, as in
      `["pos","reading/py/base","gan",["干","shape/similarity",["千","于"]],0]`, meaning these phrases
      are suitable for building a dictionary organzed by Pinyin readings with cross-references
      to similar characters: ###
      input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'reading/py/base', [ 'qian', ], ]
      input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'reading/py/base', [ 'yu', ], ]
      input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'reading/py/base', [ 'gan', ], ]
      #.......................................................................................................
      ### (2) these will lead from similarity to reading, as in
      `["pos","shape/similarity","于",["千","reading/py/base",["qian"]],0]` ###
      input.write [ [ '千', 'reading/py/base', [ 'qian', ], ], 'shape/similarity', [ '于', '干', ], ]
      input.write [ [ '于', 'reading/py/base', [ 'yu', ], ], 'shape/similarity', [ '千', '干', ], ]
      input.write [ [ '干', 'reading/py/base', [ 'gan', ], ], 'shape/similarity', [ '千', '于', ], ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # Dump all phrases so a human can eyeball the round-tripped nested subjects.
      input = HOLLERITH.create_phrasestream db
      input
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
  @[ "use non-string subjects in phrases (4)" ] = ( T, done ) ->
    ### Variant of (3) where the nested similarity subjects carry *singular*
    objects, so repeated subject/predicate pairs need explicit indices to stay
    unique; commented-out alternates are kept deliberately for experimentation.
    Only checks that write-then-dump completes — the output is for visual
    inspection. ###
    #.........................................................................................................
    write_data = ( handler ) ->
      input = D.create_throughstream()
      #.......................................................................................................
      input
        .pipe HOLLERITH.$write db
        .pipe D.$on_end ->
          handler()
      #.......................................................................................................
      ### Readings for 3 glyphs: ###
      input.write [ [ '千', ], 'reading/py/base', [ 'qian', ], ]
      input.write [ [ '于', ], 'reading/py/base', [ 'yu', 'foo', ], ]
      input.write [ [ '干', ], 'reading/py/base', [ 'gan', ], ]
      input.write [ [ '人', ], 'reading/py/base', [ 'ren', ], ]
      #.......................................................................................................
      ### Three phrases to register '千 looks similar to both 于 and 干': ###
      # input.write [ [ '千', ], 'shape/similarity', [ '于', '干', ], ]
      # input.write [ [ '于', ], 'shape/similarity', [ '干', '千', ], ]
      # input.write [ [ '干', ], 'shape/similarity', [ '千', '于', ], ]
      #.......................................................................................................
      ### The same as the above, experimentally using nested phrases whose subject is itself a phrase: ###
      ### (1) these will lead from reading to similarity, as in
      `["pos","reading/py/base","gan",["干","shape/similarity",["千","于"]],0]`, meaning these phrases
      are suitable for building a dictionary organzed by Pinyin readings with cross-references
      to similar characters: ###
      # input.write [ [ '千', 'shape/similarity', [ '于', '干', ], ], 'reading/py/base', 'qian', ]
      # input.write [ [ '于', 'shape/similarity', [ '千', '干', ], ], 'reading/py/base', 'yu', ]
      # input.write [ [ '干', 'shape/similarity', [ '千', '于', ], ], 'reading/py/base', 'gan', ]
      input.write [ [ '千', 'shape/similarity', '于', ], 'reading/py/base', 'qian', ]
      input.write [ [ '千', 'shape/similarity', '干', ], 'reading/py/base', 'qian', ]
      input.write [ [ '于', 'shape/similarity', '千', ], 'reading/py/base', 'yu', ]
      input.write [ [ '于', 'shape/similarity', '干', ], 'reading/py/base', 'yu', ]
      input.write [ [ '干', 'shape/similarity', '千', ], 'reading/py/base', 'gan', ]
      input.write [ [ '干', 'shape/similarity', '于', ], 'reading/py/base', 'gan', ]
      input.write [ [ '于', 'shape/similarity', '千', 1, ], 'reading/py/base', 'foo', ]
      input.write [ [ '于', 'shape/similarity', '干', 2, ], 'reading/py/base', 'foo', ]
      #.......................................................................................................
      # ### (2) these will lead from similarity to reading, as in
      # `["pos","shape/similarity","于",["千","reading/py/base",["qian"]],0]`. These phrases carry the same
      # information as the corresponding ones in `use non-string subjects in phrases (3)`, above,
      # but here the referenced similarity phrases have singular objects; consequently, subject / predicate
      # pairs may be repeated, which is why introducing an index is mandatory. As such, the index
      # need not be a number or for meaningful series—it only needs to be unique within the respective
      # group: ###
      # input.write [ [ '千', 'reading/py/base', [ 'qian', ], 0, ], 'shape/similarity', '于', ]
      # input.write [ [ '千', 'reading/py/base', [ 'qian', ], 1, ], 'shape/similarity', '干', ]
      # input.write [ [ '于', 'reading/py/base', [ 'yu', ], 0, ], 'shape/similarity', '千', ]
      # input.write [ [ '于', 'reading/py/base', [ 'yu', ], 1, ], 'shape/similarity', '干', ]
      # input.write [ [ '干', 'reading/py/base', [ 'gan', ], 0, ], 'shape/similarity', '千', ]
      # input.write [ [ '干', 'reading/py/base', [ 'gan', ], 1, ], 'shape/similarity', '于', ]
      #.......................................................................................................
      input.end()
    #.........................................................................................................
    show = ( handler ) ->
      # NOTE(review): `query` is currently unused (the argument is commented out
      below); apparently kept for easy toggling of a `pos`-prefixed dump.
      query = { prefix: [ 'pos', ], star: '*', }
      input = HOLLERITH.create_phrasestream db #, query
      input
        .pipe D.$observe ( phrase ) =>
          info JSON.stringify phrase
        .pipe D.$on_end ->
          handler()
    #.........................................................................................................
    step ( resume ) =>
      yield clear_leveldb db[ '%self' ], resume
      # yield feed_test_data db, probes_idx, resume
      yield write_data resume
      yield show resume
      done()
#-----------------------------------------------------------------------------------------------------------
@[ "binary indexing" ] = ( T, done ) ->
#.........................................................................................................
$index = ( from_predicate, to_predicate, settings = {} ) =>
from_is_plural = settings[ 'from' ] is 'plural'
to_is_plural = settings[ 'to' ] is 'plural'
from_cache = {}
to_cache = {}
#.......................................................................................................
new_index_phrase = ( tsbj, tprd, tobj, fprd, fobj, tsbj_is_list, idx = 0 ) =>
return [ [ tsbj..., tprd, idx, tobj, ], fprd, fobj, ] if tsbj_is_list
return [ [ tsbj, tprd, idx, tobj, ], fprd, fobj, ]
#.......................................................................................................
link = ( from_phrase, to_phrase ) =>
[ fsbj, fprd, fobj, ] = from_phrase
[ tsbj, tprd, tobj, ] = to_phrase
tsbj_is_list = CND.isa_list tsbj
#.....................................................................................................
unless from_is_plural or to_is_plural
# fs ts
return [ new_index_phrase tsbj, tprd, tobj, fprd, fobj, tsbj_is_list ]
#.....................................................................................................
idx = -1
R = []
if from_is_plural
# fp tp
if to_is_plural
for sub_fobj in fobj
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, sub_fobj, tsbj_is_list, idx
else
# fp ts
for sub_fobj in fobj
idx += +1
R.push new_index_phrase tsbj, tprd, tobj, fprd, sub_fobj, tsbj_is_list, idx
else
# fs tp
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, fobj, tsbj_is_list, idx
#.....................................................................................................
return R
#.......................................................................................................
return $ ( phrase, send ) =>
send phrase
[ sbj, prd, obj, ] = phrase
#.....................................................................................................
switch prd
#...................................................................................................
when from_predicate
sbj_txt = JSON.stringify sbj
if ( to_phrase = to_cache[ sbj_txt ] )?
delete to_cache[ sbj_txt ]
send index_phrase for index_phrase in link phrase, to_phrase
else
from_cache[ sbj_txt ] = phrase
#...................................................................................................
when to_predicate
sbj_txt = JSON.stringify sbj
if ( from_phrase = from_cache[ sbj_txt ] )?
delete from_cache[ sbj_txt ]
send index_phrase for index_phrase in link from_phrase, phrase
else
to_cache[ sbj_txt ] = phrase
#.....................................................................................................
return null
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe $index 'reading', 'variant', { from: 'plural', to: 'plural', }
.pipe $index 'reading', 'similarity', { from: 'plural', to: 'plural', }
.pipe $index 'reading', 'strokeorder', { from: 'plural', to: 'singular', }
.pipe $index 'strokeorder', 'reading', { from: 'singular', to: 'plural', }
.pipe $index 'strokeorder', 'usagecode', { from: 'singular', to: 'singular', }
# .pipe $index 'strokeorder', 'variant', { from: 'singular', to: 'plural', }
# .pipe $index 'strokeorder', 'similarity', { from: 'singular', to: 'plural', }
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
input.write [ [ '千', ], 'strokeorder', '312', ]
input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '32312', ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'usagecode', 'CJKTHm', ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'reading', [ 'qian', ], ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '122125112125221134515454', ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'usagecode', 'KTHm', ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'reading', [ 'qian', ], ]
#.......................................................................................................
# input.write [ ["千","variant",0,"仟"],"strokeorder","312"]
# input.write [ ["千","variant",1,"韆"],"strokeorder","312"]
# input.write [ ["千","variant",0,"仟",'usagecode','CJKTHm'],"strokeorder","312"]
# input.write [ ["千","variant",1,"韆",'usagecode','KTHm'],"strokeorder","312"]
#.......................................................................................................
# input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
# input.write [ [ '于', ], 'variant', [ '於', '亐', ], ]
# input.write [ [ '干', ], 'variant', [ '乾', '幹', '榦', '亁', '乹', ], ]
# input.write [ [ '人', ], 'variant', [ '亻', '𠔽', ], ]
# input.write [ [ '仁', ], 'variant', [ '忈', ], ]
# #.......................................................................................................
# input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '于', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '干', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '人', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
# input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
# input.write [ [ '於', ], 'usagecode', 'cJKTHM', ]
# input.write [ [ '亐', ], 'usagecode', 'K', ]
# input.write [ [ '乾', ], 'usagecode', 'CJKTHM', ]
# input.write [ [ '幹', ], 'usagecode', 'JKTHM', ]
# input.write [ [ '榦', ], 'usagecode', 'THm', ]
# input.write [ [ '亻', ], 'usagecode', 'p', ]
# #.......................................................................................................
# input.write [ [ '千', ], 'reading', [ 'qian', ], ]
# input.write [ [ '于', ], 'reading', [ 'yu', 'foo', 'bar', ], ]
# input.write [ [ '干', ], 'reading', [ 'gan', 'ほす', ], ]
# input.write [ [ '人', ], 'reading', [ 'ren', ], ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'reading', [ 'ren', ], ]
# input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
# input.write [ [ '于', ], 'similarity', [ '干', '千', ], ]
# input.write [ [ '干', ], 'similarity', [ '千', '于', ], ]
# #.......................................................................................................
# input.write [ [ '千', ], 'strokeorder', '312', ]
# input.write [ [ '于', ], 'strokeorder', '112', ]
# input.write [ [ '干', ], 'strokeorder', '112', ]
# input.write [ [ '人', ], 'strokeorder', '34', ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '3211', ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '32312', ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '122125112125221134515454', ]
# input.write [ [ '於', ], 'strokeorder', '41353444', ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '115', ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '12251112315', ]
# input.write [ [ '幹', ], 'strokeorder', '1225111231112', ]
# input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '12251112341234', ]
# input.write [ [ '亻', ], 'strokeorder', '32', ]
#.......................................................................................................
input.end()
#.........................................................................................................
matchers = [
["pos","reading","bar",["千"],2]
["pos","reading","bar",["千","similarity",4,"于"]]
["pos","reading","bar",["千","similarity",5,"干"]]
["pos","reading","bar",["千","strokeorder",2,"312"]]
["pos","reading","bar",["千","variant",4,"仟"]]
["pos","reading","bar",["千","variant",5,"韆"]]
["pos","reading","foo",["千"],1]
["pos","reading","foo",["千","similarity",2,"于"]]
["pos","reading","foo",["千","similarity",3,"干"]]
["pos","reading","foo",["千","strokeorder",1,"312"]]
["pos","reading","foo",["千","variant",2,"仟"]]
["pos","reading","foo",["千","variant",3,"韆"]]
["pos","reading","qian",["仟"],0]
["pos","reading","qian",["仟","strokeorder",0,"32312"]]
["pos","reading","qian",["千"],0]
["pos","reading","qian",["千","similarity",0,"于"]]
["pos","reading","qian",["千","similarity",1,"干"]]
["pos","reading","qian",["千","strokeorder",0,"312"]]
["pos","reading","qian",["千","variant",0,"仟"]]
["pos","reading","qian",["千","variant",1,"韆"]]
["pos","reading","qian",["韆"],0]
["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
["pos","similarity","于",["千"],0]
["pos","similarity","干",["千"],1]
["pos","strokeorder","122125112125221134515454",["韆"]]
["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
["pos","strokeorder","122125112125221134515454",["韆","usagecode",0,"KTHm"]]
["pos","strokeorder","312",["千"]]
["pos","strokeorder","312",["千","reading",0,"qian"]]
["pos","strokeorder","312",["千","reading",1,"foo"]]
["pos","strokeorder","312",["千","reading",2,"bar"]]
["pos","strokeorder","312",["千","usagecode",0,"CJKTHM"]]
["pos","strokeorder","32312",["仟"]]
["pos","strokeorder","32312",["仟","reading",0,"qian"]]
["pos","strokeorder","32312",["仟","usagecode",0,"CJKTHm"]]
["pos","usagecode","CJKTHM",["千"]]
["pos","usagecode","CJKTHm",["仟"]]
["pos","usagecode","KTHm",["韆"]]
["pos","variant","仟",["千"],0]
["pos","variant","韆",["千"],1]
]
#.........................................................................................................
show = ( handler ) ->
query = { prefix: [ 'pos', ], star: '*', }
# query = { prefix: [ 'pos', 'strokeorder', '312', ], star: '*', }
input = HOLLERITH.create_phrasestream db, query
input
.pipe D.$observe ( phrase ) => info JSON.stringify phrase
#.....................................................................................................
.pipe do =>
idx = -1
return D.$observe ( phrase ) =>
idx += +1
T.eq phrase, matchers[ idx ]
#.....................................................................................................
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "n-ary indexing (1)" ] = ( T, done ) ->
#.........................................................................................................
$index = ( descriptions ) =>
predicates = []
predicate_count = 0
arities = []
phrases = []
phrase_counts = {}
#.......................................................................................................
for predicate, arity of descriptions
predicate_count += +1
unless arity in [ 'singular', 'plural', ]
throw new Error "expected 'singular' or 'plural' for arity, got #{rpr arity}"
predicates.push predicate
phrases.push {}
arities.push arity
#.......................................................................................................
if predicate_count.length < 2
throw new Error "expected at least two predicate descriptions, got #{predicates.length}"
if predicate_count.length > 2
throw new Error "indexes with more than 2 steps not supported yet"
#.......................................................................................................
new_index_phrase = ( tsbj, tprd, tobj, fprd, fobj, tsbj_is_list, idx = 0 ) =>
return [ [ tsbj..., tprd, idx, tobj, ], fprd, fobj, ] if tsbj_is_list
return [ [ tsbj, tprd, idx, tobj, ], fprd, fobj, ]
#.......................................................................................................
link = ( phrases ) =>
throw new Error "indexes with anything but 2 steps not supported yet" if phrases.length != 2
[ from_phrase, to_phrase, ] = phrases
[ fsbj, fprd, fobj, ] = from_phrase
[ tsbj, tprd, tobj, ] = to_phrase
tsbj_is_list = CND.isa_list tsbj
from_is_plural = arities[ 0 ] is 'plural'
to_is_plural = arities[ 1 ] is 'plural'
#.....................................................................................................
unless from_is_plural or to_is_plural
return [ new_index_phrase tsbj, tprd, tobj, fprd, fobj, tsbj_is_list ]
#.....................................................................................................
idx = -1
R = []
if from_is_plural
if to_is_plural
for sub_fobj in fobj
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, sub_fobj, tsbj_is_list, idx
else
for sub_fobj in fobj
idx += +1
R.push new_index_phrase tsbj, tprd, tobj, fprd, sub_fobj, tsbj_is_list, idx
else
for sub_tobj in tobj
idx += +1
R.push new_index_phrase tsbj, tprd, sub_tobj, fprd, fobj, tsbj_is_list, idx
#.....................................................................................................
return R
#.......................................................................................................
return $ ( phrase, send ) =>
send phrase
[ sbj, prd, obj, ] = phrase
return unless ( prd_idx = predicates.indexOf prd ) >= 0
sbj_txt = JSON.stringify sbj
phrase_target = phrases[ sbj_txt]?= []
phrase_target[ prd_idx ] = phrase
phrase_counts[ sbj_txt ] = ( phrase_counts[ sbj_txt ] ? 0 ) + 1
return null if phrase_counts[ sbj_txt ] < predicate_count
#.....................................................................................................
send index_phrase for index_phrase in link phrases[ sbj_txt ]
return null
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe $index 'reading': 'plural', 'similarity': 'plural'
.pipe $index 'reading': 'plural', 'variant': 'plural'
.pipe $index 'reading': 'plural', 'strokeorder': 'singular'
.pipe $index 'strokeorder': 'singular', 'reading': 'plural'
.pipe $index 'strokeorder': 'singular', 'variant': 'plural'
.pipe $index 'strokeorder': 'singular', 'similarity': 'plural'
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
input.write [ [ '千', ], 'strokeorder', '312', ]
input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
input.write [ [ '仟', ], 'strokeorder', '32312', ]
input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
input.write [ [ '仟', ], 'reading', [ 'qian', ], ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'strokeorder', '122125112125221134515454', ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'usagecode', 'KTHm', ]
input.write [ [ 'PI:NAME:<NAME>END_PI', ], 'reading', [ 'qian', ], ]
#.......................................................................................................
input.end()
#.........................................................................................................
matchers = [
["pos","reading","bar",["千"],2]
["pos","reading","bar",["千","similarity",4,"于"]]
["pos","reading","bar",["千","similarity",5,"干"]]
["pos","reading","bar",["千","strokeorder",2,"312"]]
["pos","reading","bar",["千","variant",4,"仟"]]
["pos","reading","bar",["千","variant",5,"韆"]]
["pos","reading","foo",["千"],1]
["pos","reading","foo",["千","similarity",2,"于"]]
["pos","reading","foo",["千","similarity",3,"干"]]
["pos","reading","foo",["千","strokeorder",1,"312"]]
["pos","reading","foo",["千","variant",2,"仟"]]
["pos","reading","foo",["千","variant",3,"韆"]]
["pos","reading","qian",["仟"],0]
["pos","reading","qian",["仟","strokeorder",0,"32312"]]
["pos","reading","qian",["千"],0]
["pos","reading","qian",["千","similarity",0,"于"]]
["pos","reading","qian",["千","similarity",1,"干"]]
["pos","reading","qian",["千","strokeorder",0,"312"]]
["pos","reading","qian",["千","variant",0,"仟"]]
["pos","reading","qian",["千","variant",1,"韆"]]
["pos","reading","qian",["韆"],0]
["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
["pos","similarity","于",["千"],0]
["pos","similarity","干",["千"],1]
["pos","strokeorder","122125112125221134515454",["韆"]]
["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
["pos","strokeorder","312",["千"]]
["pos","strokeorder","312",["千","reading",0,"qian"]]
["pos","strokeorder","312",["千","reading",1,"foo"]]
["pos","strokeorder","312",["千","reading",2,"bar"]]
["pos","strokeorder","312",["千","similarity",0,"于"]]
["pos","strokeorder","312",["千","similarity",1,"干"]]
["pos","strokeorder","312",["千","variant",0,"仟"]]
["pos","strokeorder","312",["千","variant",1,"韆"]]
["pos","strokeorder","32312",["仟"]]
["pos","strokeorder","32312",["仟","reading",0,"qian"]]
["pos","usagecode","CJKTHM",["千"]]
["pos","usagecode","CJKTHm",["仟"]]
["pos","usagecode","KTHm",["韆"]]
["pos","variant","仟",["千"],0]
["pos","variant","韆",["千"],1]
]
#.........................................................................................................
show = ( handler ) ->
query = { prefix: [ 'pos', ], star: '*', }
input = HOLLERITH.create_phrasestream db, query
input
.pipe D.$observe ( phrase ) => info JSON.stringify phrase
#.....................................................................................................
.pipe do =>
idx = -1
return D.$observe ( phrase ) =>
idx += +1
T.eq phrase, matchers[ idx ]
#.....................................................................................................
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@[ "n-ary indexing (2)" ] = ( T, done ) ->
#.........................................................................................................
write_data = ( handler ) ->
input = D.create_throughstream()
#.......................................................................................................
input
.pipe HOLLERITH.$index 'reading': 'plural', 'similarity': 'plural'
.pipe HOLLERITH.$index 'reading': 'plural', 'variant': 'plural'
.pipe HOLLERITH.$index 'reading': 'plural', 'strokeorder': 'singular'
.pipe HOLLERITH.$index 'strokeorder': 'singular', 'reading': 'plural'
.pipe HOLLERITH.$index 'strokeorder': 'singular', 'variant': 'plural'
.pipe HOLLERITH.$index 'strokeorder': 'singular', 'similarity': 'plural'
.pipe HOLLERITH.$write db
.pipe D.$on_end ->
handler()
#.......................................................................................................
input.write [ [ '千', ], 'variant', [ '仟', '韆', ], ]
input.write [ [ '千', ], 'similarity', [ '于', '干', ], ]
input.write [ [ '千', ], 'usagecode', 'CJKTHM', ]
input.write [ [ '千', ], 'strokeorder', '312', ]
input.write [ [ '千', ], 'reading', [ 'qian', 'foo', 'bar', ], ]
input.write [ [ '仟', ], 'strokeorder', '32312', ]
input.write [ [ '仟', ], 'usagecode', 'CJKTHm', ]
input.write [ [ '仟', ], 'reading', [ 'qian', ], ]
input.write [ [ '韆', ], 'strokeorder', '122125112125221134515454', ]
input.write [ [ '韆', ], 'usagecode', 'KTHm', ]
input.write [ [ '韆', ], 'reading', [ 'qian', ], ]
#.......................................................................................................
input.end()
#.........................................................................................................
matchers = [
["pos","reading","bar",["千"],2]
["pos","reading","bar",["千","similarity",4,"于"]]
["pos","reading","bar",["千","similarity",5,"干"]]
["pos","reading","bar",["千","strokeorder",2,"312"]]
["pos","reading","bar",["千","variant",4,"仟"]]
["pos","reading","bar",["千","variant",5,"韆"]]
["pos","reading","foo",["千"],1]
["pos","reading","foo",["千","similarity",2,"于"]]
["pos","reading","foo",["千","similarity",3,"干"]]
["pos","reading","foo",["千","strokeorder",1,"312"]]
["pos","reading","foo",["千","variant",2,"仟"]]
["pos","reading","foo",["千","variant",3,"韆"]]
["pos","reading","qian",["仟"],0]
["pos","reading","qian",["仟","strokeorder",0,"32312"]]
["pos","reading","qian",["千"],0]
["pos","reading","qian",["千","similarity",0,"于"]]
["pos","reading","qian",["千","similarity",1,"干"]]
["pos","reading","qian",["千","strokeorder",0,"312"]]
["pos","reading","qian",["千","variant",0,"仟"]]
["pos","reading","qian",["千","variant",1,"韆"]]
["pos","reading","qian",["韆"],0]
["pos","reading","qian",["韆","strokeorder",0,"122125112125221134515454"]]
["pos","similarity","于",["千"],0]
["pos","similarity","干",["千"],1]
["pos","strokeorder","122125112125221134515454",["韆"]]
["pos","strokeorder","122125112125221134515454",["韆","reading",0,"qian"]]
["pos","strokeorder","312",["千"]]
["pos","strokeorder","312",["千","reading",0,"qian"]]
["pos","strokeorder","312",["千","reading",1,"foo"]]
["pos","strokeorder","312",["千","reading",2,"bar"]]
["pos","strokeorder","312",["千","similarity",0,"于"]]
["pos","strokeorder","312",["千","similarity",1,"干"]]
["pos","strokeorder","312",["千","variant",0,"仟"]]
["pos","strokeorder","312",["千","variant",1,"韆"]]
["pos","strokeorder","32312",["仟"]]
["pos","strokeorder","32312",["仟","reading",0,"qian"]]
["pos","usagecode","CJKTHM",["千"]]
["pos","usagecode","CJKTHm",["仟"]]
["pos","usagecode","KTHm",["韆"]]
["pos","variant","仟",["千"],0]
["pos","variant","韆",["千"],1]
]
#.........................................................................................................
show = ( handler ) ->
query = { prefix: [ 'pos', ], star: '*', }
input = HOLLERITH.create_phrasestream db, query
input
.pipe D.$observe ( phrase ) => info JSON.stringify phrase
#.....................................................................................................
.pipe do =>
idx = -1
return D.$observe ( phrase ) =>
idx += +1
T.eq phrase, matchers[ idx ]
#.....................................................................................................
.pipe D.$on_end -> handler()
#.........................................................................................................
step ( resume ) =>
yield clear_leveldb db[ '%self' ], resume
yield write_data resume
yield show resume
done()
#-----------------------------------------------------------------------------------------------------------
@_prune = ->
for name, value of @
continue if name.startsWith '_'
delete @[ name ] unless name in include
return null
############################################################################################################
unless module.parent?
# debug '0980', JSON.stringify ( Object.keys @ ), null, ' '
include = [
# "write without error (1)"
# "write without error (2)"
# "read without error"
# "read keys without error (1)"
# "read keys without error (2)"
# "read keys without error (3)"
# "read keys without error (4)"
# "create_facetstream throws with wrong arguments"
# "read POS facets"
# "read POS phrases (1)"
# "read POS phrases (2)"
# "read SPO phrases"
# "sorting (1)"
# "sorting (2)"
# "H2 codec `encode` throws on anything but a list"
# "sort texts with H2 codec (1)"
# "sort texts with H2 codec (2)"
# "sort numbers with H2 codec (1)"
# "sort mixed values with H2 codec"
# "sort lists of mixed values with H2 codec"
# "sort routes with values (1)"
# "sort routes with values (2)"
# "read sample data"
# "read and write keys with lists"
# "encode keys with list elements"
# "read and write phrases with unanalyzed lists"
# "read partial POS phrases"
# "read single phrases (1)"
# "read single phrases (2)"
# "read single phrases (3)"
# "read single phrases (4)"
# "writing phrases with non-unique keys fails"
# "reminders"
# "invalid key not accepted (1)"
# "invalid key not accepted (2)"
# "catching errors (2)"
# "catching errors (1)"
# "building PODs from SPO phrases"
# "read phrases in lockstep"
# "has_any yields existence of key"
# "$write rejects duplicate S/P pairs"
# "codec accepts long keys"
# "write private types (1)"
# "write private types (2)"
# "write private types (3)"
# "bloom filter serialization without writes"
# "use non-string subjects in phrases"
# '$write rejects duplicate S/P pairs'
# 'codec accepts long keys'
# 'write private types (1)'
# 'use non-string subjects in phrases (1)'
# 'use non-string subjects in phrases (2)'
# 'use non-string subjects in phrases (3)'
# 'use non-string subjects in phrases (4)'
# 'binary indexing'
'n-ary indexing (1)'
'n-ary indexing (2)'
# "Pinyin Unicode Sorting"
# "ensure `Buffer.compare` gives same sorting as LevelDB"
]
# @_prune()
@_main()
# @[ "XXX" ] null, -> help "(done)"
# @[ "YYY" ] null, -> help "(done)"
# @[ "ZZZ" ] null, -> help "(done)"
# debug '©P9AOR', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'null' ] ).toString 16
# debug '©xxmIp', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'false' ] ).toString 16
# debug '©ZeY26', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'true' ] ).toString 16
# debug '©WgER9', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'date' ] ).toString 16
# debug '©UmpjJ', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'ninfinity' ] ).toString 16
# debug '©Url0K', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'nnumber' ] ).toString 16
# debug '©nFIIi', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'pnumber' ] ).toString 16
# debug '©LZ58R', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'pinfinity' ] ).toString 16
# debug '©MYxda', ( HOLLERITH.CODEC[ 'typemarkers' ][ 'text' ] ).toString 16
|
[
{
"context": "//neocotic.com/UndoWikipediaBlackout) \n# (c) 2012 Alasdair Mercer \n# Freely distributable under the MIT license. ",
"end": 99,
"score": 0.9998787641525269,
"start": 84,
"tag": "NAME",
"value": "Alasdair Mercer"
},
{
"context": "Wikipedia Blackouts analytics account... | src/lib/analytics.coffee | neocotic/UndoWikipediaBlackout | 1 | # [Undo Wikipedia Blackout](http://neocotic.com/UndoWikipediaBlackout)
# (c) 2012 Alasdair Mercer
# Freely distributable under the MIT license.
# For all details and documentation:
# <http://neocotic.com/UndoWikipediaBlackout>
# Private constants
# -----------------
# Code for Undo Wikipedia Blackouts analytics account.
ACCOUNT = 'UA-29721369-1'
# Source URL of the analytics script.
SOURCE = 'https://ssl.google-analytics.com/ga.js'
# Analytics setup
# ---------------
analytics = window.analytics = new class Analytics extends utils.Class
# Public functions
# ----------------
# Add analytics to the current page.
add: ->
# Setup tracking details for analytics.
_gaq = window._gaq ?= []
_gaq.push ['_setAccount', ACCOUNT]
_gaq.push ['_trackPageview']
# Inject script to capture analytics.
ga = document.createElement 'script'
ga.async = 'async'
ga.src = SOURCE
script = document.getElementsByTagName('script')[0]
script.parentNode.insertBefore ga, script
# Determine whether or not analytics are enabled.
enabled: -> not store? or store.get 'analytics'
# Remove analytics from the current page.
remove: ->
# Delete scripts used to capture analytics.
for script in document.querySelectorAll "script[src='#{SOURCE}']"
script.parentNode.removeChild script
# Remove tracking details for analytics.
delete window._gaq
# Create an event with the information provided and track it in analytics.
track: (category, action, label, value, nonInteraction) -> if @enabled()
event = ['_trackEvent']
# Add the required information.
event.push category
event.push action
# Add the optional information where possible.
event.push label if label?
event.push value if value?
event.push nonInteraction if nonInteraction?
# Add the event to analytics.
_gaq = window._gaq ?= []
_gaq.push event
# Configuration
# -------------
# Initialize analytics.
store?.init 'analytics', yes | 29599 | # [Undo Wikipedia Blackout](http://neocotic.com/UndoWikipediaBlackout)
# (c) 2012 <NAME>
# Freely distributable under the MIT license.
# For all details and documentation:
# <http://neocotic.com/UndoWikipediaBlackout>
# Private constants
# -----------------
# Code for Undo Wikipedia Blackouts analytics account.
ACCOUNT = '<KEY>'
# Source URL of the analytics script.
SOURCE = 'https://ssl.google-analytics.com/ga.js'
# Analytics setup
# ---------------
analytics = window.analytics = new class Analytics extends utils.Class
# Public functions
# ----------------
# Add analytics to the current page.
add: ->
# Setup tracking details for analytics.
_gaq = window._gaq ?= []
_gaq.push ['_setAccount', ACCOUNT]
_gaq.push ['_trackPageview']
# Inject script to capture analytics.
ga = document.createElement 'script'
ga.async = 'async'
ga.src = SOURCE
script = document.getElementsByTagName('script')[0]
script.parentNode.insertBefore ga, script
# Determine whether or not analytics are enabled.
enabled: -> not store? or store.get 'analytics'
# Remove analytics from the current page.
remove: ->
# Delete scripts used to capture analytics.
for script in document.querySelectorAll "script[src='#{SOURCE}']"
script.parentNode.removeChild script
# Remove tracking details for analytics.
delete window._gaq
# Create an event with the information provided and track it in analytics.
track: (category, action, label, value, nonInteraction) -> if @enabled()
event = ['_trackEvent']
# Add the required information.
event.push category
event.push action
# Add the optional information where possible.
event.push label if label?
event.push value if value?
event.push nonInteraction if nonInteraction?
# Add the event to analytics.
_gaq = window._gaq ?= []
_gaq.push event
# Configuration
# -------------
# Initialize analytics.
store?.init 'analytics', yes | true | # [Undo Wikipedia Blackout](http://neocotic.com/UndoWikipediaBlackout)
# (c) 2012 PI:NAME:<NAME>END_PI
# Freely distributable under the MIT license.
# For all details and documentation:
# <http://neocotic.com/UndoWikipediaBlackout>
# Private constants
# -----------------
# Code for Undo Wikipedia Blackouts analytics account.
ACCOUNT = 'PI:KEY:<KEY>END_PI'
# Source URL of the analytics script.
SOURCE = 'https://ssl.google-analytics.com/ga.js'
# Analytics setup
# ---------------
analytics = window.analytics = new class Analytics extends utils.Class
# Public functions
# ----------------
# Add analytics to the current page.
add: ->
# Setup tracking details for analytics.
_gaq = window._gaq ?= []
_gaq.push ['_setAccount', ACCOUNT]
_gaq.push ['_trackPageview']
# Inject script to capture analytics.
ga = document.createElement 'script'
ga.async = 'async'
ga.src = SOURCE
script = document.getElementsByTagName('script')[0]
script.parentNode.insertBefore ga, script
# Determine whether or not analytics are enabled.
enabled: -> not store? or store.get 'analytics'
# Remove analytics from the current page.
remove: ->
# Delete scripts used to capture analytics.
for script in document.querySelectorAll "script[src='#{SOURCE}']"
script.parentNode.removeChild script
# Remove tracking details for analytics.
delete window._gaq
# Create an event with the information provided and track it in analytics.
track: (category, action, label, value, nonInteraction) -> if @enabled()
event = ['_trackEvent']
# Add the required information.
event.push category
event.push action
# Add the optional information where possible.
event.push label if label?
event.push value if value?
event.push nonInteraction if nonInteraction?
# Add the event to analytics.
_gaq = window._gaq ?= []
_gaq.push event
# Configuration
# -------------
# Initialize analytics.
store?.init 'analytics', yes |
[
{
"context": "\t\t\temail:$('#customer_email').val()\n\t\t\t# password:$('#customer_password').val()\n\t\t\tprofile:\n\t\t\t\tfullname:$('#customer_nam",
"end": 260,
"score": 0.9314392805099487,
"start": 239,
"tag": "PASSWORD",
"value": "$('#customer_password"
},
{
"context": "ail').... | client/views/users/quickAdd.coffee | sawima/kimashare | 0 | Template.quickAddUser.events
'submit #customer_quick_form': (evt) ->
evt.preventDefault()
roles=$('#select_roles').val()
if !roles
roles=[]
roles.push('customer')
customer={
email:$('#customer_email').val()
# password:$('#customer_password').val()
profile:
fullname:$('#customer_name').val()
phone:$('#customer_phone').val()
company:$('#customer_company').val()
}
# console.log "new User:",customer
Meteor.call 'quickCreateUser',customer,roles
# Meteor.call 'quickCreateUser',customer,(err,u_id)->
# if !err
# console.log u_id
# Roles.addUsersToRoles(u_id,roles)
$(evt.currentTarget).trigger('reset')
# $(evt.currentTarget).trigger 'reset' | 141475 | Template.quickAddUser.events
'submit #customer_quick_form': (evt) ->
evt.preventDefault()
roles=$('#select_roles').val()
if !roles
roles=[]
roles.push('customer')
customer={
email:$('#customer_email').val()
# password:<PASSWORD>').<PASSWORD>()
profile:
fullname:$('#customer_name').val()
phone:$('#customer_phone').val()
company:$('#customer_company').val()
}
# console.log "new User:",customer
Meteor.call 'quickCreateUser',customer,roles
# Meteor.call 'quickCreateUser',customer,(err,u_id)->
# if !err
# console.log u_id
# Roles.addUsersToRoles(u_id,roles)
$(evt.currentTarget).trigger('reset')
# $(evt.currentTarget).trigger 'reset' | true | Template.quickAddUser.events
'submit #customer_quick_form': (evt) ->
evt.preventDefault()
roles=$('#select_roles').val()
if !roles
roles=[]
roles.push('customer')
customer={
email:$('#customer_email').val()
# password:PI:PASSWORD:<PASSWORD>END_PI').PI:PASSWORD:<PASSWORD>END_PI()
profile:
fullname:$('#customer_name').val()
phone:$('#customer_phone').val()
company:$('#customer_company').val()
}
# console.log "new User:",customer
Meteor.call 'quickCreateUser',customer,roles
# Meteor.call 'quickCreateUser',customer,(err,u_id)->
# if !err
# console.log u_id
# Roles.addUsersToRoles(u_id,roles)
$(evt.currentTarget).trigger('reset')
# $(evt.currentTarget).trigger 'reset' |
[
{
"context": "ir.me',\n patient: '123',\n auth: {user: 'client', pass: 'secret'}\n mkfail = (done)->\n (err)->",
"end": 207,
"score": 0.9058396220207214,
"start": 201,
"tag": "USERNAME",
"value": "client"
},
{
"context": "tient: '123',\n auth: {user: 'client', pass... | integration_test_buffer/yuiSpec.coffee | a-mroz/fhir.js | 349 | jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000
fhir = require('../src/adapters/yui')
describe "yui", ->
subject = fhir
baseUrl: 'https://ci-api.fhir.me',
patient: '123',
auth: {user: 'client', pass: 'secret'}
mkfail = (done)->
(err)->
console.error(err)
done()
it 'simplest', (done) ->
console.log('yui: simplest')
success = (data, status, headers)->
expect(status).toBe(200)
expect(data).not.toBe(1)
expect(headers).not.toBe(null)
done()
error = mkfail(done)
subject.search(type: 'Patient', query: {name: 'maud'}, success: success, error: error)
it "can convert results to an in-memory graph", (done) ->
console.log('yui: can convert ...')
success = (data, status, headers)->
expect(data[0].subject.resourceType).toBe('Patient')
done()
error = mkfail(done)
subject.search(type: 'Observation', graph: true, query: {$include: {Observation: 'subject'}}, success: success, error: error)
it "can post", (done) ->
console.log('yui: can post ...')
exampleSecEvent = {
"resourceType": "SecurityEvent",
"event": {
"type": {
"coding": [{
"system": "http://nema.org/dicom/dcid",
"code": "110114",
"display": "User Authentication"
}]
},
"subtype": [{
"coding": [{
"system": "http://nema.org/dicom/dcid",
"code": "110122",
"display": "Login"
}]
}],
"action": "E",
"dateTime": "2014-09-13T13:48:42Z",
"outcome": "0"
},
"participant": [{
"userId": "service",
"network": {
"identifier": "service",
"type": "2"
}
}],
"source": {
"site": "Cloud",
"identifier": "Health Intersections",
"type": [{
"system": "http://hl7.org/fhir/security-source-type",
"code": "3",
"display": "Web Server"
}]
}
}
success = (data, status, headers)-> done()
error = mkfail(done)
subject.create {entry: {content: exampleSecEvent}, success: success, error: error}
it "can resolve refs", (done) ->
console.log('yui: can resolve ...')
success = (rxs, status, headers)->
rx = rxs.entry[0].content
med = subject.resolveSync
reference: rx.medication
resource: rx
bundle: rx
expect(med.content).toBeTruthy()
done()
error = mkfail(done)
subject.search
type: 'MedicationPrescription'
query:
$include:
MedicationPrescription: 'medication'
success: success
error: error
| 73686 | jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000
fhir = require('../src/adapters/yui')
describe "yui", ->
subject = fhir
baseUrl: 'https://ci-api.fhir.me',
patient: '123',
auth: {user: 'client', pass: '<PASSWORD>'}
mkfail = (done)->
(err)->
console.error(err)
done()
it 'simplest', (done) ->
console.log('yui: simplest')
success = (data, status, headers)->
expect(status).toBe(200)
expect(data).not.toBe(1)
expect(headers).not.toBe(null)
done()
error = mkfail(done)
subject.search(type: 'Patient', query: {name: '<NAME>'}, success: success, error: error)
it "can convert results to an in-memory graph", (done) ->
console.log('yui: can convert ...')
success = (data, status, headers)->
expect(data[0].subject.resourceType).toBe('Patient')
done()
error = mkfail(done)
subject.search(type: 'Observation', graph: true, query: {$include: {Observation: 'subject'}}, success: success, error: error)
it "can post", (done) ->
console.log('yui: can post ...')
exampleSecEvent = {
"resourceType": "SecurityEvent",
"event": {
"type": {
"coding": [{
"system": "http://nema.org/dicom/dcid",
"code": "110114",
"display": "User Authentication"
}]
},
"subtype": [{
"coding": [{
"system": "http://nema.org/dicom/dcid",
"code": "110122",
"display": "Login"
}]
}],
"action": "E",
"dateTime": "2014-09-13T13:48:42Z",
"outcome": "0"
},
"participant": [{
"userId": "service",
"network": {
"identifier": "service",
"type": "2"
}
}],
"source": {
"site": "Cloud",
"identifier": "Health Intersections",
"type": [{
"system": "http://hl7.org/fhir/security-source-type",
"code": "3",
"display": "Web Server"
}]
}
}
success = (data, status, headers)-> done()
error = mkfail(done)
subject.create {entry: {content: exampleSecEvent}, success: success, error: error}
it "can resolve refs", (done) ->
console.log('yui: can resolve ...')
success = (rxs, status, headers)->
rx = rxs.entry[0].content
med = subject.resolveSync
reference: rx.medication
resource: rx
bundle: rx
expect(med.content).toBeTruthy()
done()
error = mkfail(done)
subject.search
type: 'MedicationPrescription'
query:
$include:
MedicationPrescription: 'medication'
success: success
error: error
| true | jasmine.DEFAULT_TIMEOUT_INTERVAL = 10000
fhir = require('../src/adapters/yui')
describe "yui", ->
subject = fhir
baseUrl: 'https://ci-api.fhir.me',
patient: '123',
auth: {user: 'client', pass: 'PI:PASSWORD:<PASSWORD>END_PI'}
mkfail = (done)->
(err)->
console.error(err)
done()
it 'simplest', (done) ->
console.log('yui: simplest')
success = (data, status, headers)->
expect(status).toBe(200)
expect(data).not.toBe(1)
expect(headers).not.toBe(null)
done()
error = mkfail(done)
subject.search(type: 'Patient', query: {name: 'PI:NAME:<NAME>END_PI'}, success: success, error: error)
it "can convert results to an in-memory graph", (done) ->
console.log('yui: can convert ...')
success = (data, status, headers)->
expect(data[0].subject.resourceType).toBe('Patient')
done()
error = mkfail(done)
subject.search(type: 'Observation', graph: true, query: {$include: {Observation: 'subject'}}, success: success, error: error)
it "can post", (done) ->
console.log('yui: can post ...')
exampleSecEvent = {
"resourceType": "SecurityEvent",
"event": {
"type": {
"coding": [{
"system": "http://nema.org/dicom/dcid",
"code": "110114",
"display": "User Authentication"
}]
},
"subtype": [{
"coding": [{
"system": "http://nema.org/dicom/dcid",
"code": "110122",
"display": "Login"
}]
}],
"action": "E",
"dateTime": "2014-09-13T13:48:42Z",
"outcome": "0"
},
"participant": [{
"userId": "service",
"network": {
"identifier": "service",
"type": "2"
}
}],
"source": {
"site": "Cloud",
"identifier": "Health Intersections",
"type": [{
"system": "http://hl7.org/fhir/security-source-type",
"code": "3",
"display": "Web Server"
}]
}
}
success = (data, status, headers)-> done()
error = mkfail(done)
subject.create {entry: {content: exampleSecEvent}, success: success, error: error}
it "can resolve refs", (done) ->
console.log('yui: can resolve ...')
success = (rxs, status, headers)->
rx = rxs.entry[0].content
med = subject.resolveSync
reference: rx.medication
resource: rx
bundle: rx
expect(med.content).toBeTruthy()
done()
error = mkfail(done)
subject.search
type: 'MedicationPrescription'
query:
$include:
MedicationPrescription: 'medication'
success: success
error: error
|
[
{
"context": "\nmodule.exports = (robot) ->\n MAINTAINERS_KEY = \"maintainers\"\n\n fs.exists './kubeconfig', (exists) ->\n unl",
"end": 101,
"score": 0.6328898668289185,
"start": 90,
"tag": "KEY",
"value": "maintainers"
},
{
"context": "ntainers_url = \"https://raw.githubuserco... | images/kubot/worker.coffee | mazzystr/project-infra | 16 | YAML = require 'yaml'
fs = require 'fs'
module.exports = (robot) ->
MAINTAINERS_KEY = "maintainers"
fs.exists './kubeconfig', (exists) ->
unless exists
fs.cp '/etc/kubeconfig/config', './kubeconfig', { dereference: true }, (error) ->
if error
console.log "Could not copy kubeconfig: #{error}"
check_kubectl_input = (subaction, args_str) ->
allowed_subactions = ["get", "log", "logs", "describe", "config", "top"]
if subaction not in allowed_subactions
return false
args = args_str.split(/\s+/)
if subaction is "config" and args[0] is not "use-context"
return false
if /^logs?/.test subaction and "-f" in args
return false
if subaction is "get" and "-w" in args
return false
return true
execute_action = (res, action, subaction, args) ->
user = res.envelope.user.name
maintainers = robot.brain.get(MAINTAINERS_KEY) || []
if user not in maintainers
console.log "`#{user}` is not allowed to perform `#{action}`"
return
if action is 'ctl'
execute_kubectl_action res, subaction, args
execute_kubectl_action = (res, action, args) ->
if not check_kubectl_input action, args
res.send "Could not execute `#{action} #{args}`"
return
res.send "Executing kubectl #{action} command..."
exec = require('child_process').exec
command = "kubectl #{action} #{args} --kubeconfig=./kubeconfig"
exec command, (error, stdout, stderror) ->
if stdout
res.send "```\n" + stdout + "\n```"
else
res.send "Sorry that didn't work"
if error
res.send (error.stack)
if stderror
res.send (stderror)
robot.respond /(\w+)\s*(\w+)\s*(.*)/i, (res) ->
requested_action = res.match[1]
requested_subaction = res.match[2]
requested_args = res.match[3]
maintainers = robot.brain.get(MAINTAINERS_KEY) || []
if maintainers.length is 0
console.log "Fetching maintainers..."
maintainers_url = "https://raw.githubusercontent.com/kubevirt/project-infra/main/OWNERS_ALIASES"
robot.http(maintainers_url)
.get() (err, getRes, body) ->
if err
console.log("Could not read ci-maintainers from #{maintainers_url}: #{err}")
return
data = YAML.parse body
maintainers = data.aliases['ci-maintainers']
console.log "Fetched maintainers: #{maintainers}"
robot.brain.set(MAINTAINERS_KEY, maintainers)
execute_action res, requested_action, requested_subaction, requested_args
else
execute_action res, requested_action, requested_subaction, requested_args
robot.router.get '/health', (req, res) ->
res.setHeader 'content-type', 'text/plain'
res.end 'OK'
| 106623 | YAML = require 'yaml'
fs = require 'fs'
module.exports = (robot) ->
MAINTAINERS_KEY = "<KEY>"
fs.exists './kubeconfig', (exists) ->
unless exists
fs.cp '/etc/kubeconfig/config', './kubeconfig', { dereference: true }, (error) ->
if error
console.log "Could not copy kubeconfig: #{error}"
check_kubectl_input = (subaction, args_str) ->
allowed_subactions = ["get", "log", "logs", "describe", "config", "top"]
if subaction not in allowed_subactions
return false
args = args_str.split(/\s+/)
if subaction is "config" and args[0] is not "use-context"
return false
if /^logs?/.test subaction and "-f" in args
return false
if subaction is "get" and "-w" in args
return false
return true
execute_action = (res, action, subaction, args) ->
user = res.envelope.user.name
maintainers = robot.brain.get(MAINTAINERS_KEY) || []
if user not in maintainers
console.log "`#{user}` is not allowed to perform `#{action}`"
return
if action is 'ctl'
execute_kubectl_action res, subaction, args
execute_kubectl_action = (res, action, args) ->
if not check_kubectl_input action, args
res.send "Could not execute `#{action} #{args}`"
return
res.send "Executing kubectl #{action} command..."
exec = require('child_process').exec
command = "kubectl #{action} #{args} --kubeconfig=./kubeconfig"
exec command, (error, stdout, stderror) ->
if stdout
res.send "```\n" + stdout + "\n```"
else
res.send "Sorry that didn't work"
if error
res.send (error.stack)
if stderror
res.send (stderror)
robot.respond /(\w+)\s*(\w+)\s*(.*)/i, (res) ->
requested_action = res.match[1]
requested_subaction = res.match[2]
requested_args = res.match[3]
maintainers = robot.brain.get(MAINTAINERS_KEY) || []
if maintainers.length is 0
console.log "Fetching maintainers..."
maintainers_url = "https://raw.githubusercontent.com/kubevirt/project-infra/main/OWNERS_ALIASES"
robot.http(maintainers_url)
.get() (err, getRes, body) ->
if err
console.log("Could not read ci-maintainers from #{maintainers_url}: #{err}")
return
data = YAML.parse body
maintainers = data.aliases['ci-maintainers']
console.log "Fetched maintainers: #{maintainers}"
robot.brain.set(MAINTAINERS_KEY, maintainers)
execute_action res, requested_action, requested_subaction, requested_args
else
execute_action res, requested_action, requested_subaction, requested_args
robot.router.get '/health', (req, res) ->
res.setHeader 'content-type', 'text/plain'
res.end 'OK'
| true | YAML = require 'yaml'
fs = require 'fs'
module.exports = (robot) ->
MAINTAINERS_KEY = "PI:KEY:<KEY>END_PI"
fs.exists './kubeconfig', (exists) ->
unless exists
fs.cp '/etc/kubeconfig/config', './kubeconfig', { dereference: true }, (error) ->
if error
console.log "Could not copy kubeconfig: #{error}"
check_kubectl_input = (subaction, args_str) ->
allowed_subactions = ["get", "log", "logs", "describe", "config", "top"]
if subaction not in allowed_subactions
return false
args = args_str.split(/\s+/)
if subaction is "config" and args[0] is not "use-context"
return false
if /^logs?/.test subaction and "-f" in args
return false
if subaction is "get" and "-w" in args
return false
return true
execute_action = (res, action, subaction, args) ->
user = res.envelope.user.name
maintainers = robot.brain.get(MAINTAINERS_KEY) || []
if user not in maintainers
console.log "`#{user}` is not allowed to perform `#{action}`"
return
if action is 'ctl'
execute_kubectl_action res, subaction, args
execute_kubectl_action = (res, action, args) ->
if not check_kubectl_input action, args
res.send "Could not execute `#{action} #{args}`"
return
res.send "Executing kubectl #{action} command..."
exec = require('child_process').exec
command = "kubectl #{action} #{args} --kubeconfig=./kubeconfig"
exec command, (error, stdout, stderror) ->
if stdout
res.send "```\n" + stdout + "\n```"
else
res.send "Sorry that didn't work"
if error
res.send (error.stack)
if stderror
res.send (stderror)
robot.respond /(\w+)\s*(\w+)\s*(.*)/i, (res) ->
requested_action = res.match[1]
requested_subaction = res.match[2]
requested_args = res.match[3]
maintainers = robot.brain.get(MAINTAINERS_KEY) || []
if maintainers.length is 0
console.log "Fetching maintainers..."
maintainers_url = "https://raw.githubusercontent.com/kubevirt/project-infra/main/OWNERS_ALIASES"
robot.http(maintainers_url)
.get() (err, getRes, body) ->
if err
console.log("Could not read ci-maintainers from #{maintainers_url}: #{err}")
return
data = YAML.parse body
maintainers = data.aliases['ci-maintainers']
console.log "Fetched maintainers: #{maintainers}"
robot.brain.set(MAINTAINERS_KEY, maintainers)
execute_action res, requested_action, requested_subaction, requested_args
else
execute_action res, requested_action, requested_subaction, requested_args
robot.router.get '/health', (req, res) ->
res.setHeader 'content-type', 'text/plain'
res.end 'OK'
|
[
{
"context": "TRRanking.m\n\n The MIT License\n\n Copyright (c) 2008 Lachie Cox\n\n Permission is hereby granted, free of charge, t",
"end": 558,
"score": 0.9998539090156555,
"start": 548,
"tag": "NAME",
"value": "Lachie Cox"
}
] | node_modules/coffee-trace/lib/quicksilver_score.coffee | jpietrok-pnnl/wiki | 5 | ###
** Modified Quicksilver so that that it doesn't extends the `String` object's prototype.
qs_score - Quicksilver Score
A port of the Quicksilver string ranking algorithm
score("hello world", "axl") //=> 0.0
score("hello world", "ow") //=> 0.6
score("hello world", "hello world") //=> 1.0
Tested in Firefox 2 and Safari 3
The Quicksilver code is available here
http://code.google.com/p/blacktree-alchemy/
http://blacktree-alchemy.googlecode.com/svn/trunk/Crucible/Code/NSString+BLTRRanking.m
The MIT License
Copyright (c) 2008 Lachie Cox
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
class ScoredString extends String
ScoredString::score = (abbreviation, offset) ->
offset = offset or 0 # TODO: I think this is unused... remove
return 0.9 if abbreviation.length is 0
return 0.0 if abbreviation.length > @.length
i = abbreviation.length
while i > 0
sub_abbreviation = abbreviation.substring(0, i)
index = @.indexOf(sub_abbreviation)
continue if index < 0
continue if index + abbreviation.length > @.length + offset
next_string = @.substring(index + sub_abbreviation.length)
next_abbreviation = null
if i >= abbreviation.length
next_abbreviation = ""
else
next_abbreviation = abbreviation.substring(i)
remaining_score = new ScoredString(next_string).score(next_abbreviation, offset + index)
if remaining_score > 0
score = @.length - next_string.length
unless index is 0
j = 0
c = @.charCodeAt(index - 1)
if c is 32 or c is 9
j = (index - 2)
while j >= 0
c = @.charCodeAt(j)
score -= ((if (c is 32 or c is 9) then 1 else 0.15))
j--
else
score -= index
score += remaining_score * next_string.length
score /= @.length
return score
i--
0.0
module.exports = ScoredString | 22326 | ###
** Modified Quicksilver so that that it doesn't extends the `String` object's prototype.
qs_score - Quicksilver Score
A port of the Quicksilver string ranking algorithm
score("hello world", "axl") //=> 0.0
score("hello world", "ow") //=> 0.6
score("hello world", "hello world") //=> 1.0
Tested in Firefox 2 and Safari 3
The Quicksilver code is available here
http://code.google.com/p/blacktree-alchemy/
http://blacktree-alchemy.googlecode.com/svn/trunk/Crucible/Code/NSString+BLTRRanking.m
The MIT License
Copyright (c) 2008 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
class ScoredString extends String
ScoredString::score = (abbreviation, offset) ->
offset = offset or 0 # TODO: I think this is unused... remove
return 0.9 if abbreviation.length is 0
return 0.0 if abbreviation.length > @.length
i = abbreviation.length
while i > 0
sub_abbreviation = abbreviation.substring(0, i)
index = @.indexOf(sub_abbreviation)
continue if index < 0
continue if index + abbreviation.length > @.length + offset
next_string = @.substring(index + sub_abbreviation.length)
next_abbreviation = null
if i >= abbreviation.length
next_abbreviation = ""
else
next_abbreviation = abbreviation.substring(i)
remaining_score = new ScoredString(next_string).score(next_abbreviation, offset + index)
if remaining_score > 0
score = @.length - next_string.length
unless index is 0
j = 0
c = @.charCodeAt(index - 1)
if c is 32 or c is 9
j = (index - 2)
while j >= 0
c = @.charCodeAt(j)
score -= ((if (c is 32 or c is 9) then 1 else 0.15))
j--
else
score -= index
score += remaining_score * next_string.length
score /= @.length
return score
i--
0.0
module.exports = ScoredString | true | ###
** Modified Quicksilver so that that it doesn't extends the `String` object's prototype.
qs_score - Quicksilver Score
A port of the Quicksilver string ranking algorithm
score("hello world", "axl") //=> 0.0
score("hello world", "ow") //=> 0.6
score("hello world", "hello world") //=> 1.0
Tested in Firefox 2 and Safari 3
The Quicksilver code is available here
http://code.google.com/p/blacktree-alchemy/
http://blacktree-alchemy.googlecode.com/svn/trunk/Crucible/Code/NSString+BLTRRanking.m
The MIT License
Copyright (c) 2008 PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
class ScoredString extends String
ScoredString::score = (abbreviation, offset) ->
offset = offset or 0 # TODO: I think this is unused... remove
return 0.9 if abbreviation.length is 0
return 0.0 if abbreviation.length > @.length
i = abbreviation.length
while i > 0
sub_abbreviation = abbreviation.substring(0, i)
index = @.indexOf(sub_abbreviation)
continue if index < 0
continue if index + abbreviation.length > @.length + offset
next_string = @.substring(index + sub_abbreviation.length)
next_abbreviation = null
if i >= abbreviation.length
next_abbreviation = ""
else
next_abbreviation = abbreviation.substring(i)
remaining_score = new ScoredString(next_string).score(next_abbreviation, offset + index)
if remaining_score > 0
score = @.length - next_string.length
unless index is 0
j = 0
c = @.charCodeAt(index - 1)
if c is 32 or c is 9
j = (index - 2)
while j >= 0
c = @.charCodeAt(j)
score -= ((if (c is 32 or c is 9) then 1 else 0.15))
j--
else
score -= index
score += remaining_score * next_string.length
score /= @.length
return score
i--
0.0
module.exports = ScoredString |
[
{
"context": " data: {\n email: email,\n password: password, \n username: username,\n },\n dat",
"end": 6893,
"score": 0.9989948868751526,
"start": 6885,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "l,\n password: password, \n ... | jotleaf/static/js/views_base.coffee | reverie/jotleaf.com | 1 | # views_base.coffee is for the 'base' models that other Backbone.Views derive
# from, and methods/data shared between views
makeUrl = (content, render) ->
assert URLs[content]
URLs[content]
makeMessage = (msgName) ->
assert msgs[msgName]
msgs[msgName]
URLs = {
registration_register: '/account/register/'
auth_login: '/account/login/'
settings: '/account/settings/'
quick_page: '/new'
home: '/home/'
pages: '/pages/'
explore: '#'
}
msgs = {
password_reset_success: "We have sent you an email with a link to reset your password. Please check your email and click the link to continue."
password_reset_confirm_success: "Your password has been reset! You may now log in."
registration_success: "Congratulations, you have successfully registered!"
registration_error: "Sorry, something went wrong with your request! Please try again."
logout_success: "Successfully logged out!"
logout_error: "Log out failed. Try reloading the page."
page_claim_yes_success: "You have claimed the page."
page_claim_no_success: "The page has been disowned into oblivion."
}
class JLView extends Backbone.View
# JLView has our custom view methods that both top-level-page views, and
# sub-component views, might both want to use
$findOne: =>
@$el.findOne(arguments...)
# todo: rename 'commonContext'
commonContext: =>
return {
STATIC_URL: JL_CONFIG.STATIC_URL
url: -> makeUrl # is there a better way?
isAuthenticated: JL.AuthState.isAuthenticated()
username: JL.AuthState.getUsername()
}
# Useful for Moustache templates
contextToArg: (method) =>
return (a...) ->
args = [@]
args = args.concat(a)
return method(args...)
_truncatedContent: (ctx, maxLen=40) =>
if ctx.content.length > maxLen
return ctx.content.slice(0, maxLen) + '...'
else
return ctx.content
_isYou: (ctx) =>
return ctx.creator_id and (ctx.creator_id == JL.AuthState.getUserId())
listenTo: (obj, name, callback) ->
@_listeners ||= {}
id = obj._listenerId || (obj._listenerId = _.uniqueId('l'))
@_listeners[id] = [obj, name, callback]
if obj instanceof jQuery
obj.on(name, callback)
else
if name instanceof Object
callback = @
obj.on(name, callback, @)
@
stopListening: (obj, name, callback) ->
if !@_listeners
return
if obj
if obj instanceof jQuery
obj.off(name, callback)
else
if name instanceof Object
callback = @
obj.off(name, callback, @)
if !(name || callback)
delete @_listeners[obj._listenerId]
else
if name instanceof Object
callback = @
for [obj, storedName, storedCallback] in _.values(@_listeners)
if obj instanceof jQuery
# limit scope of jquery off calls to events
# and callback we have bound
obj.off(storedName, storedCallback)
else
obj.off(name, callback, @)
@_listeners = {}
@
destroy: =>
log "destroying jlview", @
log "calling unbind", @
@unbind()
@remove()
unbind: =>
log "base unbind", @
class TopView extends JLView
# TopView is the base for top-level page views, ie views that fill the
# whole window, as opposed to sub-components.
documentTitle: 'Jotleaf'
wantsToHandle: (options) =>
# Used by internal navigation system.
# Does this (top-level) page view want to handle `options`, as opposed
# to a new full-page navigation?
log "can't handle it"
return false
handle: ->
throw NotImplemented
makeSubviewInContainer: (SubView, selector, options={}) =>
options.el = @$findOne(selector)
options.topView = @
view = new SubView(options)
@addSubView(view)
return view
addSubView: (subview) =>
@subviews ||= []
@subviews.push(subview)
makeMainWebsiteView: (tplName, context={}) =>
baseContext = @commonContext()
base = ich.tpl_main_website(baseContext)
fullContext = _.extend(baseContext, context)
content = ich[tplName](fullContext)
@content = base.findOne('.content')
@content.append(content)
@setElement(base)
@_messagesView = @makeSubviewInContainer(MessagesView, '.messages-container')
@makeSubviewInContainer(ClaimsView, '.claim-notifications-container')
if JL.queuedMessages.length
@_messagesView.showMessages(JL.queuedMessages)
JL.queuedMessages = []
_stringsToMessages:(msgStrings, type) =>
if not msgStrings.length
return []
messageObjects = []
for msg in msgStrings
messageObjects.push({
tags: type,
text: msg
})
return messageObjects
queueSuccessMessages: (msgs) =>
msgObjects = @_stringsToMessages(msgs, "success-message")
JL.queuedMessages = _.union(JL.queuedMessages, msgObjects)
showSuccessMessages: (msgs) =>
@_messagesView.showMessages(@_stringsToMessages(msgs, "success-message"))
queueErrorMessages: (msgs) =>
msgObjects = @_stringsToMessages(msgs, "error-message")
JL.queuedMessages = _.union(JL.queuedMessages, msgObjects)
showErrorMessages: (msgs) =>
@_messagesView.showMessages(@_stringsToMessages(msgs, "error-message"))
queueSuccessMessage: (msg) =>
@queueSuccessMessages([msg])
showSuccessMessage: (msg) =>
@showSuccessMessages([msg])
queueErrorMessage: (msg) =>
@queueErrorMessages([msg])
showErrorMessage: (msg) =>
@showErrorMessages([msg])
setFirstFocus: (selector) =>
@listenTo(@, 'dom-insert', =>
if document.activeElement.tagName == 'BODY'
@$findOne(selector).focus()
)
destroy: =>
if @subviews
while @subviews.length
subview = @subviews.pop()
subview.destroy()
super
class BaseRegistration extends TopView
initialize: =>
@render()
errorContainer = @$findOne('.error-container')
form = @$findOne('form.registration-form')
@errorsView = new ErrorsView(form, errorContainer)
@setFirstFocus('input.username')
# ywot transfer JS
url = "#{JL_CONFIG.STATIC_URL}js/ywot_registration.js"
$.getScript(url)
render: =>
# subclasses must implement this!
throw NotImplemented
events: {
'submit form.registration-form': '_register'
}
_register: (e) =>
log "Submit from registration form detected", e
e.preventDefault()
form = $(e.target)
form.find('input').attr('disabled', 'disabled')
username = form.findOne('input.username').val()
email = form.findOne('input.email').val()
password = form.findOne('input.password').val()
button = form.findOne('input[type=submit]')
origVal = button.val()
@errorsView.clearErrors()
button.val('Registering...')
registration = $.ajax( {
url: '/xhr/account/register/',
type: "POST",
data: {
email: email,
password: password,
username: username,
},
dataType: "json"
cache: false,
})
registration.done((response) =>
if response.registration_successful
# mixpanel.alias call must come before setting AuthState, which
# triggers trackMixpanelUser.
assert response.user.id
mixpanel.alias(response.user.id)
mixpanel.track("New user signup")
JL.AuthState.setUser(response.user)
@queueSuccessMessage(makeMessage('registration_success'))
router.navigate('/home/', {trigger: true})
else
log "registration errors", response.errors
@errorsView.showErrors(response.errors)
button.val(origVal)
form.find('input').attr('disabled', false)
)
registration.fail((err)=>
button.val(origVal)
form.find('input').attr('disabled', false)
@errorsView.requestFailed(makeMessage('registration_error'))
)
| 52177 | # views_base.coffee is for the 'base' models that other Backbone.Views derive
# from, and methods/data shared between views
makeUrl = (content, render) ->
assert URLs[content]
URLs[content]
makeMessage = (msgName) ->
assert msgs[msgName]
msgs[msgName]
URLs = {
registration_register: '/account/register/'
auth_login: '/account/login/'
settings: '/account/settings/'
quick_page: '/new'
home: '/home/'
pages: '/pages/'
explore: '#'
}
msgs = {
password_reset_success: "We have sent you an email with a link to reset your password. Please check your email and click the link to continue."
password_reset_confirm_success: "Your password has been reset! You may now log in."
registration_success: "Congratulations, you have successfully registered!"
registration_error: "Sorry, something went wrong with your request! Please try again."
logout_success: "Successfully logged out!"
logout_error: "Log out failed. Try reloading the page."
page_claim_yes_success: "You have claimed the page."
page_claim_no_success: "The page has been disowned into oblivion."
}
class JLView extends Backbone.View
    # JLView has our custom view methods that both top-level-page views, and
    # sub-component views, might both want to use
    # Find exactly one element within this view's element; delegates to a
    # project jQuery extension ($.fn.findOne).
    $findOne: =>
        @$el.findOne(arguments...)
    # todo: rename 'commonContext'
    # Template context shared by every view: static URL prefix, the URL
    # lookup lambda, and the current auth state.
    commonContext: =>
        return {
            STATIC_URL: JL_CONFIG.STATIC_URL
            url: -> makeUrl # is there a better way?
            isAuthenticated: JL.AuthState.isAuthenticated()
            username: JL.AuthState.getUsername()
        }
    # Useful for Moustache templates
    # Wraps `method` so that when the template calls it, the template's
    # `this` is prepended to the argument list.
    contextToArg: (method) =>
        return (a...) ->
            args = [@]
            args = args.concat(a)
            return method(args...)
    # Returns ctx.content truncated to maxLen characters with a '...'
    # suffix; content at or under the limit is returned unchanged.
    _truncatedContent: (ctx, maxLen=40) =>
        if ctx.content.length > maxLen
            return ctx.content.slice(0, maxLen) + '...'
        else
            return ctx.content
    # True when ctx has a creator_id equal to the logged-in user's id.
    _isYou: (ctx) =>
        return ctx.creator_id and (ctx.creator_id == JL.AuthState.getUserId())
    # Overrides Backbone's listenTo: records the binding in @_listeners
    # (keyed by a lazily-assigned _listenerId) so destroy/stopListening can
    # tear it down, and supports jQuery objects as event sources.
    # NOTE(review): a second listenTo on the same (obj) overwrites the
    # previous @_listeners entry for that object — presumably only one
    # binding per object is expected; verify against callers.
    listenTo: (obj, name, callback) ->
        @_listeners ||= {}
        id = obj._listenerId || (obj._listenerId = _.uniqueId('l'))
        @_listeners[id] = [obj, name, callback]
        if obj instanceof jQuery
            obj.on(name, callback)
        else
            # Backbone also accepts an {event: callback} map; in that form
            # the third .on argument is the context, hence callback = @.
            if name instanceof Object
                callback = @
            obj.on(name, callback, @)
        @
    # Overrides Backbone's stopListening. With `obj` given, unbinds from
    # that object only; with no arguments, unbinds everything recorded in
    # @_listeners and resets the registry.
    stopListening: (obj, name, callback) ->
        if !@_listeners
            return
        if obj
            if obj instanceof jQuery
                obj.off(name, callback)
            else
                if name instanceof Object
                    callback = @
                obj.off(name, callback, @)
            # Only forget the whole registration when no narrower
            # name/callback filter was requested.
            if !(name || callback)
                delete @_listeners[obj._listenerId]
        else
            if name instanceof Object
                callback = @
            for [obj, storedName, storedCallback] in _.values(@_listeners)
                if obj instanceof jQuery
                    # limit scope of jquery off calls to events
                    # and callback we have bound
                    obj.off(storedName, storedCallback)
                else
                    # NOTE(review): this passes the outer `name`/`callback`
                    # (undefined in the no-argument call) rather than the
                    # stored ones — for Backbone that removes all events
                    # bound with context @; confirm that is intended.
                    obj.off(name, callback, @)
            @_listeners = {}
        @
    # Tears the view down: unbind hooks, then Backbone's remove().
    destroy: =>
        log "destroying jlview", @
        log "calling unbind", @
        @unbind()
        @remove()
    # Hook for subclasses; base implementation only logs.
    unbind: =>
        log "base unbind", @
class TopView extends JLView
    # TopView is the base for top-level page views, ie views that fill the
    # whole window, as opposed to sub-components.
    documentTitle: 'Jotleaf'
    wantsToHandle: (options) =>
        # Used by internal navigation system.
        # Does this (top-level) page view want to handle `options`, as opposed
        # to a new full-page navigation?
        log "can't handle it"
        return false
    # Subclasses that return true from wantsToHandle must implement this.
    handle: ->
        throw NotImplemented
    # Instantiates SubView inside the element matched by `selector`,
    # registers it for teardown via addSubView, and returns it.
    makeSubviewInContainer: (SubView, selector, options={}) =>
        options.el = @$findOne(selector)
        options.topView = @
        view = new SubView(options)
        @addSubView(view)
        return view
    # Records a subview so destroy() can tear it down with this view.
    addSubView: (subview) =>
        @subviews ||= []
        @subviews.push(subview)
    # Renders the main-website chrome (tpl_main_website), injects the page
    # template `tplName` into its .content element, wires up the messages
    # and claims subviews, and flushes any messages queued before this
    # page was built.
    makeMainWebsiteView: (tplName, context={}) =>
        baseContext = @commonContext()
        base = ich.tpl_main_website(baseContext)
        fullContext = _.extend(baseContext, context)
        content = ich[tplName](fullContext)
        @content = base.findOne('.content')
        @content.append(content)
        @setElement(base)
        @_messagesView = @makeSubviewInContainer(MessagesView, '.messages-container')
        @makeSubviewInContainer(ClaimsView, '.claim-notifications-container')
        if JL.queuedMessages.length
            @_messagesView.showMessages(JL.queuedMessages)
            JL.queuedMessages = []
    # Converts plain message strings into {tags, text} objects understood
    # by MessagesView; `type` becomes the tags value.
    _stringsToMessages:(msgStrings, type) =>
        if not msgStrings.length
            return []
        messageObjects = []
        for msg in msgStrings
            messageObjects.push({
                tags: type,
                text: msg
            })
        return messageObjects
    # queue*: stash messages on the global JL.queuedMessages so the NEXT
    # page renders them. show*: display them on THIS page immediately.
    queueSuccessMessages: (msgs) =>
        msgObjects = @_stringsToMessages(msgs, "success-message")
        JL.queuedMessages = _.union(JL.queuedMessages, msgObjects)
    showSuccessMessages: (msgs) =>
        @_messagesView.showMessages(@_stringsToMessages(msgs, "success-message"))
    queueErrorMessages: (msgs) =>
        msgObjects = @_stringsToMessages(msgs, "error-message")
        JL.queuedMessages = _.union(JL.queuedMessages, msgObjects)
    showErrorMessages: (msgs) =>
        @_messagesView.showMessages(@_stringsToMessages(msgs, "error-message"))
    # Single-message conveniences over the plural forms above.
    queueSuccessMessage: (msg) =>
        @queueSuccessMessages([msg])
    showSuccessMessage: (msg) =>
        @showSuccessMessages([msg])
    queueErrorMessage: (msg) =>
        @queueErrorMessages([msg])
    showErrorMessage: (msg) =>
        @showErrorMessages([msg])
    # Focuses `selector` once the view enters the DOM ('dom-insert'),
    # unless something other than <body> already holds focus.
    setFirstFocus: (selector) =>
        @listenTo(@, 'dom-insert', =>
            if document.activeElement.tagName == 'BODY'
                @$findOne(selector).focus()
        )
    # Destroys registered subviews (LIFO) before the JLView teardown.
    destroy: =>
        if @subviews
            while @subviews.length
                subview = @subviews.pop()
                subview.destroy()
        super
class BaseRegistration extends TopView
    # Base class for registration pages; subclasses supply render().
    initialize: =>
        @render()
        errorContainer = @$findOne('.error-container')
        form = @$findOne('form.registration-form')
        @errorsView = new ErrorsView(form, errorContainer)
        @setFirstFocus('input.username')
        # ywot transfer JS
        url = "#{JL_CONFIG.STATIC_URL}js/ywot_registration.js"
        $.getScript(url)
    render: =>
        # subclasses must implement this!
        throw NotImplemented
    events: {
        'submit form.registration-form': '_register'
    }
    # Handles the registration form submit: POSTs the credentials, then
    # either advances to /home/ or surfaces server-side validation errors
    # and re-enables the form.
    _register: (e) =>
        log "Submit from registration form detected", e
        e.preventDefault()
        form = $(e.target)
        # Disable inputs to prevent double submission while in flight.
        form.find('input').attr('disabled', 'disabled')
        username = form.findOne('input.username').val()
        email = form.findOne('input.email').val()
        password = form.findOne('input.password').val()
        button = form.findOne('input[type=submit]')
        origVal = button.val()
        @errorsView.clearErrors()
        button.val('Registering...')
        registration = $.ajax( {
            url: '/xhr/account/register/',
            type: "POST",
            data: {
                email: email,
                # Fix: a redacted "<PASSWORD>" placeholder had replaced the
                # password variable here, which is not valid code.
                password: password,
                username: username,
            },
            dataType: "json"
            cache: false,
        })
        registration.done((response) =>
            if response.registration_successful
                # mixpanel.alias call must come before setting AuthState, which
                # triggers trackMixpanelUser.
                assert response.user.id
                mixpanel.alias(response.user.id)
                mixpanel.track("New user signup")
                JL.AuthState.setUser(response.user)
                @queueSuccessMessage(makeMessage('registration_success'))
                router.navigate('/home/', {trigger: true})
            else
                log "registration errors", response.errors
                @errorsView.showErrors(response.errors)
                button.val(origVal)
                form.find('input').attr('disabled', false)
        )
        registration.fail((err)=>
            button.val(origVal)
            form.find('input').attr('disabled', false)
            @errorsView.requestFailed(makeMessage('registration_error'))
        )
# views_base.coffee is for the 'base' models that other Backbone.Views derive
# from, and methods/data shared between views
makeUrl = (content, render) ->
assert URLs[content]
URLs[content]
makeMessage = (msgName) ->
assert msgs[msgName]
msgs[msgName]
URLs = {
registration_register: '/account/register/'
auth_login: '/account/login/'
settings: '/account/settings/'
quick_page: '/new'
home: '/home/'
pages: '/pages/'
explore: '#'
}
msgs = {
password_reset_success: "We have sent you an email with a link to reset your password. Please check your email and click the link to continue."
password_reset_confirm_success: "Your password has been reset! You may now log in."
registration_success: "Congratulations, you have successfully registered!"
registration_error: "Sorry, something went wrong with your request! Please try again."
logout_success: "Successfully logged out!"
logout_error: "Log out failed. Try reloading the page."
page_claim_yes_success: "You have claimed the page."
page_claim_no_success: "The page has been disowned into oblivion."
}
class JLView extends Backbone.View
# JLView has our custom view methods that both top-level-page views, and
# sub-component views, might both want to use
$findOne: =>
@$el.findOne(arguments...)
# todo: rename 'commonContext'
commonContext: =>
return {
STATIC_URL: JL_CONFIG.STATIC_URL
url: -> makeUrl # is there a better way?
isAuthenticated: JL.AuthState.isAuthenticated()
username: JL.AuthState.getUsername()
}
# Useful for Moustache templates
contextToArg: (method) =>
return (a...) ->
args = [@]
args = args.concat(a)
return method(args...)
_truncatedContent: (ctx, maxLen=40) =>
if ctx.content.length > maxLen
return ctx.content.slice(0, maxLen) + '...'
else
return ctx.content
_isYou: (ctx) =>
return ctx.creator_id and (ctx.creator_id == JL.AuthState.getUserId())
listenTo: (obj, name, callback) ->
@_listeners ||= {}
id = obj._listenerId || (obj._listenerId = _.uniqueId('l'))
@_listeners[id] = [obj, name, callback]
if obj instanceof jQuery
obj.on(name, callback)
else
if name instanceof Object
callback = @
obj.on(name, callback, @)
@
stopListening: (obj, name, callback) ->
if !@_listeners
return
if obj
if obj instanceof jQuery
obj.off(name, callback)
else
if name instanceof Object
callback = @
obj.off(name, callback, @)
if !(name || callback)
delete @_listeners[obj._listenerId]
else
if name instanceof Object
callback = @
for [obj, storedName, storedCallback] in _.values(@_listeners)
if obj instanceof jQuery
# limit scope of jquery off calls to events
# and callback we have bound
obj.off(storedName, storedCallback)
else
obj.off(name, callback, @)
@_listeners = {}
@
destroy: =>
log "destroying jlview", @
log "calling unbind", @
@unbind()
@remove()
unbind: =>
log "base unbind", @
class TopView extends JLView
# TopView is the base for top-level page views, ie views that fill the
# whole window, as opposed to sub-components.
documentTitle: 'Jotleaf'
wantsToHandle: (options) =>
# Used by internal navigation system.
# Does this (top-level) page view want to handle `options`, as opposed
# to a new full-page navigation?
log "can't handle it"
return false
handle: ->
throw NotImplemented
makeSubviewInContainer: (SubView, selector, options={}) =>
options.el = @$findOne(selector)
options.topView = @
view = new SubView(options)
@addSubView(view)
return view
addSubView: (subview) =>
@subviews ||= []
@subviews.push(subview)
makeMainWebsiteView: (tplName, context={}) =>
baseContext = @commonContext()
base = ich.tpl_main_website(baseContext)
fullContext = _.extend(baseContext, context)
content = ich[tplName](fullContext)
@content = base.findOne('.content')
@content.append(content)
@setElement(base)
@_messagesView = @makeSubviewInContainer(MessagesView, '.messages-container')
@makeSubviewInContainer(ClaimsView, '.claim-notifications-container')
if JL.queuedMessages.length
@_messagesView.showMessages(JL.queuedMessages)
JL.queuedMessages = []
_stringsToMessages:(msgStrings, type) =>
if not msgStrings.length
return []
messageObjects = []
for msg in msgStrings
messageObjects.push({
tags: type,
text: msg
})
return messageObjects
queueSuccessMessages: (msgs) =>
msgObjects = @_stringsToMessages(msgs, "success-message")
JL.queuedMessages = _.union(JL.queuedMessages, msgObjects)
showSuccessMessages: (msgs) =>
@_messagesView.showMessages(@_stringsToMessages(msgs, "success-message"))
queueErrorMessages: (msgs) =>
msgObjects = @_stringsToMessages(msgs, "error-message")
JL.queuedMessages = _.union(JL.queuedMessages, msgObjects)
showErrorMessages: (msgs) =>
@_messagesView.showMessages(@_stringsToMessages(msgs, "error-message"))
queueSuccessMessage: (msg) =>
@queueSuccessMessages([msg])
showSuccessMessage: (msg) =>
@showSuccessMessages([msg])
queueErrorMessage: (msg) =>
@queueErrorMessages([msg])
showErrorMessage: (msg) =>
@showErrorMessages([msg])
setFirstFocus: (selector) =>
@listenTo(@, 'dom-insert', =>
if document.activeElement.tagName == 'BODY'
@$findOne(selector).focus()
)
destroy: =>
if @subviews
while @subviews.length
subview = @subviews.pop()
subview.destroy()
super
class BaseRegistration extends TopView
    # Base class for registration pages; subclasses supply render().
    initialize: =>
        @render()
        errorContainer = @$findOne('.error-container')
        form = @$findOne('form.registration-form')
        @errorsView = new ErrorsView(form, errorContainer)
        @setFirstFocus('input.username')
        # ywot transfer JS
        url = "#{JL_CONFIG.STATIC_URL}js/ywot_registration.js"
        $.getScript(url)
    render: =>
        # subclasses must implement this!
        throw NotImplemented
    events: {
        'submit form.registration-form': '_register'
    }
    # Handles the registration form submit: POSTs the credentials, then
    # either advances to /home/ or surfaces server-side validation errors
    # and re-enables the form.
    _register: (e) =>
        log "Submit from registration form detected", e
        e.preventDefault()
        form = $(e.target)
        # Disable inputs to prevent double submission while in flight.
        form.find('input').attr('disabled', 'disabled')
        username = form.findOne('input.username').val()
        email = form.findOne('input.email').val()
        password = form.findOne('input.password').val()
        button = form.findOne('input[type=submit]')
        origVal = button.val()
        @errorsView.clearErrors()
        button.val('Registering...')
        registration = $.ajax( {
            url: '/xhr/account/register/',
            type: "POST",
            data: {
                email: email,
                # Fix: a redaction marker ("PI:PASSWORD:...END_PI") had
                # replaced the password variable here; not valid code.
                password: password,
                username: username,
            },
            dataType: "json"
            cache: false,
        })
        registration.done((response) =>
            if response.registration_successful
                # mixpanel.alias call must come before setting AuthState, which
                # triggers trackMixpanelUser.
                assert response.user.id
                mixpanel.alias(response.user.id)
                mixpanel.track("New user signup")
                JL.AuthState.setUser(response.user)
                @queueSuccessMessage(makeMessage('registration_success'))
                router.navigate('/home/', {trigger: true})
            else
                log "registration errors", response.errors
                @errorsView.showErrors(response.errors)
                button.val(origVal)
                form.find('input').attr('disabled', false)
        )
        registration.fail((err)=>
            button.val(origVal)
            form.find('input').attr('disabled', false)
            @errorsView.requestFailed(makeMessage('registration_error'))
        )
|
[
{
"context": "ect)', ->\n fn = -> 'foo'\n fn.key = 'value'\n expect(up.util.isOptions(fn)).toBe(false",
"end": 42751,
"score": 0.6574239730834961,
"start": 42746,
"tag": "KEY",
"value": "value"
},
{
"context": "ect)', ->\n fn = -> 'foo'\n fn.key = '... | spec_app/spec/javascripts/up/util_spec.js.coffee | ktec/unpoly | 0 | describe 'up.util', ->
u = up.util
describe 'JavaScript functions', ->
# describe 'up.util.flatMap', ->
#
# it 'collects the Array results of the given map function, then concatenates the result arrays into one flat array', ->
# fun = (x) -> [x, x]
# result = up.util.flatMap([1, 2, 3], fun)
# expect(result).toEqual([1, 1, 2, 2, 3, 3])
describe 'up.util.uniq', ->
it 'returns the given array with duplicates elements removed', ->
input = [1, 2, 1, 1, 3]
result = up.util.uniq(input)
expect(result).toEqual [1, 2, 3]
it 'works on DOM elements', ->
one = document.createElement("div")
two = document.createElement("div")
input = [one, one, two, two]
result = up.util.uniq(input)
expect(result).toEqual [one, two]
it 'preserves insertion order', ->
input = [1, 2, 1]
result = up.util.uniq(input)
expect(result).toEqual [1, 2]
describe 'up.util.uniqBy', ->
it 'returns the given array with duplicate elements removed, calling the given function to determine value for uniqueness', ->
input = ["foo", "bar", "apple", 'orange', 'banana']
result = up.util.uniqBy(input, (element) -> element.length)
expect(result).toEqual ['foo', 'apple', 'orange']
it 'accepts a property name instead of a function, which collects that property from each item to compute uniquness', ->
input = ["foo", "bar", "apple", 'orange', 'banana']
result = up.util.uniqBy(input, 'length')
expect(result).toEqual ['foo', 'apple', 'orange']
# describe 'up.util.parsePath', ->
#
# it 'parses a plain name', ->
# path = up.util.parsePath("foo")
# expect(path).toEqual ['foo']
#
# it 'considers underscores to be part of a name', ->
# path = up.util.parsePath("foo_bar")
# expect(path).toEqual ['foo_bar']
#
# it 'considers dashes to be part of a name', ->
# path = up.util.parsePath("foo-bar")
# expect(path).toEqual ['foo-bar']
#
# it 'parses dot-separated names into multiple path segments', ->
# path = up.util.parsePath('foo.bar.baz')
# expect(path).toEqual ['foo', 'bar', 'baz']
#
# it 'parses nested params notation with square brackets', ->
# path = up.util.parsePath('user[account][email]')
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'parses double quotes in square brackets', ->
# path = up.util.parsePath('user["account"]["email"]')
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'parses single quotes in square brackets', ->
# path = up.util.parsePath("user['account']['email']")
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'allows square brackets inside quotes', ->
# path = up.util.parsePath("element['a[up-href]']")
# expect(path).toEqual ['element', 'a[up-href]']
#
# it 'allows single quotes inside double quotes', ->
# path = up.util.parsePath("element[\"a[up-href='foo']\"]")
# expect(path).toEqual ['element', "a[up-href='foo']"]
#
# it 'allows double quotes inside single quotes', ->
# path = up.util.parsePath("element['a[up-href=\"foo\"]']")
# expect(path).toEqual ['element', 'a[up-href="foo"]']
#
# it 'allows dots in square brackets when it is quoted', ->
# path = up.util.parsePath('elements[".foo"]')
# expect(path).toEqual ['elements', '.foo']
#
# it 'allows different notation for each segment', ->
# path = up.util.parsePath('foo.bar[baz]["bam"][\'qux\']')
# expect(path).toEqual ['foo', 'bar', 'baz', 'bam', 'qux']
describe 'up.util.map', ->
it 'creates a new array of values by calling the given function on each item of the given array', ->
array = ["apple", "orange", "cucumber"]
mapped = up.util.map(array, (element) -> element.length)
expect(mapped).toEqual [5, 6, 8]
it 'accepts a property name instead of a function, which collects that property from each item', ->
array = ["apple", "orange", "cucumber"]
mapped = up.util.map(array, 'length')
expect(mapped).toEqual [5, 6, 8]
it 'passes the iteration index as second argument to the given function', ->
array = ["apple", "orange", "cucumber"]
mapped = up.util.map(array, (element, i) -> i)
expect(mapped).toEqual [0, 1, 2]
describe 'up.util.each', ->
it 'calls the given function once for each itm of the given array', ->
args = []
array = ["apple", "orange", "cucumber"]
up.util.each array, (item) -> args.push(item)
expect(args).toEqual ["apple", "orange", "cucumber"]
it 'passes the iteration index as second argument to the given function', ->
args = []
array = ["apple", "orange", "cucumber"]
up.util.each array, (item, index) -> args.push(index)
expect(args).toEqual [0, 1, 2]
describe 'up.util.select', ->
it 'returns an array of those elements in the given array for which the given function returns true', ->
array = ["foo", "orange", "cucumber"]
results = up.util.select array, (item) -> item.length > 3
expect(results).toEqual ['orange', 'cucumber']
it 'passes the iteration index as second argument to the given function', ->
array = ["apple", "orange", "cucumber", "banana"]
results = up.util.select array, (item, index) -> index % 2 == 0
expect(results).toEqual ['apple', 'cucumber']
it 'accepts a property name instead of a function, which checks that property from each item', ->
array = [ { name: 'a', prop: false }, { name: 'b', prop: true } ]
results = up.util.select array, 'prop'
expect(results).toEqual [{ name: 'b', prop: true }]
describe 'up.util.reject', ->
it 'returns an array of those elements in the given array for which the given function returns false', ->
array = ["foo", "orange", "cucumber"]
results = up.util.reject array, (item) -> item.length < 4
expect(results).toEqual ['orange', 'cucumber']
it 'passes the iteration index as second argument to the given function', ->
array = ["apple", "orange", "cucumber", "banana"]
results = up.util.reject array, (item, index) -> index % 2 == 0
expect(results).toEqual ['orange', 'banana']
it 'accepts a property name instead of a function, which checks that property from each item', ->
array = [ { name: 'a', prop: false }, { name: 'b', prop: true } ]
results = up.util.reject array, 'prop'
expect(results).toEqual [{ name: 'a', prop: false }]
describe 'up.util.previewable', ->
it 'wraps a function into a proxy function with an additional .promise attribute', ->
fun = -> 'return value'
proxy = up.util.previewable(fun)
expect(u.isFunction(proxy)).toBe(true)
expect(u.isPromise(proxy.promise)).toBe(true)
expect(proxy()).toEqual('return value')
it "resolves the proxy's .promise when the inner function returns", (done) ->
fun = -> 'return value'
proxy = up.util.previewable(fun)
callback = jasmine.createSpy('promise callback')
proxy.promise.then(callback)
u.nextFrame ->
expect(callback).not.toHaveBeenCalled()
proxy()
u.nextFrame ->
expect(callback).toHaveBeenCalledWith('return value')
done()
it "delays resolution of the proxy's .promise if the inner function returns a promise", (done) ->
funDeferred = u.newDeferred()
fun = -> funDeferred
proxy = up.util.previewable(fun)
callback = jasmine.createSpy('promise callback')
proxy.promise.then(callback)
proxy()
u.nextFrame ->
expect(callback).not.toHaveBeenCalled()
funDeferred.resolve('return value')
u.nextFrame ->
expect(callback).toHaveBeenCalledWith('return value')
done()
describe 'up.util.kebabCase', ->
it 'converts a string of multiple words from camel-case to kebap-case', ->
result = up.util.kebabCase('fooBarBaz')
expect(result).toEqual('foo-bar-baz')
it 'does not change a single word', ->
result = up.util.kebabCase('foo')
expect(result).toEqual('foo')
it 'downcases the first word when it starts with a capital letter', ->
result = up.util.kebabCase('FooBar')
expect(result).toEqual('foo-bar')
it 'does not change a string that is already in kebab-case', ->
result = up.util.kebabCase('foo-bar-baz')
expect(result).toEqual('foo-bar-baz')
describe 'up.util.camelCase', ->
it 'converts a string of multiple words from kebap-case to camel-case', ->
result = up.util.camelCase('foo-bar-baz')
expect(result).toEqual('fooBarBaz')
it 'does not change a single word', ->
result = up.util.camelCase('foo')
expect(result).toEqual('foo')
it 'downcases the first word when it starts with a capital letter', ->
result = up.util.camelCase('Foo-Bar')
expect(result).toEqual('fooBar')
it 'does not change a string that is already in camel-case', ->
result = up.util.camelCase('fooBarBaz')
expect(result).toEqual('fooBarBaz')
describe 'up.util.kebabCaseKeys', ->
it "converts the given object's keys from camel-case to kebab-case", ->
input =
fooBar: 'one'
barBaz: 'two'
result = up.util.kebabCaseKeys(input)
expect(result).toEqual
'foo-bar': 'one'
'bar-baz': 'two'
it "does not change an object whose keys are already kebab-case", ->
input =
'foo-bar': 'one'
'bar-baz': 'two'
result = up.util.kebabCaseKeys(input)
expect(result).toEqual
'foo-bar': 'one'
'bar-baz': 'two'
describe 'up.util.camelCaseKeys', ->
it "converts the given object's keys from kebab-case to camel-case", ->
input =
'foo-bar': 'one'
'bar-baz': 'two'
result = up.util.camelCaseKeys(input)
expect(result).toEqual
fooBar: 'one'
barBaz: 'two'
it "does not change an object whose keys are already camel-case", ->
input =
fooBar: 'one'
barBaz: 'two'
result = up.util.camelCaseKeys(input)
expect(result).toEqual
fooBar: 'one'
barBaz: 'two'
# describe 'up.util.lowerCaseKeys', ->
#
# it "returns a copy of the given object will all keys in lower case", ->
# input =
# 'A-B': 'C-D'
# 'E-F': 'G-H'
# result = up.util.lowerCaseKeys(input)
# expect(result).toEqual
# 'a-b': 'C-D'
# 'e-f': 'G-H'
describe 'up.util.DivertibleChain', ->
it "instantiates a task queue whose (2..n)th tasks can be changed by calling '.asap'", (done) ->
chain = new up.util.DivertibleChain()
timer1Spy = jasmine.createSpy('timer1 has been called')
timer1 = ->
timer1Spy()
u.promiseTimer(50)
timer2Spy = jasmine.createSpy('timer2 has been called')
timer2 = ->
timer2Spy()
u.promiseTimer(50)
timer3Spy = jasmine.createSpy('timer3 has been called')
timer3 = ->
timer3Spy()
u.promiseTimer(50)
timer4Spy = jasmine.createSpy('timer4 has been called')
timer4 = ->
timer4Spy()
u.promiseTimer(50)
chain.asap(timer1)
u.nextFrame ->
expect(timer1Spy).toHaveBeenCalled()
chain.asap(timer2)
u.nextFrame ->
# timer2 is still waiting for timer1 to finish
expect(timer2Spy).not.toHaveBeenCalled()
# Override the (2..n)th tasks. This unschedules timer2.
chain.asap(timer3, timer4)
u.setTimer 80, ->
expect(timer2Spy).not.toHaveBeenCalled()
expect(timer3Spy).toHaveBeenCalled()
u.setTimer 70, ->
expect(timer4Spy).toHaveBeenCalled()
done()
describe 'up.util.sequence', ->
it 'combines the given functions into a single function', ->
values = []
one = -> values.push('one')
two = -> values.push('two')
three = -> values.push('three')
sequence = up.util.sequence(one, two, three)
expect(values).toEqual([])
sequence()
expect(values).toEqual(['one', 'two', 'three'])
describe 'up.util.createElementFromHtml', ->
it 'parses a string that contains a serialized HTML document', ->
string = """
<html lang="foo">
<head>
<title>document title</title>
</head>
<body data-env='production'>
<div>line 1</div>
<div>line 2</div>
</body>
</html>
"""
element = up.util.createElementFromHtml(string)
expect(element.querySelector('head title').textContent).toEqual('document title')
expect(element.querySelector('body').getAttribute('data-env')).toEqual('production')
expect(element.querySelectorAll('body div').length).toBe(2)
expect(element.querySelectorAll('body div')[0].textContent).toEqual('line 1')
expect(element.querySelectorAll('body div')[1].textContent).toEqual('line 2')
it 'parses a string that contains carriage returns (bugfix)', ->
string = """
<html>\r
<body>\r
<div>line</div>\r
</body>\r
</html>\r
"""
$element = up.util.createElementFromHtml(string)
expect($element.querySelector('body')).toBeGiven()
expect($element.querySelector('body div').textContent).toEqual('line')
it 'does not run forever if a page has a <head> without a <title> (bugfix)', ->
html = """
<!doctype html>
<html>
<head>
<meta charset="utf-8" />
<meta name="format-detection" content="telephone=no">
<link href='/images/favicon.png' rel='shortcut icon' type='image/png'>
<meta name='viewport' content='width=device-width, initial-scale=1, maximum-scale=1, minimum-scale=1'>
<base href="/examples/update-fragment/" />
<link href='http://fonts.googleapis.com/css?family=Orbitron:400|Ubuntu+Mono:400,700|Source+Sans+Pro:300,400,700,400italic,700italic' rel='stylesheet' type='text/css'>
<link href="//netdna.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css" rel="stylesheet">
<link href="/stylesheets/example/all.css" rel="stylesheet" />
<script src="/javascripts/example.js"></script>
</head>
<body>
<div class="page">
<div class="story">
<h1>Full story</h1>
<p>Lorem ipsum dolor sit amet.</p>
<a href="preview.html" up-target=".story">
Read summary
</a>
</div>
</div>
</body>
</html>
"""
element = up.util.createElementFromHtml(html)
expect(element.querySelector("title")).toBeMissing()
expect(element.querySelector("h1").textContent).toEqual('Full story')
it 'can parse HTML without a <head>', ->
html = """
<html>
<body>
<h1>Full story</h1>
</body>
</html>
"""
element = up.util.createElementFromHtml(html)
expect(element.querySelector("title")).toBeMissing()
expect(element.querySelector("h1").textContent).toEqual('Full story')
it 'can parse a HTML fragment without a <body>', ->
html = """
<h1>Full story</h1>
"""
element = up.util.createElementFromHtml(html)
expect(element.querySelector("title")).toBeMissing()
expect(element.querySelector("h1").textContent).toEqual('Full story')
describe 'up.util.isFixed', ->
it 'returns true if the given element or one of its ancestors has a "fixed" CSS position', ->
$grandGrandParent = affix('.grand-parent')
$grandParent = $grandGrandParent.affix('.grand-parent')
$parent = $grandParent.affix('.parent')
$child = $parent.affix('.child')
$grandParent.css(position: 'fixed')
expect(up.util.isFixed($child)).toBe(true)
expect(up.util.isFixed($parent)).toBe(true)
expect(up.util.isFixed($grandParent)).toBe(true)
expect(up.util.isFixed($grandGrandParent)).toBe(false)
it 'returns false if the given element and its ancestors all have a non-"fixed" CSS position', ->
$element = affix('.element')
expect(up.util.isFixed($element)).toBe(false)
describe 'up.util.setTimer', ->
it 'calls the given function after waiting the given milliseconds', (done) ->
callback = jasmine.createSpy()
expectNotCalled = -> expect(callback).not.toHaveBeenCalled()
expectCalled = -> expect(callback).toHaveBeenCalled()
up.util.setTimer(100, callback)
expectNotCalled()
setTimeout(expectNotCalled, 50)
setTimeout(expectCalled, 50 + 75)
setTimeout(done, 50 + 75)
describe 'if the delay is zero', ->
it 'calls the given function in the next execution frame', ->
callback = jasmine.createSpy()
up.util.setTimer(0, callback)
expect(callback).not.toHaveBeenCalled()
setTimeout((-> expect(callback).toHaveBeenCalled()), 0)
# describe 'up.util.argNames', ->
#
# it 'returns an array of argument names for the given function', ->
# fun = ($element, data) ->
# expect(up.util.argNames(fun)).toEqual(['$element', 'data'])
describe 'up.util.trim', ->
it 'removes leading and trailing whitespace from the given string', ->
string = "\t\n\r abc \r\n\t"
expect(up.util.trim(string)).toEqual('abc')
describe 'up.util.only', ->
it 'returns a copy of the given object with only the given whitelisted properties', ->
original =
foo: 'foo-value'
bar: 'bar-value'
baz: 'baz-value'
bam: 'bam-value'
whitelisted = up.util.only(original, 'bar', 'bam')
expect(whitelisted).toEqual
bar: 'bar-value'
bam: 'bam-value'
# Show that original did not change
expect(original).toEqual
foo: 'foo-value'
bar: 'bar-value'
baz: 'baz-value'
bam: 'bam-value'
it 'does not add empty keys to the returned object if the given object does not have that key', ->
original =
foo: 'foo-value'
whitelisted = up.util.only(original, 'foo', 'bar')
expect(whitelisted).toHaveOwnProperty('foo')
expect(whitelisted).not.toHaveOwnProperty('bar')
describe 'up.util.readInlineStyle', ->
describe 'with a string as second argument', ->
it 'returns a CSS value string from an inline [style] attribute', ->
$div = affix('div').attr('style', 'background-color: #ff0000')
style = up.util.readInlineStyle($div, 'backgroundColor')
# Browsers convert colors to rgb() values, even IE11
expect(style).toEqual('rgb(255, 0, 0)')
it 'returns a blank value if the element does not have the given property in the [style] attribute', ->
$div = affix('div').attr('style', 'background-color: red')
style = up.util.readInlineStyle($div, 'color')
expect(style).toBeBlank()
it 'returns a blank value the given property is a computed property, but not in the [style] attribute', ->
$div = affix('div[class="red-background"]')
inlineStyle = up.util.readInlineStyle($div, 'backgroundColor')
computedStyle = up.util.readComputedStyle($div, 'backgroundColor')
expect(computedStyle).toEqual('rgb(255, 0, 0)')
expect(inlineStyle).toBeBlank()
describe 'with an array as second argument', ->
it 'returns an object with the given inline [style] properties', ->
$div = affix('div').attr('style', 'background-color: #ff0000; color: #0000ff')
style = up.util.readInlineStyle($div, ['backgroundColor', 'color'])
expect(style).toEqual
backgroundColor: 'rgb(255, 0, 0)'
color: 'rgb(0, 0, 255)'
it 'returns blank keys if the element does not have the given property in the [style] attribute', ->
$div = affix('div').attr('style', 'background-color: #ff0000')
style = up.util.readInlineStyle($div, ['backgroundColor', 'color'])
expect(style).toHaveOwnProperty('color')
expect(style.color).toBeBlank()
it 'returns a blank value the given property is a computed property, but not in the [style] attribute', ->
$div = affix('div[class="red-background"]')
inlineStyleHash = up.util.readInlineStyle($div, ['backgroundColor'])
computedBackground = up.util.readComputedStyle($div, 'backgroundColor')
expect(computedBackground).toEqual('rgb(255, 0, 0)')
expect(inlineStyleHash).toHaveOwnProperty('backgroundColor')
expect(inlineStyleHash.backgroundColor).toBeBlank()
describe 'up.util.writeInlineStyle', ->
it "sets the given style properties as the given element's [style] attribute", ->
$div = affix('div')
up.util.writeInlineStyle($div, { color: 'red', backgroundColor: 'blue' })
style = $div.attr('style')
expect(style).toContain('color: red')
expect(style).toContain('background-color: blue')
it "merges the given style properties into the given element's existing [style] value", ->
$div = affix('div[style="color: red"]')
up.util.writeInlineStyle($div, { backgroundColor: 'blue' })
style = $div.attr('style')
expect(style).toContain('color: red')
expect(style).toContain('background-color: blue')
it "converts the values of known length properties to px values automatically", ->
$div = affix('div')
up.util.writeInlineStyle($div, { paddingTop: 100 })
style = $div.attr('style')
expect(style).toContain('padding-top: 100px')
describe 'up.util.writeTemporaryStyle', ->

  # writeTemporaryStyle applies inline styles and returns an undo function
  # that restores the element's previous inline [style] value.
  it "sets the given inline styles and returns a function that will restore the previous inline styles", ->
    $div = affix('div[style="color: red"]')
    restore = up.util.writeTemporaryStyle($div, { color: 'blue' })
    expect($div.attr('style')).toContain('color: blue')
    expect($div.attr('style')).not.toContain('color: red')
    restore()
    expect($div.attr('style')).not.toContain('color: blue')
    expect($div.attr('style')).toContain('color: red')

  it "does not restore inherited styles", ->
    # The background color comes from the .red-background class, not from an
    # inline style, so restore() must not write it into the [style] attribute.
    $div = affix('div[class="red-background"]')
    restore = up.util.writeTemporaryStyle($div, { backgroundColor: 'blue' })
    expect($div.attr('style')).toContain('background-color: blue')
    restore()
    expect($div.attr('style')).not.toContain('background-color')
describe 'up.util.except', ->

  it 'returns a copy of the given object but omits the given blacklisted properties', ->
    original =
      foo: 'foo-value'
      bar: 'bar-value'
      baz: 'baz-value'
      bam: 'bam-value'
    # Renamed local from `whitelisted`: except() blacklists the given keys,
    # so this holds the *remaining* properties, not a whitelist.
    remaining = up.util.except(original, 'foo', 'baz')
    expect(remaining).toEqual
      bar: 'bar-value'
      bam: 'bam-value'
    # Show that original did not change
    expect(original).toEqual
      foo: 'foo-value'
      bar: 'bar-value'
      baz: 'baz-value'
      bam: 'bam-value'
describe 'up.util.selectorForElement', ->

  # These specs pin the priority order in which selectorForElement derives a
  # selector: [up-id] > #id > tag[name] > classes > [aria-label] > tag name.
  it "prefers using the element's 'up-id' attribute to using the element's ID", ->
    $element = affix('div[up-id=up-id-value]#id-value')
    expect(up.util.selectorForElement($element)).toBe('[up-id="up-id-value"]')

  it "prefers using the element's ID to using the element's name", ->
    $element = affix('div#id-value[name=name-value]')
    expect(up.util.selectorForElement($element)).toBe("#id-value")

  # IDs containing characters that are invalid in a #fragment selector must
  # fall back to an attribute selector.
  it "selects the ID with an attribute selector if the ID contains a slash", ->
    $element = affix('div').attr(id: 'foo/bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo/bar"]')

  it "selects the ID with an attribute selector if the ID contains a space", ->
    $element = affix('div').attr(id: 'foo bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo bar"]')

  it "selects the ID with an attribute selector if the ID contains a dot", ->
    $element = affix('div').attr(id: 'foo.bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo.bar"]')

  it "selects the ID with an attribute selector if the ID contains a quote", ->
    $element = affix('div').attr(id: 'foo"bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo\\"bar"]')

  it "prefers using the element's tagName + [name] to using the element's classes", ->
    $element = affix('input[name=name-value].class1.class2')
    expect(up.util.selectorForElement($element)).toBe('input[name="name-value"]')

  it "prefers using the element's classes to using the element's ARIA label", ->
    $element = affix('div.class1.class2[aria-label="ARIA label value"]')
    expect(up.util.selectorForElement($element)).toBe(".class1.class2")

  it 'does not use Unpoly classes to compose a class selector', ->
    # .up-current is an Unpoly-internal marker class and must be skipped
    $element = affix('div.class1.up-current.class2')
    expect(up.util.selectorForElement($element)).toBe(".class1.class2")

  it "prefers using the element's ARIA label to using the element's tag name", ->
    $element = affix('div[aria-label="ARIA label value"]')
    expect(up.util.selectorForElement($element)).toBe('[aria-label="ARIA label value"]')

  it "uses the element's tag name if no better description is available", ->
    $element = affix('div')
    expect(up.util.selectorForElement($element)).toBe("div")

  it 'escapes quotes in attribute selector values', ->
    $element = affix('div')
    $element.attr('aria-label', 'foo"bar')
    expect(up.util.selectorForElement($element)).toBe('[aria-label="foo\\"bar"]')
describe 'up.util.addTemporaryClass', ->

  # addTemporaryClass adds a class and returns an undo function that
  # restores the element's original className.
  it 'adds the given class to the given element', ->
    $element = affix('.foo.bar')
    element = $element.get(0)
    expect(element.className).toEqual('foo bar')
    up.util.addTemporaryClass(element, 'baz')
    expect(element.className).toEqual('foo bar baz')

  it 'returns a function that restores the original class', ->
    $element = affix('.foo.bar')
    element = $element.get(0)
    restoreClass = up.util.addTemporaryClass(element, 'baz')
    expect(element.className).toEqual('foo bar baz')
    restoreClass()
    expect(element.className).toEqual('foo bar')
describe 'up.util.castedAttr', ->

  # castedAttr reads an attribute and casts "true"/"false"/attribute-name
  # values to booleans; other strings pass through unchanged.
  it 'returns true if the attribute value is the string "true"', ->
    $element = affix('div').attr('foo', 'true')
    expect(up.util.castedAttr($element, 'foo')).toBe(true)

  it 'returns true if the attribute value is the name of the attribute', ->
    # Supports the HTML boolean-attribute convention, e.g. foo="foo"
    $element = affix('div').attr('foo', 'foo')
    expect(up.util.castedAttr($element, 'foo')).toBe(true)

  it 'returns false if the attribute value is the string "false"', ->
    $element = affix('div').attr('foo', 'false')
    expect(up.util.castedAttr($element, 'foo')).toBe(false)

  it 'returns undefined if the element has no such attribute', ->
    $element = affix('div')
    expect(up.util.castedAttr($element, 'foo')).toBe(undefined)

  it 'returns the attribute value unchanged if the value is some string', ->
    $element = affix('div').attr('foo', 'some text')
    expect(up.util.castedAttr($element, 'foo')).toBe('some text')
describe 'up.util.any', ->

  it 'returns true if an element in the array returns true for the given function', ->
    result = up.util.any [null, undefined, 'foo', ''], up.util.isPresent
    expect(result).toBe(true)

  it 'returns false if no element in the array returns true for the given function', ->
    result = up.util.any [null, undefined, ''], up.util.isPresent
    expect(result).toBe(false)

  it 'short-circuits once an element returns true', ->
    count = 0
    up.util.any [null, undefined, 'foo', ''], (element) ->
      count += 1
      up.util.isPresent(element)
    # 'foo' (the 3rd element) is present, so the 4th element is never visited
    expect(count).toBe(3)
describe 'up.util.all', ->

  it 'returns true if all element in the array returns true for the given function', ->
    result = up.util.all ['foo', 'bar', 'baz'], up.util.isPresent
    expect(result).toBe(true)

  it 'returns false if an element in the array returns false for the given function', ->
    result = up.util.all ['foo', 'bar', null, 'baz'], up.util.isPresent
    expect(result).toBe(false)

  it 'short-circuits once an element returns false', ->
    count = 0
    up.util.all ['foo', 'bar', '', 'baz'], (element) ->
      count += 1
      up.util.isPresent(element)
    # '' (the 3rd element) is blank, so the 4th element is never visited
    expect(count).toBe(3)

  it 'passes the iteration index as second argument to the given function', ->
    array = ["apple", "orange", "cucumber"]
    args = []
    up.util.all array, (item, index) ->
      args.push(index)
      true
    expect(args).toEqual [0, 1, 2]

  it 'accepts a property name instead of a function, which collects that property from each item', ->
    allTrue = [ { prop: true }, { prop: true } ]
    someFalse = [ { prop: true }, { prop: false } ]
    expect(up.util.all(allTrue, 'prop')).toBe(true)
    expect(up.util.all(someFalse, 'prop')).toBe(false)
# describe 'up.util.none', ->
#
# it 'returns true if no element in the array returns true for the given function', ->
# result = up.util.none ['foo', 'bar', 'baz'], up.util.isBlank
# expect(result).toBe(true)
#
# it 'returns false if an element in the array returns false for the given function', ->
# result = up.util.none ['foo', 'bar', null, 'baz'], up.util.isBlank
# expect(result).toBe(false)
#
# it 'short-circuits once an element returns true', ->
# count = 0
# up.util.none ['foo', 'bar', '', 'baz'], (element) ->
# count += 1
# up.util.isBlank(element)
# expect(count).toBe(3)
#
# it 'passes the iteration index as second argument to the given function', ->
# array = ["apple", "orange", "cucumber"]
# args = []
# up.util.none array, (item, index) ->
# args.push(index)
# false
# expect(args).toEqual [0, 1, 2]
#
# it 'accepts a property name instead of a function, which collects that property from each item', ->
# allFalse = [ { prop: false }, { prop: false } ]
# someTrue = [ { prop: true }, { prop: false } ]
# expect(up.util.none(allFalse, 'prop')).toBe(true)
# expect(up.util.none(someTrue, 'prop')).toBe(false)
# NOTE(review): this is a second `describe 'up.util.any'` — another suite for
# the same function exists earlier in this file. The specs here are distinct
# (index argument, property-name shorthand); consider merging the two suites.
describe 'up.util.any', ->

  it 'returns true if at least one element in the array returns true for the given function', ->
    result = up.util.any ['', 'bar', null], up.util.isPresent
    expect(result).toBe(true)

  it 'returns false if no element in the array returns true for the given function', ->
    result = up.util.any ['', null, undefined], up.util.isPresent
    expect(result).toBe(false)

  it 'passes the iteration index as second argument to the given function', ->
    array = ["apple", "orange", "cucumber"]
    args = []
    # The callback always returns false so every element is visited
    up.util.any array, (item, index) ->
      args.push(index)
      false
    expect(args).toEqual [0, 1, 2]

  it 'accepts a property name instead of a function, which collects that property from each item', ->
    someTrue = [ { prop: true }, { prop: false } ]
    allFalse = [ { prop: false }, { prop: false } ]
    expect(up.util.any(someTrue, 'prop')).toBe(true)
    expect(up.util.any(allFalse, 'prop')).toBe(false)
describe 'up.util.isBlank', ->

  it 'returns false for false', ->
    expect(up.util.isBlank(false)).toBe(false)

  it 'returns false for true', ->
    expect(up.util.isBlank(true)).toBe(false)

  it 'returns true for null', ->
    expect(up.util.isBlank(null)).toBe(true)

  it 'returns true for undefined', ->
    expect(up.util.isBlank(undefined)).toBe(true)

  it 'returns true for an empty String', ->
    expect(up.util.isBlank('')).toBe(true)

  it 'returns false for a String with at least one character', ->
    expect(up.util.isBlank('string')).toBe(false)

  it 'returns true for an empty array', ->
    expect(up.util.isBlank([])).toBe(true)

  it 'returns false for an array with at least one element', ->
    expect(up.util.isBlank(['element'])).toBe(false)

  it 'returns true for an empty jQuery collection', ->
    expect(up.util.isBlank($([]))).toBe(true)

  it 'returns false for a jQuery collection with at least one element', ->
    expect(up.util.isBlank($('body'))).toBe(false)

  it 'returns true for an empty object', ->
    expect(up.util.isBlank({})).toBe(true)

  it 'returns false for a function', ->
    expect(up.util.isBlank((->))).toBe(false)

  # Fixed description: the assertion expects false, but this spec was
  # previously titled "returns true for an object with at least one key".
  it 'returns false for an object with at least one key', ->
    expect(up.util.isBlank({key: 'value'})).toBe(false)
describe 'up.util.normalizeUrl', ->

  # normalizeUrl expands paths against the current location, resolves
  # redundant segments, and by default keeps the query but strips the #hash.
  it 'normalizes a relative path', ->
    expect(up.util.normalizeUrl('foo')).toBe("http://#{location.hostname}:#{location.port}/foo")

  it 'normalizes an absolute path', ->
    expect(up.util.normalizeUrl('/foo')).toBe("http://#{location.hostname}:#{location.port}/foo")

  it 'normalizes a full URL', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar')).toBe('http://example.com/foo/bar')

  it 'preserves a query string', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value')).toBe('http://example.com/foo/bar?key=value')

  it 'strips a query string with { search: false } option', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value', search: false)).toBe('http://example.com/foo/bar')

  it 'does not strip a trailing slash by default', ->
    expect(up.util.normalizeUrl('/foo/')).toEqual("http://#{location.hostname}:#{location.port}/foo/")

  it 'normalizes redundant segments', ->
    expect(up.util.normalizeUrl('/foo/../foo')).toBe("http://#{location.hostname}:#{location.port}/foo")

  it 'strips a #hash by default', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar#fragment')).toBe('http://example.com/foo/bar')

  it 'preserves a #hash with { hash: true } option', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar#fragment', hash: true)).toBe('http://example.com/foo/bar#fragment')

  it 'puts a #hash behind the query string', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value#fragment', hash: true)).toBe('http://example.com/foo/bar?key=value#fragment')
describe 'up.util.detect', ->

  # detect() returns the first array element for which the tester is truthy.
  it 'finds the first element in the given array that matches the given tester', ->
    words = ['foo', 'bar', 'baz']
    startsWithB = (word) -> word[0] == 'b'
    # 'bar' comes before 'baz', so it wins
    expect(up.util.detect(words, startsWithB)).toEqual('bar')

  it "returns undefined if the given array doesn't contain a matching element", ->
    words = ['foo', 'bar', 'baz']
    startsWithZ = (word) -> word[0] == 'z'
    expect(up.util.detect(words, startsWithZ)).toBeUndefined()
describe 'up.util.config', ->

  # up.util.config builds a sealed settings object that only accepts keys
  # present in its factory defaults, plus a #reset() method.
  it 'creates an object with the given attributes', ->
    object = up.util.config(a: 1, b: 2)
    expect(object.a).toBe(1)
    expect(object.b).toBe(2)

  it 'does not allow to set a key that was not included in the factory settings', ->
    object = up.util.config(a: 1)
    object.b = 2
    expect(object.b).toBeUndefined()

  describe '#reset', ->

    it 'resets the object to its original state', ->
      object = up.util.config(a: 1)
      expect(object.b).toBeUndefined()
      object.a = 2
      expect(object.a).toBe(2)
      object.reset()
      expect(object.a).toBe(1)

    # NOTE(review): the title also mentions an #update method, but only
    # #reset is asserted below — confirm whether #update still exists.
    it 'does not remove the #reset or #update method from the object', ->
      object = up.util.config(a: 1)
      object.b = 2
      object.reset()
      expect(object.reset).toBeDefined()
describe 'up.util.remove', ->

  # remove() deletes the given value from the array in place.
  it 'removes the given string from the given array', ->
    letters = ['a', 'b', 'c']
    up.util.remove(letters, 'b')
    expect(letters).toEqual ['a', 'c']

  it 'removes the given object from the given array', ->
    # Objects are matched by identity, not by structural equality
    first = { 'key': 1 }
    second = { 'key': 2 }
    third = { 'key': 3 }
    items = [first, second, third]
    up.util.remove(items, second)
    expect(items).toEqual [first, third]
describe 'up.util.unresolvablePromise', ->

  it 'return a pending promise', (done) ->
    promise = up.util.unresolvablePromise()
    promiseState(promise).then (result) ->
      expect(result.state).toEqual('pending')
      done()

  it 'returns a different object every time (to prevent memory leaks)', ->
    # A shared singleton would accumulate .then() callbacks forever
    one = up.util.unresolvablePromise()
    two = up.util.unresolvablePromise()
    expect(one).not.toBe(two)
describe 'up.util.flatten', ->

  it 'flattens the given array', ->
    array = [1, [2, 3], 4]
    expect(u.flatten(array)).toEqual([1, 2, 3, 4])

  # Deeply nested arrays are intentionally left intact (shallow flatten)
  it 'only flattens one level deep for performance reasons', ->
    array = [1, [2, [3,4]], 5]
    expect(u.flatten(array)).toEqual([1, 2, [3, 4], 5])
describe 'up.util.renameKey', ->

  # Fixed description: was "renames a key in the given property" — the key
  # is renamed in the given *object*, mutating it in place.
  it 'renames a key in the given object', ->
    object = { a: 'a value', b: 'b value'}
    u.renameKey(object, 'a', 'c')
    expect(object.a).toBeUndefined()
    expect(object.b).toBe('b value')
    expect(object.c).toBe('a value')
describe 'up.util.selectInSubtree', ->

  # Fixed description: the expectation contains only descendants ($child,
  # $grandChild) and excludes the matching ancestor ($grandMother) —
  # ancestor matching is the job of selectInDynasty, specced below.
  it 'finds the selector in descendants of the given element', ->
    $grandMother = affix('.grand-mother.match')
    $mother = $grandMother.affix('.mother')
    $element = $mother.affix('.element')
    $child = $element.affix('.child.match')
    $grandChild = $child.affix('.grand-child.match')
    $matches = up.util.selectInSubtree($element, '.match')
    $expected = $child.add($grandChild)
    expect($matches).toEqual $expected

  it 'finds the element itself if it matches the selector', ->
    $element = affix('.element.match')
    $matches = up.util.selectInSubtree($element, '.match')
    expect($matches).toEqual $element

  describe 'when given a jQuery collection with multiple elements', ->

    # Fixed typo in description: "in a all subtrees" -> "in all subtrees"
    it 'searches in all subtrees of the given elements', ->
      $a_grandMother = affix('.grand-mother.match')
      $a_mother = $a_grandMother.affix('.mother')
      $a_element = $a_mother.affix('.element')
      $a_child = $a_element.affix('.child.match')
      $a_grandChild = $a_child.affix('.grand-child.match')
      $b_grandMother = affix('.grand-mother.match')
      $b_mother = $b_grandMother.affix('.mother')
      $b_element = $b_mother.affix('.element')
      $b_child = $b_element.affix('.child.match')
      $b_grandChild = $b_child.affix('.grand-child.match')
      $matches = up.util.selectInSubtree($a_element.add($b_element), '.match')
      expect($matches).toEqual $a_child.add($a_grandChild).add($b_child).add($b_grandChild)
describe 'up.util.selectInDynasty', ->

  # Unlike selectInSubtree, the "dynasty" also includes matching ancestors.
  it 'finds the selector in both ancestors and descendants of the given element', ->
    $grandMother = affix('.grand-mother.match')
    $mother = $grandMother.affix('.mother')
    $element = $mother.affix('.element')
    $child = $element.affix('.child.match')
    $grandChild = $child.affix('.grand-child.match')
    $matches = up.util.selectInDynasty($element, '.match')
    $expected = $grandMother.add($child).add($grandChild)
    expect($matches).toEqual $expected

  it 'finds the element itself if it matches the selector', ->
    $element = affix('.element.match')
    $matches = up.util.selectInDynasty($element, '.match')
    expect($matches).toEqual $element
describe 'up.util.isCrossDomain', ->

  it 'returns false for an absolute path', ->
    expect(up.util.isCrossDomain('/foo')).toBe(false)

  # Fixed grammar in description: "an relative path" -> "a relative path"
  it 'returns false for a relative path', ->
    expect(up.util.isCrossDomain('foo')).toBe(false)

  it 'returns false for a fully qualified URL with the same protocol and hostname as the current location', ->
    fullUrl = "#{location.protocol}//#{location.host}/foo"
    expect(up.util.isCrossDomain(fullUrl)).toBe(false)

  it 'returns true for a fully qualified URL with a different protocol than the current location', ->
    fullUrl = "otherprotocol://#{location.host}/foo"
    expect(up.util.isCrossDomain(fullUrl)).toBe(true)

  # Fixed description: the assertion expects true, but this spec was
  # previously titled "returns false for a ... different hostname".
  it 'returns true for a fully qualified URL with a different hostname than the current location', ->
    fullUrl = "#{location.protocol}//other-host.tld/foo"
    expect(up.util.isCrossDomain(fullUrl)).toBe(true)
describe 'up.util.isOptions', ->

  # isOptions must accept only plain-object "options hashes" and reject
  # every other object-like value (arrays, jQuery, promises, dates, ...).
  it 'returns true for an Object instance', ->
    expect(up.util.isOptions(new Object())).toBe(true)

  it 'returns true for an object literal', ->
    expect(up.util.isOptions({ foo: 'bar'})).toBe(true)

  it 'returns true for a prototype-less object', ->
    expect(up.util.isOptions(Object.create(null))).toBe(true)

  it 'returns false for undefined', ->
    expect(up.util.isOptions(undefined)).toBe(false)

  it 'returns false for null', ->
    expect(up.util.isOptions(null)).toBe(false)

  it 'returns false for a function (which is technically an object)', ->
    fn = -> 'foo'
    # Even a function carrying properties is not an options hash
    fn.key = 'value'
    expect(up.util.isOptions(fn)).toBe(false)

  it 'returns false for an Array', ->
    expect(up.util.isOptions(['foo'])).toBe(false)

  it 'returns false for a jQuery collection', ->
    expect(up.util.isOptions($('body'))).toBe(false)

  it 'returns false for a Promise', ->
    expect(up.util.isOptions(Promise.resolve())).toBe(false)

  it 'returns false for a FormData object', ->
    expect(up.util.isOptions(new FormData())).toBe(false)

  it 'returns false for a Date', ->
    expect(up.util.isOptions(new Date())).toBe(false)

  it 'returns false for a RegExp', ->
    expect(up.util.isOptions(new RegExp('foo'))).toBe(false)
describe 'up.util.isObject', ->

  # isObject is broader than isOptions: it accepts anything object-like,
  # including functions, arrays, jQuery collections and promises.
  it 'returns true for an Object instance', ->
    expect(up.util.isObject(new Object())).toBe(true)

  it 'returns true for an object literal', ->
    expect(up.util.isObject({ foo: 'bar'})).toBe(true)

  it 'returns false for undefined', ->
    expect(up.util.isObject(undefined)).toBe(false)

  it 'returns false for null', ->
    expect(up.util.isObject(null)).toBe(false)

  it 'returns true for a function (which is technically an object)', ->
    fn = -> 'foo'
    fn.key = 'value'
    expect(up.util.isObject(fn)).toBe(true)

  it 'returns true for an array', ->
    expect(up.util.isObject(['foo'])).toBe(true)

  it 'returns true for a jQuery collection', ->
    expect(up.util.isObject($('body'))).toBe(true)

  it 'returns true for a promise', ->
    expect(up.util.isObject(Promise.resolve())).toBe(true)

  it 'returns true for a FormData object', ->
    expect(up.util.isObject(new FormData())).toBe(true)
describe 'up.util.merge', ->

  # merge() is a shallow merge: later keys win, nested objects are
  # replaced wholesale rather than merged recursively.
  it 'merges the given objects', ->
    obj = { a: '1', b: '2' }
    other = { b: '3', c: '4' }
    obj = up.util.merge(obj, other)
    expect(obj).toEqual { a: '1', b: '3', c: '4' }

  it 'overrides (not merges) keys with object value', ->
    obj = { a: '1', b: { c: '2', d: '3' } }
    other = { e: '4', b: { f: '5', g: '6' }}
    obj = up.util.merge(obj, other)
    # b is replaced entirely; c/d from the first object are gone
    expect(obj).toEqual { a: '1', e: '4', b: { f: '5', g: '6' } }

  it 'ignores undefined arguments', ->
    obj = { a: 1, b: 2 }

    result = up.util.merge(obj, undefined)
    expect(result).toEqual { a: 1, b: 2 }

    reverseResult = up.util.merge(undefined, obj)
    expect(reverseResult).toEqual { a: 1, b: 2 }

  it 'ignores null arguments', ->
    obj = { a: 1, b: 2 }

    result = up.util.merge(obj, null)
    expect(result).toEqual { a: 1, b: 2 }

    reverseResult = up.util.merge(null, obj)
    expect(reverseResult).toEqual { a: 1, b: 2 }
# describe 'up.util.deepMerge', ->
#
# it 'recursively merges the given objects', ->
# obj = { a: '1', b: { c: '2', d: '3' } }
# other = { e: '4', b: { f: '5', g: '6' }}
# obj = up.util.deepMerge(obj, other)
# expect(obj).toEqual { a: '1', e: '4', b: { c: '2', d: '3', f: '5', g: '6' } }
#
# it 'ignores undefined arguments', ->
# obj = { a: 1, b: 2 }
#
# result = up.util.deepMerge(obj, undefined)
# expect(result).toEqual { a: 1, b: 2 }
#
# reverseResult = up.util.deepMerge(undefined, obj)
# expect(reverseResult).toEqual { a: 1, b: 2 }
#
# it 'ignores null arguments', ->
# obj = { a: 1, b: 2 }
#
# result = up.util.deepMerge(obj, null)
# expect(result).toEqual { a: 1, b: 2 }
#
# reverseResult = up.util.deepMerge(null, obj)
# expect(reverseResult).toEqual { a: 1, b: 2 }
#
# it 'overwrites (and does not concatenate) array values', ->
# obj = { a: ['1', '2'] }
# other = { a: ['3', '4'] }
# obj = up.util.deepMerge(obj, other)
# expect(obj).toEqual { a: ['3', '4'] }
describe 'up.util.memoize', ->

  it 'returns a function that calls the memoized function', ->
    fun = (a, b) -> a + b
    memoized = u.memoize(fun)
    expect(memoized(2, 3)).toEqual(5)

  it 'returns the cached return value of the first call when called again', ->
    spy = jasmine.createSpy().and.returnValue(5)
    memoized = u.memoize(spy)
    # Second call with the same arguments must hit the cache, not the spy
    expect(memoized(2, 3)).toEqual(5)
    expect(memoized(2, 3)).toEqual(5)
    expect(spy.calls.count()).toEqual(1)
# Runs the identical contract specs against both the (possibly native-backed)
# up.util.assign and the manual up.util.assignPolyfill, so the polyfill
# cannot drift from the real implementation.
['assign', 'assignPolyfill'].forEach (assignVariant) ->

  describe "up.util.#{assignVariant}", ->

    assign = up.util[assignVariant]

    it 'copies the second object into the first object', ->
      target = { a: 1 }
      source = { b: 2, c: 3 }
      assign(target, source)
      expect(target).toEqual { a: 1, b: 2, c: 3 }

      # Source is unchanged
      expect(source).toEqual { b: 2, c: 3 }

    it 'copies null property values', ->
      target = { a: 1, b: 2 }
      source = { b: null }
      assign(target, source)
      expect(target).toEqual { a: 1, b: null }

    it 'copies undefined property values', ->
      target = { a: 1, b: 2 }
      source = { b: undefined }
      assign(target, source)
      expect(target).toEqual { a: 1, b: undefined }

    it 'returns the first object', ->
      target = { a: 1 }
      source = { b: 2 }
      result = assign(target, source)
      expect(result).toBe(target)

    it 'takes multiple sources to copy from', ->
      target = { a: 1 }
      source1 = { b: 2, c: 3 }
      source2 = { d: 4, e: 5 }
      assign(target, source1, source2)
      expect(target).toEqual { a: 1, b: 2, c: 3, d: 4, e: 5 }
describe 'up.util', ->
u = up.util
describe 'JavaScript functions', ->
# describe 'up.util.flatMap', ->
#
# it 'collects the Array results of the given map function, then concatenates the result arrays into one flat array', ->
# fun = (x) -> [x, x]
# result = up.util.flatMap([1, 2, 3], fun)
# expect(result).toEqual([1, 1, 2, 2, 3, 3])
describe 'up.util.uniq', ->

  it 'returns the given array with duplicates elements removed', ->
    input = [1, 2, 1, 1, 3]
    result = up.util.uniq(input)
    expect(result).toEqual [1, 2, 3]

  it 'works on DOM elements', ->
    # Elements are deduplicated by identity, not by markup equality
    one = document.createElement("div")
    two = document.createElement("div")
    input = [one, one, two, two]
    result = up.util.uniq(input)
    expect(result).toEqual [one, two]

  it 'preserves insertion order', ->
    input = [1, 2, 1]
    result = up.util.uniq(input)
    expect(result).toEqual [1, 2]
describe 'up.util.uniqBy', ->

  it 'returns the given array with duplicate elements removed, calling the given function to determine value for uniqueness', ->
    input = ["foo", "bar", "apple", 'orange', 'banana']
    # Uniqueness key is the string length: 3, 5 and 6 each survive once
    result = up.util.uniqBy(input, (element) -> element.length)
    expect(result).toEqual ['foo', 'apple', 'orange']

  # Fixed typo in description: "uniquness" -> "uniqueness"
  it 'accepts a property name instead of a function, which collects that property from each item to compute uniqueness', ->
    input = ["foo", "bar", "apple", 'orange', 'banana']
    result = up.util.uniqBy(input, 'length')
    expect(result).toEqual ['foo', 'apple', 'orange']
# describe 'up.util.parsePath', ->
#
# it 'parses a plain name', ->
# path = up.util.parsePath("foo")
# expect(path).toEqual ['foo']
#
# it 'considers underscores to be part of a name', ->
# path = up.util.parsePath("foo_bar")
# expect(path).toEqual ['foo_bar']
#
# it 'considers dashes to be part of a name', ->
# path = up.util.parsePath("foo-bar")
# expect(path).toEqual ['foo-bar']
#
# it 'parses dot-separated names into multiple path segments', ->
# path = up.util.parsePath('foo.bar.baz')
# expect(path).toEqual ['foo', 'bar', 'baz']
#
# it 'parses nested params notation with square brackets', ->
# path = up.util.parsePath('user[account][email]')
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'parses double quotes in square brackets', ->
# path = up.util.parsePath('user["account"]["email"]')
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'parses single quotes in square brackets', ->
# path = up.util.parsePath("user['account']['email']")
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'allows square brackets inside quotes', ->
# path = up.util.parsePath("element['a[up-href]']")
# expect(path).toEqual ['element', 'a[up-href]']
#
# it 'allows single quotes inside double quotes', ->
# path = up.util.parsePath("element[\"a[up-href='foo']\"]")
# expect(path).toEqual ['element', "a[up-href='foo']"]
#
# it 'allows double quotes inside single quotes', ->
# path = up.util.parsePath("element['a[up-href=\"foo\"]']")
# expect(path).toEqual ['element', 'a[up-href="foo"]']
#
# it 'allows dots in square brackets when it is quoted', ->
# path = up.util.parsePath('elements[".foo"]')
# expect(path).toEqual ['elements', '.foo']
#
# it 'allows different notation for each segment', ->
# path = up.util.parsePath('foo.bar[baz]["bam"][\'qux\']')
# expect(path).toEqual ['foo', 'bar', 'baz', 'bam', 'qux']
describe 'up.util.map', ->

  it 'creates a new array of values by calling the given function on each item of the given array', ->
    array = ["apple", "orange", "cucumber"]
    mapped = up.util.map(array, (element) -> element.length)
    expect(mapped).toEqual [5, 6, 8]

  it 'accepts a property name instead of a function, which collects that property from each item', ->
    array = ["apple", "orange", "cucumber"]
    mapped = up.util.map(array, 'length')
    expect(mapped).toEqual [5, 6, 8]

  it 'passes the iteration index as second argument to the given function', ->
    array = ["apple", "orange", "cucumber"]
    mapped = up.util.map(array, (element, i) -> i)
    expect(mapped).toEqual [0, 1, 2]
describe 'up.util.each', ->

  # Fixed typo in description: "each itm" -> "each item"
  it 'calls the given function once for each item of the given array', ->
    args = []
    array = ["apple", "orange", "cucumber"]
    up.util.each array, (item) -> args.push(item)
    expect(args).toEqual ["apple", "orange", "cucumber"]

  it 'passes the iteration index as second argument to the given function', ->
    args = []
    array = ["apple", "orange", "cucumber"]
    up.util.each array, (item, index) -> args.push(index)
    expect(args).toEqual [0, 1, 2]
describe 'up.util.select', ->

  # select() keeps elements for which the tester is truthy (a.k.a. filter)
  it 'returns an array of those elements in the given array for which the given function returns true', ->
    array = ["foo", "orange", "cucumber"]
    results = up.util.select array, (item) -> item.length > 3
    expect(results).toEqual ['orange', 'cucumber']

  it 'passes the iteration index as second argument to the given function', ->
    array = ["apple", "orange", "cucumber", "banana"]
    results = up.util.select array, (item, index) -> index % 2 == 0
    expect(results).toEqual ['apple', 'cucumber']

  it 'accepts a property name instead of a function, which checks that property from each item', ->
    array = [ { name: 'a', prop: false }, { name: 'b', prop: true } ]
    results = up.util.select array, 'prop'
    expect(results).toEqual [{ name: 'b', prop: true }]
describe 'up.util.reject', ->

  # reject() is the complement of select(): it drops matching elements
  it 'returns an array of those elements in the given array for which the given function returns false', ->
    array = ["foo", "orange", "cucumber"]
    results = up.util.reject array, (item) -> item.length < 4
    expect(results).toEqual ['orange', 'cucumber']

  it 'passes the iteration index as second argument to the given function', ->
    array = ["apple", "orange", "cucumber", "banana"]
    results = up.util.reject array, (item, index) -> index % 2 == 0
    expect(results).toEqual ['orange', 'banana']

  it 'accepts a property name instead of a function, which checks that property from each item', ->
    array = [ { name: 'a', prop: false }, { name: 'b', prop: true } ]
    results = up.util.reject array, 'prop'
    expect(results).toEqual [{ name: 'a', prop: false }]
describe 'up.util.previewable', ->

  # previewable() wraps a function so callers can observe (via .promise)
  # when it has been called and what it returned.
  it 'wraps a function into a proxy function with an additional .promise attribute', ->
    fun = -> 'return value'
    proxy = up.util.previewable(fun)
    expect(u.isFunction(proxy)).toBe(true)
    expect(u.isPromise(proxy.promise)).toBe(true)
    expect(proxy()).toEqual('return value')

  it "resolves the proxy's .promise when the inner function returns", (done) ->
    fun = -> 'return value'
    proxy = up.util.previewable(fun)
    callback = jasmine.createSpy('promise callback')
    proxy.promise.then(callback)
    u.nextFrame ->
      # The promise must not resolve before the proxy has been called
      expect(callback).not.toHaveBeenCalled()
      proxy()
      u.nextFrame ->
        expect(callback).toHaveBeenCalledWith('return value')
        done()

  it "delays resolution of the proxy's .promise if the inner function returns a promise", (done) ->
    funDeferred = u.newDeferred()
    fun = -> funDeferred
    proxy = up.util.previewable(fun)
    callback = jasmine.createSpy('promise callback')
    proxy.promise.then(callback)
    proxy()
    u.nextFrame ->
      # Still pending: the inner function's promise has not resolved yet
      expect(callback).not.toHaveBeenCalled()
      funDeferred.resolve('return value')
      u.nextFrame ->
        expect(callback).toHaveBeenCalledWith('return value')
        done()
describe 'up.util.kebabCase', ->

  # Fixed typo in description: "kebap-case" -> "kebab-case"
  it 'converts a string of multiple words from camel-case to kebab-case', ->
    result = up.util.kebabCase('fooBarBaz')
    expect(result).toEqual('foo-bar-baz')

  it 'does not change a single word', ->
    result = up.util.kebabCase('foo')
    expect(result).toEqual('foo')

  it 'downcases the first word when it starts with a capital letter', ->
    result = up.util.kebabCase('FooBar')
    expect(result).toEqual('foo-bar')

  it 'does not change a string that is already in kebab-case', ->
    result = up.util.kebabCase('foo-bar-baz')
    expect(result).toEqual('foo-bar-baz')
describe 'up.util.camelCase', ->

  # Fixed typo in description: "kebap-case" -> "kebab-case"
  it 'converts a string of multiple words from kebab-case to camel-case', ->
    result = up.util.camelCase('foo-bar-baz')
    expect(result).toEqual('fooBarBaz')

  it 'does not change a single word', ->
    result = up.util.camelCase('foo')
    expect(result).toEqual('foo')

  it 'downcases the first word when it starts with a capital letter', ->
    result = up.util.camelCase('Foo-Bar')
    expect(result).toEqual('fooBar')

  it 'does not change a string that is already in camel-case', ->
    result = up.util.camelCase('fooBarBaz')
    expect(result).toEqual('fooBarBaz')
describe 'up.util.kebabCaseKeys', ->

  it "converts the given object's keys from camel-case to kebab-case", ->
    input =
      fooBar: 'one'
      barBaz: 'two'
    result = up.util.kebabCaseKeys(input)
    expect(result).toEqual
      'foo-bar': 'one'
      'bar-baz': 'two'

  it "does not change an object whose keys are already kebab-case", ->
    input =
      'foo-bar': 'one'
      'bar-baz': 'two'
    result = up.util.kebabCaseKeys(input)
    expect(result).toEqual
      'foo-bar': 'one'
      'bar-baz': 'two'
describe 'up.util.camelCaseKeys', ->

  it "converts the given object's keys from kebab-case to camel-case", ->
    input =
      'foo-bar': 'one'
      'bar-baz': 'two'
    result = up.util.camelCaseKeys(input)
    expect(result).toEqual
      fooBar: 'one'
      barBaz: 'two'

  it "does not change an object whose keys are already camel-case", ->
    input =
      fooBar: 'one'
      barBaz: 'two'
    result = up.util.camelCaseKeys(input)
    expect(result).toEqual
      fooBar: 'one'
      barBaz: 'two'
# describe 'up.util.lowerCaseKeys', ->
#
# it "returns a copy of the given object will all keys in lower case", ->
# input =
# 'A-B': 'C-D'
# 'E-F': 'G-H'
# result = up.util.lowerCaseKeys(input)
# expect(result).toEqual
# 'a-b': 'C-D'
# 'e-f': 'G-H'
describe 'up.util.DivertibleChain', ->

  # Timing-sensitive spec: each timer task takes 50 ms; the 80 ms / 70 ms
  # waits below are chosen to land between task completions.
  it "instantiates a task queue whose (2..n)th tasks can be changed by calling '.asap'", (done) ->
    chain = new up.util.DivertibleChain()

    timer1Spy = jasmine.createSpy('timer1 has been called')
    timer1 = ->
      timer1Spy()
      u.promiseTimer(50)

    timer2Spy = jasmine.createSpy('timer2 has been called')
    timer2 = ->
      timer2Spy()
      u.promiseTimer(50)

    timer3Spy = jasmine.createSpy('timer3 has been called')
    timer3 = ->
      timer3Spy()
      u.promiseTimer(50)

    timer4Spy = jasmine.createSpy('timer4 has been called')
    timer4 = ->
      timer4Spy()
      u.promiseTimer(50)

    chain.asap(timer1)
    u.nextFrame ->
      # The first task starts immediately
      expect(timer1Spy).toHaveBeenCalled()
      chain.asap(timer2)
      u.nextFrame ->
        # timer2 is still waiting for timer1 to finish
        expect(timer2Spy).not.toHaveBeenCalled()
        # Override the (2..n)th tasks. This unschedules timer2.
        chain.asap(timer3, timer4)
        u.setTimer 80, ->
          # timer1 (50 ms) has finished; timer2 was diverted, timer3 runs
          expect(timer2Spy).not.toHaveBeenCalled()
          expect(timer3Spy).toHaveBeenCalled()
          u.setTimer 70, ->
            # timer3 (50 ms) has finished; timer4 follows
            expect(timer4Spy).toHaveBeenCalled()
            done()
describe 'up.util.sequence', ->

  # sequence() composes functions into one that calls them in order
  it 'combines the given functions into a single function', ->
    values = []
    one = -> values.push('one')
    two = -> values.push('two')
    three = -> values.push('three')
    sequence = up.util.sequence(one, two, three)
    # Composing must not call anything yet
    expect(values).toEqual([])
    sequence()
    expect(values).toEqual(['one', 'two', 'three'])
describe 'up.util.createElementFromHtml', ->

  it 'parses a string that contains a serialized HTML document', ->
    string = """
      <html lang="foo">
        <head>
          <title>document title</title>
        </head>
        <body data-env='production'>
          <div>line 1</div>
          <div>line 2</div>
        </body>
      </html>
      """

    element = up.util.createElementFromHtml(string)

    # Head, body attributes and body children must all survive parsing.
    expect(element.querySelector('head title').textContent).toEqual('document title')
    expect(element.querySelector('body').getAttribute('data-env')).toEqual('production')
    expect(element.querySelectorAll('body div').length).toBe(2)
    expect(element.querySelectorAll('body div')[0].textContent).toEqual('line 1')
    expect(element.querySelectorAll('body div')[1].textContent).toEqual('line 2')

  it 'parses a string that contains carriage returns (bugfix)', ->
    # Windows-style \r\n line endings must not break the parser.
    string = """
      <html>\r
        <body>\r
          <div>line</div>\r
        </body>\r
      </html>\r
      """

    $element = up.util.createElementFromHtml(string)
    expect($element.querySelector('body')).toBeGiven()
    expect($element.querySelector('body div').textContent).toEqual('line')

  it 'does not run forever if a page has a <head> without a <title> (bugfix)', ->
    # Realistic document head with meta/link/script tags but no <title>.
    html = """
      <!doctype html>
      <html>
        <head>
          <meta charset="utf-8" />
          <meta name="format-detection" content="telephone=no">
          <link href='/images/favicon.png' rel='shortcut icon' type='image/png'>
          <meta name='viewport' content='width=device-width, initial-scale=1, maximum-scale=1, minimum-scale=1'>
          <base href="/examples/update-fragment/" />
          <link href='http://fonts.googleapis.com/css?family=Orbitron:400|Ubuntu+Mono:400,700|Source+Sans+Pro:300,400,700,400italic,700italic' rel='stylesheet' type='text/css'>
          <link href="//netdna.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css" rel="stylesheet">
          <link href="/stylesheets/example/all.css" rel="stylesheet" />
          <script src="/javascripts/example.js"></script>
        </head>
        <body>
          <div class="page">
            <div class="story">
              <h1>Full story</h1>
              <p>Lorem ipsum dolor sit amet.</p>
              <a href="preview.html" up-target=".story">
                Read summary
              </a>
            </div>
          </div>
        </body>
      </html>
      """
    element = up.util.createElementFromHtml(html)
    expect(element.querySelector("title")).toBeMissing()
    expect(element.querySelector("h1").textContent).toEqual('Full story')

  it 'can parse HTML without a <head>', ->
    html = """
      <html>
        <body>
          <h1>Full story</h1>
        </body>
      </html>
      """
    element = up.util.createElementFromHtml(html)
    expect(element.querySelector("title")).toBeMissing()
    expect(element.querySelector("h1").textContent).toEqual('Full story')

  it 'can parse a HTML fragment without a <body>', ->
    html = """
      <h1>Full story</h1>
      """
    element = up.util.createElementFromHtml(html)
    expect(element.querySelector("title")).toBeMissing()
    expect(element.querySelector("h1").textContent).toEqual('Full story')
describe 'up.util.isFixed', ->

  it 'returns true if the given element or one of its ancestors has a "fixed" CSS position', ->
    # Build a four-level DOM chain. Fixed copy-paste slip: the outermost
    # element previously used the selector '.grand-parent' as well, which
    # was misleading (the class is irrelevant to the assertions below).
    $grandGrandParent = affix('.grand-grand-parent')
    $grandParent = $grandGrandParent.affix('.grand-parent')
    $parent = $grandParent.affix('.parent')
    $child = $parent.affix('.child')
    # Only $grandParent is position: fixed
    $grandParent.css(position: 'fixed')
    # The fixed element and all its descendants are considered fixed ...
    expect(up.util.isFixed($child)).toBe(true)
    expect(up.util.isFixed($parent)).toBe(true)
    expect(up.util.isFixed($grandParent)).toBe(true)
    # ... but an ancestor above the fixed element is not.
    expect(up.util.isFixed($grandGrandParent)).toBe(false)

  it 'returns false if the given element and its ancestors all have a non-"fixed" CSS position', ->
    $element = affix('.element')
    expect(up.util.isFixed($element)).toBe(false)
describe 'up.util.setTimer', ->

  it 'calls the given function after waiting the given milliseconds', (done) ->
    callback = jasmine.createSpy()
    expectNotCalled = -> expect(callback).not.toHaveBeenCalled()
    expectCalled = -> expect(callback).toHaveBeenCalled()
    up.util.setTimer(100, callback)
    # Not called synchronously ...
    expectNotCalled()
    # ... not called halfway through the 100ms delay ...
    setTimeout(expectNotCalled, 50)
    # ... but called once the delay has elapsed (checked at 125ms).
    setTimeout(expectCalled, 50 + 75)
    setTimeout(done, 50 + 75)

  describe 'if the delay is zero', ->

    it 'calls the given function in the next execution frame', ->
      callback = jasmine.createSpy()
      up.util.setTimer(0, callback)
      # Even with a zero delay the call is deferred, never synchronous
      expect(callback).not.toHaveBeenCalled()
      setTimeout((-> expect(callback).toHaveBeenCalled()), 0)
# describe 'up.util.argNames', ->
#
# it 'returns an array of argument names for the given function', ->
# fun = ($element, data) ->
# expect(up.util.argNames(fun)).toEqual(['$element', 'data'])
describe 'up.util.trim', ->

  it 'removes leading and trailing whitespace from the given string', ->
    # A mix of tabs, newlines, carriage returns and spaces on both ends
    padded = "\t\n\r abc \r\n\t"
    expect(up.util.trim(padded)).toEqual('abc')
describe 'up.util.only', ->

  it 'returns a copy of the given object with only the given whitelisted properties', ->
    original =
      foo: 'foo-value'
      bar: 'bar-value'
      baz: 'baz-value'
      bam: 'bam-value'
    whitelisted = up.util.only(original, 'bar', 'bam')
    expect(whitelisted).toEqual
      bar: 'bar-value'
      bam: 'bam-value'
    # Show that original did not change
    expect(original).toEqual
      foo: 'foo-value'
      bar: 'bar-value'
      baz: 'baz-value'
      bam: 'bam-value'

  it 'does not add empty keys to the returned object if the given object does not have that key', ->
    original =
      foo: 'foo-value'
    whitelisted = up.util.only(original, 'foo', 'bar')
    # 'bar' was requested but missing from the source, so the copy
    # must not own a 'bar' key (not even with an undefined value).
    expect(whitelisted).toHaveOwnProperty('foo')
    expect(whitelisted).not.toHaveOwnProperty('bar')
describe 'up.util.readInlineStyle', ->

  describe 'with a string as second argument', ->

    it 'returns a CSS value string from an inline [style] attribute', ->
      $div = affix('div').attr('style', 'background-color: #ff0000')
      style = up.util.readInlineStyle($div, 'backgroundColor')
      # Browsers convert colors to rgb() values, even IE11
      expect(style).toEqual('rgb(255, 0, 0)')

    it 'returns a blank value if the element does not have the given property in the [style] attribute', ->
      $div = affix('div').attr('style', 'background-color: red')
      style = up.util.readInlineStyle($div, 'color')
      expect(style).toBeBlank()

    # Fixed missing "if" in the spec description below.
    it 'returns a blank value if the given property is a computed property, but not in the [style] attribute', ->
      $div = affix('div[class="red-background"]')
      inlineStyle = up.util.readInlineStyle($div, 'backgroundColor')
      computedStyle = up.util.readComputedStyle($div, 'backgroundColor')
      # The stylesheet-applied background is visible as a computed style ...
      expect(computedStyle).toEqual('rgb(255, 0, 0)')
      # ... but must not leak into the inline style reading.
      expect(inlineStyle).toBeBlank()

  describe 'with an array as second argument', ->

    it 'returns an object with the given inline [style] properties', ->
      $div = affix('div').attr('style', 'background-color: #ff0000; color: #0000ff')
      style = up.util.readInlineStyle($div, ['backgroundColor', 'color'])
      expect(style).toEqual
        backgroundColor: 'rgb(255, 0, 0)'
        color: 'rgb(0, 0, 255)'

    it 'returns blank keys if the element does not have the given property in the [style] attribute', ->
      $div = affix('div').attr('style', 'background-color: #ff0000')
      style = up.util.readInlineStyle($div, ['backgroundColor', 'color'])
      # Unlike the string variant, the array variant does own the key,
      # but with a blank value.
      expect(style).toHaveOwnProperty('color')
      expect(style.color).toBeBlank()

    # Fixed missing "if" in the spec description below.
    it 'returns a blank value if the given property is a computed property, but not in the [style] attribute', ->
      $div = affix('div[class="red-background"]')
      inlineStyleHash = up.util.readInlineStyle($div, ['backgroundColor'])
      computedBackground = up.util.readComputedStyle($div, 'backgroundColor')
      expect(computedBackground).toEqual('rgb(255, 0, 0)')
      expect(inlineStyleHash).toHaveOwnProperty('backgroundColor')
      expect(inlineStyleHash.backgroundColor).toBeBlank()
describe 'up.util.writeInlineStyle', ->

  it "sets the given style properties as the given element's [style] attribute", ->
    $div = affix('div')
    up.util.writeInlineStyle($div, { color: 'red', backgroundColor: 'blue' })
    style = $div.attr('style')
    # camelCase property names are written as kebab-case CSS properties
    expect(style).toContain('color: red')
    expect(style).toContain('background-color: blue')

  it "merges the given style properties into the given element's existing [style] value", ->
    $div = affix('div[style="color: red"]')
    up.util.writeInlineStyle($div, { backgroundColor: 'blue' })
    style = $div.attr('style')
    # Pre-existing inline styles survive the merge
    expect(style).toContain('color: red')
    expect(style).toContain('background-color: blue')

  it "converts the values of known length properties to px values automatically", ->
    $div = affix('div')
    # A bare number for a length property gets a 'px' unit appended
    up.util.writeInlineStyle($div, { paddingTop: 100 })
    style = $div.attr('style')
    expect(style).toContain('padding-top: 100px')
describe 'up.util.writeTemporaryStyle', ->

  it "sets the given inline styles and returns a function that will restore the previous inline styles", ->
    $div = affix('div[style="color: red"]')
    restore = up.util.writeTemporaryStyle($div, { color: 'blue' })
    expect($div.attr('style')).toContain('color: blue')
    expect($div.attr('style')).not.toContain('color: red')
    # Calling the returned function reverts to the original inline style
    restore()
    expect($div.attr('style')).not.toContain('color: blue')
    expect($div.attr('style')).toContain('color: red')

  it "does not restore inherited styles", ->
    # The red background comes from a stylesheet class, not from [style]
    $div = affix('div[class="red-background"]')
    restore = up.util.writeTemporaryStyle($div, { backgroundColor: 'blue' })
    expect($div.attr('style')).toContain('background-color: blue')
    restore()
    # Restoring removes the temporary inline style entirely instead of
    # copying the stylesheet value into the [style] attribute.
    expect($div.attr('style')).not.toContain('background-color')
describe 'up.util.except', ->

  it 'returns a copy of the given object but omits the given blacklisted properties', ->
    original =
      foo: 'foo-value'
      bar: 'bar-value'
      baz: 'baz-value'
      bam: 'bam-value'
    # Renamed the misleading local `whitelisted`: up.util.except()
    # blacklists keys, so the result is the remainder of the object.
    remaining = up.util.except(original, 'foo', 'baz')
    expect(remaining).toEqual
      bar: 'bar-value'
      bam: 'bam-value'
    # Show that original did not change
    expect(original).toEqual
      foo: 'foo-value'
      bar: 'bar-value'
      baz: 'baz-value'
      bam: 'bam-value'
# Specs covering the priority order in which selectorForElement derives a
# selector: [up-id] > #id > [name] > classes > [aria-label] > tag name.
describe 'up.util.selectorForElement', ->

  it "prefers using the element's 'up-id' attribute to using the element's ID", ->
    $element = affix('div[up-id=up-id-value]#id-value')
    expect(up.util.selectorForElement($element)).toBe('[up-id="up-id-value"]')

  it "prefers using the element's ID to using the element's name", ->
    $element = affix('div#id-value[name=name-value]')
    expect(up.util.selectorForElement($element)).toBe("#id-value")

  # An ID containing CSS metacharacters cannot be expressed as #id,
  # so an attribute selector is used instead.
  it "selects the ID with an attribute selector if the ID contains a slash", ->
    $element = affix('div').attr(id: 'foo/bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo/bar"]')

  it "selects the ID with an attribute selector if the ID contains a space", ->
    $element = affix('div').attr(id: 'foo bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo bar"]')

  it "selects the ID with an attribute selector if the ID contains a dot", ->
    $element = affix('div').attr(id: 'foo.bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo.bar"]')

  it "selects the ID with an attribute selector if the ID contains a quote", ->
    $element = affix('div').attr(id: 'foo"bar')
    expect(up.util.selectorForElement($element)).toBe('[id="foo\\"bar"]')

  it "prefers using the element's tagName + [name] to using the element's classes", ->
    $element = affix('input[name=name-value].class1.class2')
    expect(up.util.selectorForElement($element)).toBe('input[name="name-value"]')

  it "prefers using the element's classes to using the element's ARIA label", ->
    $element = affix('div.class1.class2[aria-label="ARIA label value"]')
    expect(up.util.selectorForElement($element)).toBe(".class1.class2")

  it 'does not use Unpoly classes to compose a class selector', ->
    # .up-current is a framework-managed class and must be ignored
    $element = affix('div.class1.up-current.class2')
    expect(up.util.selectorForElement($element)).toBe(".class1.class2")

  it "prefers using the element's ARIA label to using the element's tag name", ->
    $element = affix('div[aria-label="ARIA label value"]')
    expect(up.util.selectorForElement($element)).toBe('[aria-label="ARIA label value"]')

  it "uses the element's tag name if no better description is available", ->
    $element = affix('div')
    expect(up.util.selectorForElement($element)).toBe("div")

  it 'escapes quotes in attribute selector values', ->
    $element = affix('div')
    $element.attr('aria-label', 'foo"bar')
    expect(up.util.selectorForElement($element)).toBe('[aria-label="foo\\"bar"]')
describe 'up.util.addTemporaryClass', ->

  it 'adds the given class to the given element', ->
    $element = affix('.foo.bar')
    element = $element.get(0)
    expect(element.className).toEqual('foo bar')
    up.util.addTemporaryClass(element, 'baz')
    expect(element.className).toEqual('foo bar baz')

  it 'returns a function that restores the original class', ->
    $element = affix('.foo.bar')
    element = $element.get(0)
    restoreClass = up.util.addTemporaryClass(element, 'baz')
    expect(element.className).toEqual('foo bar baz')
    # The returned undo function puts the class list back as it was
    restoreClass()
    expect(element.className).toEqual('foo bar')
# Specs for castedAttr, which reads an attribute and casts boolean-ish
# string values ("true", "false", attribute-name shorthand) to booleans.
describe 'up.util.castedAttr', ->

  it 'returns true if the attribute value is the string "true"', ->
    $element = affix('div').attr('foo', 'true')
    expect(up.util.castedAttr($element, 'foo')).toBe(true)

  it 'returns true if the attribute value is the name of the attribute', ->
    # HTML shorthand like foo="foo" counts as true
    $element = affix('div').attr('foo', 'foo')
    expect(up.util.castedAttr($element, 'foo')).toBe(true)

  it 'returns false if the attribute value is the string "false"', ->
    $element = affix('div').attr('foo', 'false')
    expect(up.util.castedAttr($element, 'foo')).toBe(false)

  it 'returns undefined if the element has no such attribute', ->
    $element = affix('div')
    expect(up.util.castedAttr($element, 'foo')).toBe(undefined)

  it 'returns the attribute value unchanged if the value is some string', ->
    $element = affix('div').attr('foo', 'some text')
    expect(up.util.castedAttr($element, 'foo')).toBe('some text')
# NOTE(review): a second `describe 'up.util.any'` block exists further down
# in this file with overlapping examples — consider merging the two.
describe 'up.util.any', ->

  it 'returns true if an element in the array returns true for the given function', ->
    result = up.util.any [null, undefined, 'foo', ''], up.util.isPresent
    expect(result).toBe(true)

  it 'returns false if no element in the array returns true for the given function', ->
    result = up.util.any [null, undefined, ''], up.util.isPresent
    expect(result).toBe(false)

  it 'short-circuits once an element returns true', ->
    count = 0
    up.util.any [null, undefined, 'foo', ''], (element) ->
      count += 1
      up.util.isPresent(element)
    # 'foo' is the third element; the trailing '' must not be visited
    expect(count).toBe(3)
describe 'up.util.all', ->

  # Fixed grammar in the first spec description ("all element ... returns").
  it 'returns true if all elements in the array return true for the given function', ->
    result = up.util.all ['foo', 'bar', 'baz'], up.util.isPresent
    expect(result).toBe(true)

  it 'returns false if an element in the array returns false for the given function', ->
    result = up.util.all ['foo', 'bar', null, 'baz'], up.util.isPresent
    expect(result).toBe(false)

  it 'short-circuits once an element returns false', ->
    count = 0
    up.util.all ['foo', 'bar', '', 'baz'], (element) ->
      count += 1
      up.util.isPresent(element)
    # '' is the third element; the trailing 'baz' must not be visited
    expect(count).toBe(3)

  it 'passes the iteration index as second argument to the given function', ->
    array = ["apple", "orange", "cucumber"]
    args = []
    up.util.all array, (item, index) ->
      args.push(index)
      true
    expect(args).toEqual [0, 1, 2]

  it 'accepts a property name instead of a function, which collects that property from each item', ->
    allTrue = [ { prop: true }, { prop: true } ]
    someFalse = [ { prop: true }, { prop: false } ]
    expect(up.util.all(allTrue, 'prop')).toBe(true)
    expect(up.util.all(someFalse, 'prop')).toBe(false)
# describe 'up.util.none', ->
#
# it 'returns true if no element in the array returns true for the given function', ->
# result = up.util.none ['foo', 'bar', 'baz'], up.util.isBlank
# expect(result).toBe(true)
#
# it 'returns false if an element in the array returns false for the given function', ->
# result = up.util.none ['foo', 'bar', null, 'baz'], up.util.isBlank
# expect(result).toBe(false)
#
# it 'short-circuits once an element returns true', ->
# count = 0
# up.util.none ['foo', 'bar', '', 'baz'], (element) ->
# count += 1
# up.util.isBlank(element)
# expect(count).toBe(3)
#
# it 'passes the iteration index as second argument to the given function', ->
# array = ["apple", "orange", "cucumber"]
# args = []
# up.util.none array, (item, index) ->
# args.push(index)
# false
# expect(args).toEqual [0, 1, 2]
#
# it 'accepts a property name instead of a function, which collects that property from each item', ->
# allFalse = [ { prop: false }, { prop: false } ]
# someTrue = [ { prop: true }, { prop: false } ]
# expect(up.util.none(allFalse, 'prop')).toBe(true)
# expect(up.util.none(someTrue, 'prop')).toBe(false)
# NOTE(review): this duplicates an earlier `describe 'up.util.any'` block in
# this file — consider merging the two.
describe 'up.util.any', ->

  it 'returns true if at least one element in the array returns true for the given function', ->
    result = up.util.any ['', 'bar', null], up.util.isPresent
    expect(result).toBe(true)

  it 'returns false if no element in the array returns true for the given function', ->
    result = up.util.any ['', null, undefined], up.util.isPresent
    expect(result).toBe(false)

  it 'passes the iteration index as second argument to the given function', ->
    array = ["apple", "orange", "cucumber"]
    args = []
    up.util.any array, (item, index) ->
      args.push(index)
      false
    expect(args).toEqual [0, 1, 2]

  it 'accepts a property name instead of a function, which collects that property from each item', ->
    someTrue = [ { prop: true }, { prop: false } ]
    allFalse = [ { prop: false }, { prop: false } ]
    expect(up.util.any(someTrue, 'prop')).toBe(true)
    expect(up.util.any(allFalse, 'prop')).toBe(false)
describe 'up.util.isBlank', ->

  it 'returns false for false', ->
    expect(up.util.isBlank(false)).toBe(false)

  it 'returns false for true', ->
    expect(up.util.isBlank(true)).toBe(false)

  it 'returns true for null', ->
    expect(up.util.isBlank(null)).toBe(true)

  it 'returns true for undefined', ->
    expect(up.util.isBlank(undefined)).toBe(true)

  it 'returns true for an empty String', ->
    expect(up.util.isBlank('')).toBe(true)

  it 'returns false for a String with at least one character', ->
    expect(up.util.isBlank('string')).toBe(false)

  it 'returns true for an empty array', ->
    expect(up.util.isBlank([])).toBe(true)

  it 'returns false for an array with at least one element', ->
    expect(up.util.isBlank(['element'])).toBe(false)

  it 'returns true for an empty jQuery collection', ->
    expect(up.util.isBlank($([]))).toBe(true)

  it 'returns false for a jQuery collection with at least one element', ->
    expect(up.util.isBlank($('body'))).toBe(false)

  it 'returns true for an empty object', ->
    expect(up.util.isBlank({})).toBe(true)

  it 'returns false for a function', ->
    expect(up.util.isBlank((->))).toBe(false)

  # Fixed the spec description below: it previously read "returns true"
  # although the assertion (correctly) expects false.
  it 'returns false for an object with at least one key', ->
    expect(up.util.isBlank({key: 'value'})).toBe(false)
describe 'up.util.normalizeUrl', ->

  # Relative and absolute paths are expanded against the current location.
  it 'normalizes a relative path', ->
    expect(up.util.normalizeUrl('foo')).toBe("http://#{location.hostname}:#{location.port}/foo")

  it 'normalizes an absolute path', ->
    expect(up.util.normalizeUrl('/foo')).toBe("http://#{location.hostname}:#{location.port}/foo")

  it 'normalizes a full URL', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar')).toBe('http://example.com/foo/bar')

  it 'preserves a query string', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value')).toBe('http://example.com/foo/bar?key=value')

  it 'strips a query string with { search: false } option', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value', search: false)).toBe('http://example.com/foo/bar')

  it 'does not strip a trailing slash by default', ->
    expect(up.util.normalizeUrl('/foo/')).toEqual("http://#{location.hostname}:#{location.port}/foo/")

  it 'normalizes redundant segments', ->
    expect(up.util.normalizeUrl('/foo/../foo')).toBe("http://#{location.hostname}:#{location.port}/foo")

  # #hash handling: stripped unless { hash: true } is passed.
  it 'strips a #hash by default', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar#fragment')).toBe('http://example.com/foo/bar')

  it 'preserves a #hash with { hash: true } option', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar#fragment', hash: true)).toBe('http://example.com/foo/bar#fragment')

  it 'puts a #hash behind the query string', ->
    expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value#fragment', hash: true)).toBe('http://example.com/foo/bar?key=value#fragment')
describe 'up.util.detect', ->

  it 'finds the first element in the given array that matches the given tester', ->
    words = ['foo', 'bar', 'baz']
    startsWithB = (word) -> word[0] == 'b'
    # Both 'bar' and 'baz' match, but 'bar' comes first
    expect(up.util.detect(words, startsWithB)).toEqual('bar')

  it "returns undefined if the given array doesn't contain a matching element", ->
    words = ['foo', 'bar', 'baz']
    startsWithZ = (word) -> word[0] == 'z'
    expect(up.util.detect(words, startsWithZ)).toBeUndefined()
describe 'up.util.config', ->

  it 'creates an object with the given attributes', ->
    object = up.util.config(a: 1, b: 2)
    expect(object.a).toBe(1)
    expect(object.b).toBe(2)

  it 'does not allow to set a key that was not included in the factory settings', ->
    object = up.util.config(a: 1)
    # Assigning an unknown key is silently ignored
    object.b = 2
    expect(object.b).toBeUndefined()

  describe '#reset', ->

    it 'resets the object to its original state', ->
      object = up.util.config(a: 1)
      expect(object.b).toBeUndefined()
      object.a = 2
      expect(object.a).toBe(2)
      object.reset()
      expect(object.a).toBe(1)

    # NOTE(review): the description mentions #update, but only #reset is
    # asserted below — consider also expecting object.update to be defined.
    it 'does not remove the #reset or #update method from the object', ->
      object = up.util.config(a: 1)
      object.b = 2
      object.reset()
      expect(object.reset).toBeDefined()
describe 'up.util.remove', ->

  it 'removes the given string from the given array', ->
    letters = ['a', 'b', 'c']
    up.util.remove(letters, 'b')
    # The array is mutated in place
    expect(letters).toEqual ['a', 'c']

  it 'removes the given object from the given array', ->
    first = { 'key': 1 }
    second = { 'key': 2 }
    third = { 'key': 3 }
    objects = [first, second, third]
    up.util.remove(objects, second)
    expect(objects).toEqual [first, third]
describe 'up.util.unresolvablePromise', ->

  # Fixed typo in the spec description ("return" -> "returns").
  it 'returns a pending promise', (done) ->
    promise = up.util.unresolvablePromise()
    promiseState(promise).then (result) ->
      expect(result.state).toEqual('pending')
      done()

  it 'returns a different object every time (to prevent memory leaks)', ->
    one = up.util.unresolvablePromise()
    two = up.util.unresolvablePromise()
    expect(one).not.toBe(two)
describe 'up.util.flatten', ->

  it 'flattens the given array', ->
    nested = [1, [2, 3], 4]
    expect(u.flatten(nested)).toEqual([1, 2, 3, 4])

  it 'only flattens one level deep for performance reasons', ->
    deeplyNested = [1, [2, [3,4]], 5]
    # The innermost array must survive untouched
    expect(u.flatten(deeplyNested)).toEqual([1, 2, [3, 4], 5])
describe 'up.util.renameKey', ->

  # Fixed spec description: the key is renamed in the given *object*.
  it 'renames a key in the given object', ->
    object = { a: 'a value', b: 'b value'}
    u.renameKey(object, 'a', 'c')
    # The old key is gone, its value now lives under the new key,
    # and unrelated keys are untouched.
    expect(object.a).toBeUndefined()
    expect(object.b).toBe('b value')
    expect(object.c).toBe('a value')
describe 'up.util.selectInSubtree', ->

  # NOTE(review): despite this description, the assertions below only expect
  # descendant matches ($child, $grandChild) — the matching ancestor
  # ($grandMother) is not part of the expected result. Ancestor matching is
  # covered by selectInDynasty; confirm intent before rewording.
  it 'finds the selector in ancestors and descendants of the given element', ->
    $grandMother = affix('.grand-mother.match')
    $mother = $grandMother.affix('.mother')
    $element = $mother.affix('.element')
    $child = $element.affix('.child.match')
    $grandChild = $child.affix('.grand-child.match')
    $matches = up.util.selectInSubtree($element, '.match')
    $expected = $child.add($grandChild)
    expect($matches).toEqual $expected

  it 'finds the element itself if it matches the selector', ->
    $element = affix('.element.match')
    $matches = up.util.selectInSubtree($element, '.match')
    expect($matches).toEqual $element

  describe 'when given a jQuery collection with multiple elements', ->

    # Fixed typo in the spec description ("in a all subtrees").
    it 'searches in all subtrees of the given elements', ->
      $a_grandMother = affix('.grand-mother.match')
      $a_mother = $a_grandMother.affix('.mother')
      $a_element = $a_mother.affix('.element')
      $a_child = $a_element.affix('.child.match')
      $a_grandChild = $a_child.affix('.grand-child.match')

      $b_grandMother = affix('.grand-mother.match')
      $b_mother = $b_grandMother.affix('.mother')
      $b_element = $b_mother.affix('.element')
      $b_child = $b_element.affix('.child.match')
      $b_grandChild = $b_child.affix('.grand-child.match')

      $matches = up.util.selectInSubtree($a_element.add($b_element), '.match')
      expect($matches).toEqual $a_child.add($a_grandChild).add($b_child).add($b_grandChild)
describe 'up.util.selectInDynasty', ->

  it 'finds the selector in both ancestors and descendants of the given element', ->
    $grandMother = affix('.grand-mother.match')
    $mother = $grandMother.affix('.mother')
    $element = $mother.affix('.element')
    $child = $element.affix('.child.match')
    $grandChild = $child.affix('.grand-child.match')
    $matches = up.util.selectInDynasty($element, '.match')
    # Unlike selectInSubtree, the matching ancestor is included
    $expected = $grandMother.add($child).add($grandChild)
    expect($matches).toEqual $expected

  it 'finds the element itself if it matches the selector', ->
    $element = affix('.element.match')
    $matches = up.util.selectInDynasty($element, '.match')
    expect($matches).toEqual $element
describe 'up.util.isCrossDomain', ->

  it 'returns false for an absolute path', ->
    expect(up.util.isCrossDomain('/foo')).toBe(false)

  # Fixed article in the spec description ("an relative").
  it 'returns false for a relative path', ->
    expect(up.util.isCrossDomain('foo')).toBe(false)

  it 'returns false for a fully qualified URL with the same protocol and hostname as the current location', ->
    fullUrl = "#{location.protocol}//#{location.host}/foo"
    expect(up.util.isCrossDomain(fullUrl)).toBe(false)

  it 'returns true for a fully qualified URL with a different protocol than the current location', ->
    fullUrl = "otherprotocol://#{location.host}/foo"
    expect(up.util.isCrossDomain(fullUrl)).toBe(true)

  # Fixed the spec description below: it previously read "returns false"
  # although the assertion (correctly) expects true — a different hostname
  # is cross-domain.
  it 'returns true for a fully qualified URL with a different hostname than the current location', ->
    fullUrl = "#{location.protocol}//other-host.tld/foo"
    expect(up.util.isCrossDomain(fullUrl)).toBe(true)
# Specs for isOptions: only plain objects (including prototype-less ones)
# count as options hashes; other object-typed values do not.
describe 'up.util.isOptions', ->

  it 'returns true for an Object instance', ->
    expect(up.util.isOptions(new Object())).toBe(true)

  it 'returns true for an object literal', ->
    expect(up.util.isOptions({ foo: 'bar'})).toBe(true)

  it 'returns true for a prototype-less object', ->
    expect(up.util.isOptions(Object.create(null))).toBe(true)

  it 'returns false for undefined', ->
    expect(up.util.isOptions(undefined)).toBe(false)

  it 'returns false for null', ->
    expect(up.util.isOptions(null)).toBe(false)

  it 'returns false for a function (which is technically an object)', ->
    fn = -> 'foo'
    # Even a function carrying properties is not an options hash
    fn.key = '<KEY>'
    expect(up.util.isOptions(fn)).toBe(false)

  it 'returns false for an Array', ->
    expect(up.util.isOptions(['foo'])).toBe(false)

  it 'returns false for a jQuery collection', ->
    expect(up.util.isOptions($('body'))).toBe(false)

  it 'returns false for a Promise', ->
    expect(up.util.isOptions(Promise.resolve())).toBe(false)

  it 'returns false for a FormData object', ->
    expect(up.util.isOptions(new FormData())).toBe(false)

  it 'returns false for a Date', ->
    expect(up.util.isOptions(new Date())).toBe(false)

  it 'returns false for a RegExp', ->
    expect(up.util.isOptions(new RegExp('foo'))).toBe(false)
# Specs for isObject, which (unlike isOptions) accepts any object-typed
# value: functions, arrays, jQuery collections, promises, FormData.
describe 'up.util.isObject', ->

  it 'returns true for an Object instance', ->
    expect(up.util.isObject(new Object())).toBe(true)

  it 'returns true for an object literal', ->
    expect(up.util.isObject({ foo: 'bar'})).toBe(true)

  it 'returns false for undefined', ->
    expect(up.util.isObject(undefined)).toBe(false)

  it 'returns false for null', ->
    expect(up.util.isObject(null)).toBe(false)

  it 'returns true for a function (which is technically an object)', ->
    fn = -> 'foo'
    fn.key = '<KEY>'
    expect(up.util.isObject(fn)).toBe(true)

  it 'returns true for an array', ->
    expect(up.util.isObject(['foo'])).toBe(true)

  it 'returns true for a jQuery collection', ->
    expect(up.util.isObject($('body'))).toBe(true)

  it 'returns true for a promise', ->
    expect(up.util.isObject(Promise.resolve())).toBe(true)

  it 'returns true for a FormData object', ->
    expect(up.util.isObject(new FormData())).toBe(true)
    # up.util.merge() is a *shallow* merge: later keys win, object values are
    # replaced wholesale (not merged recursively), and null/undefined
    # arguments are skipped.
    describe 'up.util.merge', ->
      it 'merges the given objects', ->
        obj = { a: '1', b: '2' }
        other = { b: '3', c: '4' }
        obj = up.util.merge(obj, other)
        expect(obj).toEqual { a: '1', b: '3', c: '4' }
      it 'overrides (not merges) keys with object value', ->
        obj = { a: '1', b: { c: '2', d: '3' } }
        other = { e: '4', b: { f: '5', g: '6' }}
        obj = up.util.merge(obj, other)
        expect(obj).toEqual { a: '1', e: '4', b: { f: '5', g: '6' } }
      it 'ignores undefined arguments', ->
        obj = { a: 1, b: 2 }
        result = up.util.merge(obj, undefined)
        expect(result).toEqual { a: 1, b: 2 }
        reverseResult = up.util.merge(undefined, obj)
        expect(reverseResult).toEqual { a: 1, b: 2 }
      it 'ignores null arguments', ->
        obj = { a: 1, b: 2 }
        result = up.util.merge(obj, null)
        expect(result).toEqual { a: 1, b: 2 }
        reverseResult = up.util.merge(null, obj)
        expect(reverseResult).toEqual { a: 1, b: 2 }
# describe 'up.util.deepMerge', ->
#
# it 'recursively merges the given objects', ->
# obj = { a: '1', b: { c: '2', d: '3' } }
# other = { e: '4', b: { f: '5', g: '6' }}
# obj = up.util.deepMerge(obj, other)
# expect(obj).toEqual { a: '1', e: '4', b: { c: '2', d: '3', f: '5', g: '6' } }
#
# it 'ignores undefined arguments', ->
# obj = { a: 1, b: 2 }
#
# result = up.util.deepMerge(obj, undefined)
# expect(result).toEqual { a: 1, b: 2 }
#
# reverseResult = up.util.deepMerge(undefined, obj)
# expect(reverseResult).toEqual { a: 1, b: 2 }
#
# it 'ignores null arguments', ->
# obj = { a: 1, b: 2 }
#
# result = up.util.deepMerge(obj, null)
# expect(result).toEqual { a: 1, b: 2 }
#
# reverseResult = up.util.deepMerge(null, obj)
# expect(reverseResult).toEqual { a: 1, b: 2 }
#
# it 'overwrites (and does not concatenate) array values', ->
# obj = { a: ['1', '2'] }
# other = { a: ['3', '4'] }
# obj = up.util.deepMerge(obj, other)
# expect(obj).toEqual { a: ['3', '4'] }
    # up.util.memoize() caches the wrapped function's return value.
    # NOTE(review): both calls below use identical arguments, so this spec
    # does not pin down whether the cache is keyed by arguments — confirm
    # against the implementation before relying on that.
    describe 'up.util.memoize', ->
      it 'returns a function that calls the memoized function', ->
        fun = (a, b) -> a + b
        memoized = u.memoize(fun)
        expect(memoized(2, 3)).toEqual(5)
      it 'returns the cached return value of the first call when called again', ->
        spy = jasmine.createSpy().and.returnValue(5)
        memoized = u.memoize(spy)
        expect(memoized(2, 3)).toEqual(5)
        expect(memoized(2, 3)).toEqual(5)
        # The underlying spy ran only once; the second call hit the cache.
        expect(spy.calls.count()).toEqual(1)
    # Runs the same contract against both the native-backed assign() and the
    # polyfill implementation, keeping the two variants behaviorally in sync.
    ['assign', 'assignPolyfill'].forEach (assignVariant) ->
      describe "up.util.#{assignVariant}", ->
        assign = up.util[assignVariant]
        it 'copies the second object into the first object', ->
          target = { a: 1 }
          source = { b: 2, c: 3 }
          assign(target, source)
          expect(target).toEqual { a: 1, b: 2, c: 3 }
          # Source is unchanged
          expect(source).toEqual { b: 2, c: 3 }
        it 'copies null property values', ->
          target = { a: 1, b: 2 }
          source = { b: null }
          assign(target, source)
          expect(target).toEqual { a: 1, b: null }
        it 'copies undefined property values', ->
          target = { a: 1, b: 2 }
          source = { b: undefined }
          assign(target, source)
          expect(target).toEqual { a: 1, b: undefined }
        it 'returns the first object', ->
          target = { a: 1 }
          source = { b: 2 }
          result = assign(target, source)
          expect(result).toBe(target)
        it 'takes multiple sources to copy from', ->
          target = { a: 1 }
          source1 = { b: 2, c: 3 }
          source2 = { d: 4, e: 5 }
          assign(target, source1, source2)
          expect(target).toEqual { a: 1, b: 2, c: 3, d: 4, e: 5 }
describe 'up.util', ->
u = up.util
describe 'JavaScript functions', ->
# describe 'up.util.flatMap', ->
#
# it 'collects the Array results of the given map function, then concatenates the result arrays into one flat array', ->
# fun = (x) -> [x, x]
# result = up.util.flatMap([1, 2, 3], fun)
# expect(result).toEqual([1, 1, 2, 2, 3, 3])
    # up.util.uniq() removes duplicates by identity (works for DOM elements,
    # not just primitives) while preserving first-seen order.
    describe 'up.util.uniq', ->
      it 'returns the given array with duplicates elements removed', ->
        input = [1, 2, 1, 1, 3]
        result = up.util.uniq(input)
        expect(result).toEqual [1, 2, 3]
      it 'works on DOM elements', ->
        one = document.createElement("div")
        two = document.createElement("div")
        input = [one, one, two, two]
        result = up.util.uniq(input)
        expect(result).toEqual [one, two]
      it 'preserves insertion order', ->
        input = [1, 2, 1]
        result = up.util.uniq(input)
        expect(result).toEqual [1, 2]
describe 'up.util.uniqBy', ->
it 'returns the given array with duplicate elements removed, calling the given function to determine value for uniqueness', ->
input = ["foo", "bar", "apple", 'orange', 'banana']
result = up.util.uniqBy(input, (element) -> element.length)
expect(result).toEqual ['foo', 'apple', 'orange']
it 'accepts a property name instead of a function, which collects that property from each item to compute uniquness', ->
input = ["foo", "bar", "apple", 'orange', 'banana']
result = up.util.uniqBy(input, 'length')
expect(result).toEqual ['foo', 'apple', 'orange']
# describe 'up.util.parsePath', ->
#
# it 'parses a plain name', ->
# path = up.util.parsePath("foo")
# expect(path).toEqual ['foo']
#
# it 'considers underscores to be part of a name', ->
# path = up.util.parsePath("foo_bar")
# expect(path).toEqual ['foo_bar']
#
# it 'considers dashes to be part of a name', ->
# path = up.util.parsePath("foo-bar")
# expect(path).toEqual ['foo-bar']
#
# it 'parses dot-separated names into multiple path segments', ->
# path = up.util.parsePath('foo.bar.baz')
# expect(path).toEqual ['foo', 'bar', 'baz']
#
# it 'parses nested params notation with square brackets', ->
# path = up.util.parsePath('user[account][email]')
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'parses double quotes in square brackets', ->
# path = up.util.parsePath('user["account"]["email"]')
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'parses single quotes in square brackets', ->
# path = up.util.parsePath("user['account']['email']")
# expect(path).toEqual ['user', 'account', 'email']
#
# it 'allows square brackets inside quotes', ->
# path = up.util.parsePath("element['a[up-href]']")
# expect(path).toEqual ['element', 'a[up-href]']
#
# it 'allows single quotes inside double quotes', ->
# path = up.util.parsePath("element[\"a[up-href='foo']\"]")
# expect(path).toEqual ['element', "a[up-href='foo']"]
#
# it 'allows double quotes inside single quotes', ->
# path = up.util.parsePath("element['a[up-href=\"foo\"]']")
# expect(path).toEqual ['element', 'a[up-href="foo"]']
#
# it 'allows dots in square brackets when it is quoted', ->
# path = up.util.parsePath('elements[".foo"]')
# expect(path).toEqual ['elements', '.foo']
#
# it 'allows different notation for each segment', ->
# path = up.util.parsePath('foo.bar[baz]["bam"][\'qux\']')
# expect(path).toEqual ['foo', 'bar', 'baz', 'bam', 'qux']
    # up.util.map() maps with either a function (receiving element and index)
    # or a property-name shorthand.
    describe 'up.util.map', ->
      it 'creates a new array of values by calling the given function on each item of the given array', ->
        array = ["apple", "orange", "cucumber"]
        mapped = up.util.map(array, (element) -> element.length)
        expect(mapped).toEqual [5, 6, 8]
      it 'accepts a property name instead of a function, which collects that property from each item', ->
        array = ["apple", "orange", "cucumber"]
        mapped = up.util.map(array, 'length')
        expect(mapped).toEqual [5, 6, 8]
      it 'passes the iteration index as second argument to the given function', ->
        array = ["apple", "orange", "cucumber"]
        mapped = up.util.map(array, (element, i) -> i)
        expect(mapped).toEqual [0, 1, 2]
describe 'up.util.each', ->
it 'calls the given function once for each itm of the given array', ->
args = []
array = ["apple", "orange", "cucumber"]
up.util.each array, (item) -> args.push(item)
expect(args).toEqual ["apple", "orange", "cucumber"]
it 'passes the iteration index as second argument to the given function', ->
args = []
array = ["apple", "orange", "cucumber"]
up.util.each array, (item, index) -> args.push(index)
expect(args).toEqual [0, 1, 2]
    # up.util.select() filters by a (item, index) predicate or by a
    # property-name shorthand.
    describe 'up.util.select', ->
      it 'returns an array of those elements in the given array for which the given function returns true', ->
        array = ["foo", "orange", "cucumber"]
        results = up.util.select array, (item) -> item.length > 3
        expect(results).toEqual ['orange', 'cucumber']
      it 'passes the iteration index as second argument to the given function', ->
        array = ["apple", "orange", "cucumber", "banana"]
        results = up.util.select array, (item, index) -> index % 2 == 0
        expect(results).toEqual ['apple', 'cucumber']
      it 'accepts a property name instead of a function, which checks that property from each item', ->
        array = [ { name: 'a', prop: false }, { name: 'b', prop: true } ]
        results = up.util.select array, 'prop'
        expect(results).toEqual [{ name: 'b', prop: true }]
    # up.util.reject() is the complement of select(): keeps elements for
    # which the predicate (or property shorthand) is falsy.
    describe 'up.util.reject', ->
      it 'returns an array of those elements in the given array for which the given function returns false', ->
        array = ["foo", "orange", "cucumber"]
        results = up.util.reject array, (item) -> item.length < 4
        expect(results).toEqual ['orange', 'cucumber']
      it 'passes the iteration index as second argument to the given function', ->
        array = ["apple", "orange", "cucumber", "banana"]
        results = up.util.reject array, (item, index) -> index % 2 == 0
        expect(results).toEqual ['orange', 'banana']
      it 'accepts a property name instead of a function, which checks that property from each item', ->
        array = [ { name: 'a', prop: false }, { name: 'b', prop: true } ]
        results = up.util.reject array, 'prop'
        expect(results).toEqual [{ name: 'a', prop: false }]
    # up.util.previewable() wraps a function in a proxy carrying a .promise
    # that settles when the wrapped function (or the promise it returns) does.
    describe 'up.util.previewable', ->
      it 'wraps a function into a proxy function with an additional .promise attribute', ->
        fun = -> 'return value'
        proxy = up.util.previewable(fun)
        expect(u.isFunction(proxy)).toBe(true)
        expect(u.isPromise(proxy.promise)).toBe(true)
        expect(proxy()).toEqual('return value')
      it "resolves the proxy's .promise when the inner function returns", (done) ->
        fun = -> 'return value'
        proxy = up.util.previewable(fun)
        callback = jasmine.createSpy('promise callback')
        proxy.promise.then(callback)
        u.nextFrame ->
          # Not resolved before the proxy has been called.
          expect(callback).not.toHaveBeenCalled()
          proxy()
          u.nextFrame ->
            expect(callback).toHaveBeenCalledWith('return value')
            done()
      it "delays resolution of the proxy's .promise if the inner function returns a promise", (done) ->
        funDeferred = u.newDeferred()
        fun = -> funDeferred
        proxy = up.util.previewable(fun)
        callback = jasmine.createSpy('promise callback')
        proxy.promise.then(callback)
        proxy()
        u.nextFrame ->
          # Proxy was called, but its returned promise is still pending.
          expect(callback).not.toHaveBeenCalled()
          funDeferred.resolve('return value')
          u.nextFrame ->
            expect(callback).toHaveBeenCalledWith('return value')
            done()
describe 'up.util.kebabCase', ->
it 'converts a string of multiple words from camel-case to kebap-case', ->
result = up.util.kebabCase('fooBarBaz')
expect(result).toEqual('foo-bar-baz')
it 'does not change a single word', ->
result = up.util.kebabCase('foo')
expect(result).toEqual('foo')
it 'downcases the first word when it starts with a capital letter', ->
result = up.util.kebabCase('FooBar')
expect(result).toEqual('foo-bar')
it 'does not change a string that is already in kebab-case', ->
result = up.util.kebabCase('foo-bar-baz')
expect(result).toEqual('foo-bar-baz')
describe 'up.util.camelCase', ->
it 'converts a string of multiple words from kebap-case to camel-case', ->
result = up.util.camelCase('foo-bar-baz')
expect(result).toEqual('fooBarBaz')
it 'does not change a single word', ->
result = up.util.camelCase('foo')
expect(result).toEqual('foo')
it 'downcases the first word when it starts with a capital letter', ->
result = up.util.camelCase('Foo-Bar')
expect(result).toEqual('fooBar')
it 'does not change a string that is already in camel-case', ->
result = up.util.camelCase('fooBarBaz')
expect(result).toEqual('fooBarBaz')
    # up.util.kebabCaseKeys() returns a copy with every key kebab-cased.
    describe 'up.util.kebabCaseKeys', ->
      it "converts the given object's keys from camel-case to kebab-case", ->
        input =
          fooBar: 'one'
          barBaz: 'two'
        result = up.util.kebabCaseKeys(input)
        expect(result).toEqual
          'foo-bar': 'one'
          'bar-baz': 'two'
      it "does not change an object whose keys are already kebab-case", ->
        input =
          'foo-bar': 'one'
          'bar-baz': 'two'
        result = up.util.kebabCaseKeys(input)
        expect(result).toEqual
          'foo-bar': 'one'
          'bar-baz': 'two'
    # up.util.camelCaseKeys() returns a copy with every key camel-cased.
    describe 'up.util.camelCaseKeys', ->
      it "converts the given object's keys from kebab-case to camel-case", ->
        input =
          'foo-bar': 'one'
          'bar-baz': 'two'
        result = up.util.camelCaseKeys(input)
        expect(result).toEqual
          fooBar: 'one'
          barBaz: 'two'
      it "does not change an object whose keys are already camel-case", ->
        input =
          fooBar: 'one'
          barBaz: 'two'
        result = up.util.camelCaseKeys(input)
        expect(result).toEqual
          fooBar: 'one'
          barBaz: 'two'
# describe 'up.util.lowerCaseKeys', ->
#
# it "returns a copy of the given object will all keys in lower case", ->
# input =
# 'A-B': 'C-D'
# 'E-F': 'G-H'
# result = up.util.lowerCaseKeys(input)
# expect(result).toEqual
# 'a-b': 'C-D'
# 'e-f': 'G-H'
    # DivertibleChain queues async tasks; .asap() replaces every task after
    # the currently running one. Each timer below takes ~50ms, so the
    # setTimer(80) / setTimer(70) checkpoints land between task boundaries.
    describe 'up.util.DivertibleChain', ->
      it "instantiates a task queue whose (2..n)th tasks can be changed by calling '.asap'", (done) ->
        chain = new up.util.DivertibleChain()
        timer1Spy = jasmine.createSpy('timer1 has been called')
        timer1 = ->
          timer1Spy()
          u.promiseTimer(50)
        timer2Spy = jasmine.createSpy('timer2 has been called')
        timer2 = ->
          timer2Spy()
          u.promiseTimer(50)
        timer3Spy = jasmine.createSpy('timer3 has been called')
        timer3 = ->
          timer3Spy()
          u.promiseTimer(50)
        timer4Spy = jasmine.createSpy('timer4 has been called')
        timer4 = ->
          timer4Spy()
          u.promiseTimer(50)
        chain.asap(timer1)
        u.nextFrame ->
          expect(timer1Spy).toHaveBeenCalled()
          chain.asap(timer2)
          u.nextFrame ->
            # timer2 is still waiting for timer1 to finish
            expect(timer2Spy).not.toHaveBeenCalled()
            # Override the (2..n)th tasks. This unschedules timer2.
            chain.asap(timer3, timer4)
            u.setTimer 80, ->
              expect(timer2Spy).not.toHaveBeenCalled()
              expect(timer3Spy).toHaveBeenCalled()
              u.setTimer 70, ->
                expect(timer4Spy).toHaveBeenCalled()
                done()
    # up.util.sequence() composes functions into one that calls them in order.
    describe 'up.util.sequence', ->
      it 'combines the given functions into a single function', ->
        values = []
        one = -> values.push('one')
        two = -> values.push('two')
        three = -> values.push('three')
        sequence = up.util.sequence(one, two, three)
        # Composing alone must not call anything yet.
        expect(values).toEqual([])
        sequence()
        expect(values).toEqual(['one', 'two', 'three'])
    # up.util.createElementFromHtml() parses full documents as well as
    # fragments, tolerating carriage returns and missing <head>/<title>/<body>.
    describe 'up.util.createElementFromHtml', ->
      it 'parses a string that contains a serialized HTML document', ->
        string = """
          <html lang="foo">
            <head>
              <title>document title</title>
            </head>
            <body data-env='production'>
              <div>line 1</div>
              <div>line 2</div>
            </body>
          </html>
          """
        element = up.util.createElementFromHtml(string)
        expect(element.querySelector('head title').textContent).toEqual('document title')
        expect(element.querySelector('body').getAttribute('data-env')).toEqual('production')
        expect(element.querySelectorAll('body div').length).toBe(2)
        expect(element.querySelectorAll('body div')[0].textContent).toEqual('line 1')
        expect(element.querySelectorAll('body div')[1].textContent).toEqual('line 2')
      it 'parses a string that contains carriage returns (bugfix)', ->
        string = """
          <html>\r
            <body>\r
              <div>line</div>\r
            </body>\r
          </html>\r
          """
        $element = up.util.createElementFromHtml(string)
        expect($element.querySelector('body')).toBeGiven()
        expect($element.querySelector('body div').textContent).toEqual('line')
      it 'does not run forever if a page has a <head> without a <title> (bugfix)', ->
        html = """
          <!doctype html>
          <html>
            <head>
              <meta charset="utf-8" />
              <meta name="format-detection" content="telephone=no">
              <link href='/images/favicon.png' rel='shortcut icon' type='image/png'>
              <meta name='viewport' content='width=device-width, initial-scale=1, maximum-scale=1, minimum-scale=1'>
              <base href="/examples/update-fragment/" />
              <link href='http://fonts.googleapis.com/css?family=Orbitron:400|Ubuntu+Mono:400,700|Source+Sans+Pro:300,400,700,400italic,700italic' rel='stylesheet' type='text/css'>
              <link href="//netdna.bootstrapcdn.com/font-awesome/4.1.0/css/font-awesome.min.css" rel="stylesheet">
              <link href="/stylesheets/example/all.css" rel="stylesheet" />
              <script src="/javascripts/example.js"></script>
            </head>
            <body>
              <div class="page">
                <div class="story">
                  <h1>Full story</h1>
                  <p>Lorem ipsum dolor sit amet.</p>
                  <a href="preview.html" up-target=".story">
                    Read summary
                  </a>
                </div>
              </div>
            </body>
          </html>
          """
        element = up.util.createElementFromHtml(html)
        expect(element.querySelector("title")).toBeMissing()
        expect(element.querySelector("h1").textContent).toEqual('Full story')
      it 'can parse HTML without a <head>', ->
        html = """
          <html>
            <body>
              <h1>Full story</h1>
            </body>
          </html>
          """
        element = up.util.createElementFromHtml(html)
        expect(element.querySelector("title")).toBeMissing()
        expect(element.querySelector("h1").textContent).toEqual('Full story')
      it 'can parse a HTML fragment without a <body>', ->
        html = """
          <h1>Full story</h1>
          """
        element = up.util.createElementFromHtml(html)
        expect(element.querySelector("title")).toBeMissing()
        expect(element.querySelector("h1").textContent).toEqual('Full story')
    # up.util.isFixed() is true when the element itself OR any ancestor has
    # position: fixed. Ancestors above the fixed element are not fixed.
    describe 'up.util.isFixed', ->
      it 'returns true if the given element or one of its ancestors has a "fixed" CSS position', ->
        $grandGrandParent = affix('.grand-parent')
        $grandParent = $grandGrandParent.affix('.grand-parent')
        $parent = $grandParent.affix('.parent')
        $child = $parent.affix('.child')
        $grandParent.css(position: 'fixed')
        expect(up.util.isFixed($child)).toBe(true)
        expect(up.util.isFixed($parent)).toBe(true)
        expect(up.util.isFixed($grandParent)).toBe(true)
        # The ancestor *above* the fixed element is unaffected.
        expect(up.util.isFixed($grandGrandParent)).toBe(false)
      it 'returns false if the given element and its ancestors all have a non-"fixed" CSS position', ->
        $element = affix('.element')
        expect(up.util.isFixed($element)).toBe(false)
    # up.util.setTimer() schedules a callback after the given delay; a delay
    # of zero still defers to the next execution frame (never synchronous).
    describe 'up.util.setTimer', ->
      it 'calls the given function after waiting the given milliseconds', (done) ->
        callback = jasmine.createSpy()
        expectNotCalled = -> expect(callback).not.toHaveBeenCalled()
        expectCalled = -> expect(callback).toHaveBeenCalled()
        up.util.setTimer(100, callback)
        expectNotCalled()
        # Checkpoints at 50ms (before) and 125ms (after) the 100ms delay.
        setTimeout(expectNotCalled, 50)
        setTimeout(expectCalled, 50 + 75)
        setTimeout(done, 50 + 75)
      describe 'if the delay is zero', ->
        it 'calls the given function in the next execution frame', ->
          callback = jasmine.createSpy()
          up.util.setTimer(0, callback)
          expect(callback).not.toHaveBeenCalled()
          setTimeout((-> expect(callback).toHaveBeenCalled()), 0)
# describe 'up.util.argNames', ->
#
# it 'returns an array of argument names for the given function', ->
# fun = ($element, data) ->
# expect(up.util.argNames(fun)).toEqual(['$element', 'data'])
    # up.util.trim() strips all leading/trailing whitespace incl. \t \n \r.
    describe 'up.util.trim', ->
      it 'removes leading and trailing whitespace from the given string', ->
        string = "\t\n\r abc \r\n\t"
        expect(up.util.trim(string)).toEqual('abc')
    # up.util.only() copies just the whitelisted keys; keys absent from the
    # source are not materialized in the result, and the source is untouched.
    describe 'up.util.only', ->
      it 'returns a copy of the given object with only the given whitelisted properties', ->
        original =
          foo: 'foo-value'
          bar: 'bar-value'
          baz: 'baz-value'
          bam: 'bam-value'
        whitelisted = up.util.only(original, 'bar', 'bam')
        expect(whitelisted).toEqual
          bar: 'bar-value'
          bam: 'bam-value'
        # Show that original did not change
        expect(original).toEqual
          foo: 'foo-value'
          bar: 'bar-value'
          baz: 'baz-value'
          bam: 'bam-value'
      it 'does not add empty keys to the returned object if the given object does not have that key', ->
        original =
          foo: 'foo-value'
        whitelisted = up.util.only(original, 'foo', 'bar')
        expect(whitelisted).toHaveOwnProperty('foo')
        expect(whitelisted).not.toHaveOwnProperty('bar')
    # up.util.readInlineStyle() reads only the element's [style] attribute
    # (never computed styles). Second argument: a property name returns a
    # string; an array of names returns an object.
    describe 'up.util.readInlineStyle', ->
      describe 'with a string as second argument', ->
        it 'returns a CSS value string from an inline [style] attribute', ->
          $div = affix('div').attr('style', 'background-color: #ff0000')
          style = up.util.readInlineStyle($div, 'backgroundColor')
          # Browsers convert colors to rgb() values, even IE11
          expect(style).toEqual('rgb(255, 0, 0)')
        it 'returns a blank value if the element does not have the given property in the [style] attribute', ->
          $div = affix('div').attr('style', 'background-color: red')
          style = up.util.readInlineStyle($div, 'color')
          expect(style).toBeBlank()
        it 'returns a blank value the given property is a computed property, but not in the [style] attribute', ->
          # .red-background comes from the spec stylesheet, so the style is
          # computed but not inline.
          $div = affix('div[class="red-background"]')
          inlineStyle = up.util.readInlineStyle($div, 'backgroundColor')
          computedStyle = up.util.readComputedStyle($div, 'backgroundColor')
          expect(computedStyle).toEqual('rgb(255, 0, 0)')
          expect(inlineStyle).toBeBlank()
      describe 'with an array as second argument', ->
        it 'returns an object with the given inline [style] properties', ->
          $div = affix('div').attr('style', 'background-color: #ff0000; color: #0000ff')
          style = up.util.readInlineStyle($div, ['backgroundColor', 'color'])
          expect(style).toEqual
            backgroundColor: 'rgb(255, 0, 0)'
            color: 'rgb(0, 0, 255)'
        it 'returns blank keys if the element does not have the given property in the [style] attribute', ->
          $div = affix('div').attr('style', 'background-color: #ff0000')
          style = up.util.readInlineStyle($div, ['backgroundColor', 'color'])
          expect(style).toHaveOwnProperty('color')
          expect(style.color).toBeBlank()
        it 'returns a blank value the given property is a computed property, but not in the [style] attribute', ->
          $div = affix('div[class="red-background"]')
          inlineStyleHash = up.util.readInlineStyle($div, ['backgroundColor'])
          computedBackground = up.util.readComputedStyle($div, 'backgroundColor')
          expect(computedBackground).toEqual('rgb(255, 0, 0)')
          expect(inlineStyleHash).toHaveOwnProperty('backgroundColor')
          expect(inlineStyleHash.backgroundColor).toBeBlank()
    # up.util.writeInlineStyle() merges camelCased properties into the
    # element's [style] attribute, appending "px" to known length properties
    # given as numbers.
    describe 'up.util.writeInlineStyle', ->
      it "sets the given style properties as the given element's [style] attribute", ->
        $div = affix('div')
        up.util.writeInlineStyle($div, { color: 'red', backgroundColor: 'blue' })
        style = $div.attr('style')
        expect(style).toContain('color: red')
        expect(style).toContain('background-color: blue')
      it "merges the given style properties into the given element's existing [style] value", ->
        $div = affix('div[style="color: red"]')
        up.util.writeInlineStyle($div, { backgroundColor: 'blue' })
        style = $div.attr('style')
        expect(style).toContain('color: red')
        expect(style).toContain('background-color: blue')
      it "converts the values of known length properties to px values automatically", ->
        $div = affix('div')
        up.util.writeInlineStyle($div, { paddingTop: 100 })
        style = $div.attr('style')
        expect(style).toContain('padding-top: 100px')
    # up.util.writeTemporaryStyle() applies inline styles and returns an undo
    # function that restores the previous *inline* values only — styles that
    # were merely inherited/computed are removed, not re-inlined.
    describe 'up.util.writeTemporaryStyle', ->
      it "sets the given inline styles and returns a function that will restore the previous inline styles", ->
        $div = affix('div[style="color: red"]')
        restore = up.util.writeTemporaryStyle($div, { color: 'blue' })
        expect($div.attr('style')).toContain('color: blue')
        expect($div.attr('style')).not.toContain('color: red')
        restore()
        expect($div.attr('style')).not.toContain('color: blue')
        expect($div.attr('style')).toContain('color: red')
      it "does not restore inherited styles", ->
        $div = affix('div[class="red-background"]')
        restore = up.util.writeTemporaryStyle($div, { backgroundColor: 'blue' })
        expect($div.attr('style')).toContain('background-color: blue')
        restore()
        expect($div.attr('style')).not.toContain('background-color')
describe 'up.util.except', ->
it 'returns a copy of the given object but omits the given blacklisted properties', ->
original =
foo: 'foo-value'
bar: 'bar-value'
baz: 'baz-value'
bam: 'bam-value'
whitelisted = up.util.except(original, 'foo', 'baz')
expect(whitelisted).toEqual
bar: 'bar-value'
bam: 'bam-value'
# Show that original did not change
expect(original).toEqual
foo: 'foo-value'
bar: 'bar-value'
baz: 'baz-value'
bam: 'bam-value'
    # up.util.selectorForElement() derives a CSS selector by priority:
    # [up-id] > #id > tagName[name] > classes (non-Unpoly) > [aria-label]
    # > bare tag name. IDs needing escaping use an attribute selector.
    describe 'up.util.selectorForElement', ->
      it "prefers using the element's 'up-id' attribute to using the element's ID", ->
        $element = affix('div[up-id=up-id-value]#id-value')
        expect(up.util.selectorForElement($element)).toBe('[up-id="up-id-value"]')
      it "prefers using the element's ID to using the element's name", ->
        $element = affix('div#id-value[name=name-value]')
        expect(up.util.selectorForElement($element)).toBe("#id-value")
      it "selects the ID with an attribute selector if the ID contains a slash", ->
        $element = affix('div').attr(id: 'foo/bar')
        expect(up.util.selectorForElement($element)).toBe('[id="foo/bar"]')
      it "selects the ID with an attribute selector if the ID contains a space", ->
        $element = affix('div').attr(id: 'foo bar')
        expect(up.util.selectorForElement($element)).toBe('[id="foo bar"]')
      it "selects the ID with an attribute selector if the ID contains a dot", ->
        $element = affix('div').attr(id: 'foo.bar')
        expect(up.util.selectorForElement($element)).toBe('[id="foo.bar"]')
      it "selects the ID with an attribute selector if the ID contains a quote", ->
        $element = affix('div').attr(id: 'foo"bar')
        expect(up.util.selectorForElement($element)).toBe('[id="foo\\"bar"]')
      it "prefers using the element's tagName + [name] to using the element's classes", ->
        $element = affix('input[name=name-value].class1.class2')
        expect(up.util.selectorForElement($element)).toBe('input[name="name-value"]')
      it "prefers using the element's classes to using the element's ARIA label", ->
        $element = affix('div.class1.class2[aria-label="ARIA label value"]')
        expect(up.util.selectorForElement($element)).toBe(".class1.class2")
      it 'does not use Unpoly classes to compose a class selector', ->
        $element = affix('div.class1.up-current.class2')
        expect(up.util.selectorForElement($element)).toBe(".class1.class2")
      it "prefers using the element's ARIA label to using the element's tag name", ->
        $element = affix('div[aria-label="ARIA label value"]')
        expect(up.util.selectorForElement($element)).toBe('[aria-label="ARIA label value"]')
      it "uses the element's tag name if no better description is available", ->
        $element = affix('div')
        expect(up.util.selectorForElement($element)).toBe("div")
      it 'escapes quotes in attribute selector values', ->
        $element = affix('div')
        $element.attr('aria-label', 'foo"bar')
        expect(up.util.selectorForElement($element)).toBe('[aria-label="foo\\"bar"]')
    # up.util.addTemporaryClass() adds a class and returns an undo function
    # restoring the original className.
    describe 'up.util.addTemporaryClass', ->
      it 'adds the given class to the given element', ->
        $element = affix('.foo.bar')
        element = $element.get(0)
        expect(element.className).toEqual('foo bar')
        up.util.addTemporaryClass(element, 'baz')
        expect(element.className).toEqual('foo bar baz')
      it 'returns a function that restores the original class', ->
        $element = affix('.foo.bar')
        element = $element.get(0)
        restoreClass = up.util.addTemporaryClass(element, 'baz')
        expect(element.className).toEqual('foo bar baz')
        restoreClass()
        expect(element.className).toEqual('foo bar')
    # up.util.castedAttr() reads an attribute and casts "true"/"false"/the
    # attribute's own name to booleans; other strings pass through unchanged;
    # a missing attribute yields undefined.
    describe 'up.util.castedAttr', ->
      it 'returns true if the attribute value is the string "true"', ->
        $element = affix('div').attr('foo', 'true')
        expect(up.util.castedAttr($element, 'foo')).toBe(true)
      it 'returns true if the attribute value is the name of the attribute', ->
        $element = affix('div').attr('foo', 'foo')
        expect(up.util.castedAttr($element, 'foo')).toBe(true)
      it 'returns false if the attribute value is the string "false"', ->
        $element = affix('div').attr('foo', 'false')
        expect(up.util.castedAttr($element, 'foo')).toBe(false)
      it 'returns undefined if the element has no such attribute', ->
        $element = affix('div')
        expect(up.util.castedAttr($element, 'foo')).toBe(undefined)
      it 'returns the attribute value unchanged if the value is some string', ->
        $element = affix('div').attr('foo', 'some text')
        expect(up.util.castedAttr($element, 'foo')).toBe('some text')
    # up.util.any() — existential test with short-circuit semantics.
    describe 'up.util.any', ->
      it 'returns true if an element in the array returns true for the given function', ->
        result = up.util.any [null, undefined, 'foo', ''], up.util.isPresent
        expect(result).toBe(true)
      it 'returns false if no element in the array returns true for the given function', ->
        result = up.util.any [null, undefined, ''], up.util.isPresent
        expect(result).toBe(false)
      it 'short-circuits once an element returns true', ->
        count = 0
        up.util.any [null, undefined, 'foo', ''], (element) ->
          count += 1
          up.util.isPresent(element)
        # 'foo' is the 3rd element; the trailing '' must not be visited.
        expect(count).toBe(3)
    # up.util.all() — universal test with short-circuit, (item, index)
    # callback arguments and property-name shorthand.
    describe 'up.util.all', ->
      it 'returns true if all element in the array returns true for the given function', ->
        result = up.util.all ['foo', 'bar', 'baz'], up.util.isPresent
        expect(result).toBe(true)
      it 'returns false if an element in the array returns false for the given function', ->
        result = up.util.all ['foo', 'bar', null, 'baz'], up.util.isPresent
        expect(result).toBe(false)
      it 'short-circuits once an element returns false', ->
        count = 0
        up.util.all ['foo', 'bar', '', 'baz'], (element) ->
          count += 1
          up.util.isPresent(element)
        # '' is the 3rd element; 'baz' must not be visited.
        expect(count).toBe(3)
      it 'passes the iteration index as second argument to the given function', ->
        array = ["apple", "orange", "cucumber"]
        args = []
        up.util.all array, (item, index) ->
          args.push(index)
          true
        expect(args).toEqual [0, 1, 2]
      it 'accepts a property name instead of a function, which collects that property from each item', ->
        allTrue = [ { prop: true }, { prop: true } ]
        someFalse = [ { prop: true }, { prop: false } ]
        expect(up.util.all(allTrue, 'prop')).toBe(true)
        expect(up.util.all(someFalse, 'prop')).toBe(false)
# describe 'up.util.none', ->
#
# it 'returns true if no element in the array returns true for the given function', ->
# result = up.util.none ['foo', 'bar', 'baz'], up.util.isBlank
# expect(result).toBe(true)
#
# it 'returns false if an element in the array returns false for the given function', ->
# result = up.util.none ['foo', 'bar', null, 'baz'], up.util.isBlank
# expect(result).toBe(false)
#
# it 'short-circuits once an element returns true', ->
# count = 0
# up.util.none ['foo', 'bar', '', 'baz'], (element) ->
# count += 1
# up.util.isBlank(element)
# expect(count).toBe(3)
#
# it 'passes the iteration index as second argument to the given function', ->
# array = ["apple", "orange", "cucumber"]
# args = []
# up.util.none array, (item, index) ->
# args.push(index)
# false
# expect(args).toEqual [0, 1, 2]
#
# it 'accepts a property name instead of a function, which collects that property from each item', ->
# allFalse = [ { prop: false }, { prop: false } ]
# someTrue = [ { prop: true }, { prop: false } ]
# expect(up.util.none(allFalse, 'prop')).toBe(true)
# expect(up.util.none(someTrue, 'prop')).toBe(false)
    # NOTE(review): duplicate describe — 'up.util.any' is already specced
    # above; consider merging the two blocks.
    describe 'up.util.any', ->
      it 'returns true if at least one element in the array returns true for the given function', ->
        result = up.util.any ['', 'bar', null], up.util.isPresent
        expect(result).toBe(true)
      it 'returns false if no element in the array returns true for the given function', ->
        result = up.util.any ['', null, undefined], up.util.isPresent
        expect(result).toBe(false)
      it 'passes the iteration index as second argument to the given function', ->
        array = ["apple", "orange", "cucumber"]
        args = []
        up.util.any array, (item, index) ->
          args.push(index)
          false
        expect(args).toEqual [0, 1, 2]
      it 'accepts a property name instead of a function, which collects that property from each item', ->
        someTrue = [ { prop: true }, { prop: false } ]
        allFalse = [ { prop: false }, { prop: false } ]
        expect(up.util.any(someTrue, 'prop')).toBe(true)
        expect(up.util.any(allFalse, 'prop')).toBe(false)
describe 'up.util.isBlank', ->
it 'returns false for false', ->
expect(up.util.isBlank(false)).toBe(false)
it 'returns false for true', ->
expect(up.util.isBlank(true)).toBe(false)
it 'returns true for null', ->
expect(up.util.isBlank(null)).toBe(true)
it 'returns true for undefined', ->
expect(up.util.isBlank(undefined)).toBe(true)
it 'returns true for an empty String', ->
expect(up.util.isBlank('')).toBe(true)
it 'returns false for a String with at least one character', ->
expect(up.util.isBlank('string')).toBe(false)
it 'returns true for an empty array', ->
expect(up.util.isBlank([])).toBe(true)
it 'returns false for an array with at least one element', ->
expect(up.util.isBlank(['element'])).toBe(false)
it 'returns true for an empty jQuery collection', ->
expect(up.util.isBlank($([]))).toBe(true)
it 'returns false for a jQuery collection with at least one element', ->
expect(up.util.isBlank($('body'))).toBe(false)
it 'returns true for an empty object', ->
expect(up.util.isBlank({})).toBe(true)
it 'returns false for a function', ->
expect(up.util.isBlank((->))).toBe(false)
it 'returns true for an object with at least one key', ->
expect(up.util.isBlank({key: 'value'})).toBe(false)
describe 'up.util.normalizeUrl', ->
it 'normalizes a relative path', ->
expect(up.util.normalizeUrl('foo')).toBe("http://#{location.hostname}:#{location.port}/foo")
it 'normalizes an absolute path', ->
expect(up.util.normalizeUrl('/foo')).toBe("http://#{location.hostname}:#{location.port}/foo")
it 'normalizes a full URL', ->
expect(up.util.normalizeUrl('http://example.com/foo/bar')).toBe('http://example.com/foo/bar')
it 'preserves a query string', ->
expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value')).toBe('http://example.com/foo/bar?key=value')
it 'strips a query string with { search: false } option', ->
expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value', search: false)).toBe('http://example.com/foo/bar')
it 'does not strip a trailing slash by default', ->
expect(up.util.normalizeUrl('/foo/')).toEqual("http://#{location.hostname}:#{location.port}/foo/")
it 'normalizes redundant segments', ->
expect(up.util.normalizeUrl('/foo/../foo')).toBe("http://#{location.hostname}:#{location.port}/foo")
it 'strips a #hash by default', ->
expect(up.util.normalizeUrl('http://example.com/foo/bar#fragment')).toBe('http://example.com/foo/bar')
it 'preserves a #hash with { hash: true } option', ->
expect(up.util.normalizeUrl('http://example.com/foo/bar#fragment', hash: true)).toBe('http://example.com/foo/bar#fragment')
it 'puts a #hash behind the query string', ->
expect(up.util.normalizeUrl('http://example.com/foo/bar?key=value#fragment', hash: true)).toBe('http://example.com/foo/bar?key=value#fragment')
describe 'up.util.detect', ->
it 'finds the first element in the given array that matches the given tester', ->
array = ['foo', 'bar', 'baz']
tester = (element) -> element[0] == 'b'
expect(up.util.detect(array, tester)).toEqual('bar')
it "returns undefined if the given array doesn't contain a matching element", ->
array = ['foo', 'bar', 'baz']
tester = (element) -> element[0] == 'z'
expect(up.util.detect(array, tester)).toBeUndefined()
describe 'up.util.config', ->
it 'creates an object with the given attributes', ->
object = up.util.config(a: 1, b: 2)
expect(object.a).toBe(1)
expect(object.b).toBe(2)
it 'does not allow to set a key that was not included in the factory settings', ->
object = up.util.config(a: 1)
object.b = 2
expect(object.b).toBeUndefined()
describe '#reset', ->
it 'resets the object to its original state', ->
object = up.util.config(a: 1)
expect(object.b).toBeUndefined()
object.a = 2
expect(object.a).toBe(2)
object.reset()
expect(object.a).toBe(1)
it 'does not remove the #reset or #update method from the object', ->
object = up.util.config(a: 1)
object.b = 2
object.reset()
expect(object.reset).toBeDefined()
describe 'up.util.remove', ->
it 'removes the given string from the given array', ->
array = ['a', 'b', 'c']
up.util.remove(array, 'b')
expect(array).toEqual ['a', 'c']
it 'removes the given object from the given array', ->
obj1 = { 'key': 1 }
obj2 = { 'key': 2 }
obj3 = { 'key': 3 }
array = [obj1, obj2, obj3]
up.util.remove(array, obj2)
expect(array).toEqual [obj1, obj3]
describe 'up.util.unresolvablePromise', ->
it 'return a pending promise', (done) ->
promise = up.util.unresolvablePromise()
promiseState(promise).then (result) ->
expect(result.state).toEqual('pending')
done()
it 'returns a different object every time (to prevent memory leaks)', ->
one = up.util.unresolvablePromise()
two = up.util.unresolvablePromise()
expect(one).not.toBe(two)
describe 'up.util.flatten', ->
it 'flattens the given array', ->
array = [1, [2, 3], 4]
expect(u.flatten(array)).toEqual([1, 2, 3, 4])
it 'only flattens one level deep for performance reasons', ->
array = [1, [2, [3,4]], 5]
expect(u.flatten(array)).toEqual([1, 2, [3, 4], 5])
describe 'up.util.renameKey', ->
it 'renames a key in the given property', ->
object = { a: 'a value', b: 'b value'}
u.renameKey(object, 'a', 'c')
expect(object.a).toBeUndefined()
expect(object.b).toBe('b value')
expect(object.c).toBe('a value')
describe 'up.util.selectInSubtree', ->
it 'finds the selector in ancestors and descendants of the given element', ->
$grandMother = affix('.grand-mother.match')
$mother = $grandMother.affix('.mother')
$element = $mother.affix('.element')
$child = $element.affix('.child.match')
$grandChild = $child.affix('.grand-child.match')
$matches = up.util.selectInSubtree($element, '.match')
$expected = $child.add($grandChild)
expect($matches).toEqual $expected
it 'finds the element itself if it matches the selector', ->
$element = affix('.element.match')
$matches = up.util.selectInSubtree($element, '.match')
expect($matches).toEqual $element
describe 'when given a jQuery collection with multiple elements', ->
it 'searches in a all subtrees of the given elements', ->
$a_grandMother = affix('.grand-mother.match')
$a_mother = $a_grandMother.affix('.mother')
$a_element = $a_mother.affix('.element')
$a_child = $a_element.affix('.child.match')
$a_grandChild = $a_child.affix('.grand-child.match')
$b_grandMother = affix('.grand-mother.match')
$b_mother = $b_grandMother.affix('.mother')
$b_element = $b_mother.affix('.element')
$b_child = $b_element.affix('.child.match')
$b_grandChild = $b_child.affix('.grand-child.match')
$matches = up.util.selectInSubtree($a_element.add($b_element), '.match')
expect($matches).toEqual $a_child.add($a_grandChild).add($b_child).add($b_grandChild)
describe 'up.util.selectInDynasty', ->
it 'finds the selector in both ancestors and descendants of the given element', ->
$grandMother = affix('.grand-mother.match')
$mother = $grandMother.affix('.mother')
$element = $mother.affix('.element')
$child = $element.affix('.child.match')
$grandChild = $child.affix('.grand-child.match')
$matches = up.util.selectInDynasty($element, '.match')
$expected = $grandMother.add($child).add($grandChild)
expect($matches).toEqual $expected
it 'finds the element itself if it matches the selector', ->
$element = affix('.element.match')
$matches = up.util.selectInDynasty($element, '.match')
expect($matches).toEqual $element
describe 'up.util.isCrossDomain', ->
it 'returns false for an absolute path', ->
expect(up.util.isCrossDomain('/foo')).toBe(false)
it 'returns false for an relative path', ->
expect(up.util.isCrossDomain('foo')).toBe(false)
it 'returns false for a fully qualified URL with the same protocol and hostname as the current location', ->
fullUrl = "#{location.protocol}//#{location.host}/foo"
expect(up.util.isCrossDomain(fullUrl)).toBe(false)
it 'returns true for a fully qualified URL with a different protocol than the current location', ->
fullUrl = "otherprotocol://#{location.host}/foo"
expect(up.util.isCrossDomain(fullUrl)).toBe(true)
it 'returns false for a fully qualified URL with a different hostname than the current location', ->
fullUrl = "#{location.protocol}//other-host.tld/foo"
expect(up.util.isCrossDomain(fullUrl)).toBe(true)
describe 'up.util.isOptions', ->
it 'returns true for an Object instance', ->
expect(up.util.isOptions(new Object())).toBe(true)
it 'returns true for an object literal', ->
expect(up.util.isOptions({ foo: 'bar'})).toBe(true)
it 'returns true for a prototype-less object', ->
expect(up.util.isOptions(Object.create(null))).toBe(true)
it 'returns false for undefined', ->
expect(up.util.isOptions(undefined)).toBe(false)
it 'returns false for null', ->
expect(up.util.isOptions(null)).toBe(false)
it 'returns false for a function (which is technically an object)', ->
fn = -> 'foo'
fn.key = 'PI:KEY:<KEY>END_PI'
expect(up.util.isOptions(fn)).toBe(false)
it 'returns false for an Array', ->
expect(up.util.isOptions(['foo'])).toBe(false)
it 'returns false for a jQuery collection', ->
expect(up.util.isOptions($('body'))).toBe(false)
it 'returns false for a Promise', ->
expect(up.util.isOptions(Promise.resolve())).toBe(false)
it 'returns false for a FormData object', ->
expect(up.util.isOptions(new FormData())).toBe(false)
it 'returns false for a Date', ->
expect(up.util.isOptions(new Date())).toBe(false)
it 'returns false for a RegExp', ->
expect(up.util.isOptions(new RegExp('foo'))).toBe(false)
describe 'up.util.isObject', ->
it 'returns true for an Object instance', ->
expect(up.util.isObject(new Object())).toBe(true)
it 'returns true for an object literal', ->
expect(up.util.isObject({ foo: 'bar'})).toBe(true)
it 'returns false for undefined', ->
expect(up.util.isObject(undefined)).toBe(false)
it 'returns false for null', ->
expect(up.util.isObject(null)).toBe(false)
it 'returns true for a function (which is technically an object)', ->
fn = -> 'foo'
fn.key = 'PI:KEY:<KEY>END_PI'
expect(up.util.isObject(fn)).toBe(true)
it 'returns true for an array', ->
expect(up.util.isObject(['foo'])).toBe(true)
it 'returns true for a jQuery collection', ->
expect(up.util.isObject($('body'))).toBe(true)
it 'returns true for a promise', ->
expect(up.util.isObject(Promise.resolve())).toBe(true)
it 'returns true for a FormData object', ->
expect(up.util.isObject(new FormData())).toBe(true)
describe 'up.util.merge', ->
it 'merges the given objects', ->
obj = { a: '1', b: '2' }
other = { b: '3', c: '4' }
obj = up.util.merge(obj, other)
expect(obj).toEqual { a: '1', b: '3', c: '4' }
it 'overrides (not merges) keys with object value', ->
obj = { a: '1', b: { c: '2', d: '3' } }
other = { e: '4', b: { f: '5', g: '6' }}
obj = up.util.merge(obj, other)
expect(obj).toEqual { a: '1', e: '4', b: { f: '5', g: '6' } }
it 'ignores undefined arguments', ->
obj = { a: 1, b: 2 }
result = up.util.merge(obj, undefined)
expect(result).toEqual { a: 1, b: 2 }
reverseResult = up.util.merge(undefined, obj)
expect(reverseResult).toEqual { a: 1, b: 2 }
it 'ignores null arguments', ->
obj = { a: 1, b: 2 }
result = up.util.merge(obj, null)
expect(result).toEqual { a: 1, b: 2 }
reverseResult = up.util.merge(null, obj)
expect(reverseResult).toEqual { a: 1, b: 2 }
# describe 'up.util.deepMerge', ->
#
# it 'recursively merges the given objects', ->
# obj = { a: '1', b: { c: '2', d: '3' } }
# other = { e: '4', b: { f: '5', g: '6' }}
# obj = up.util.deepMerge(obj, other)
# expect(obj).toEqual { a: '1', e: '4', b: { c: '2', d: '3', f: '5', g: '6' } }
#
# it 'ignores undefined arguments', ->
# obj = { a: 1, b: 2 }
#
# result = up.util.deepMerge(obj, undefined)
# expect(result).toEqual { a: 1, b: 2 }
#
# reverseResult = up.util.deepMerge(undefined, obj)
# expect(reverseResult).toEqual { a: 1, b: 2 }
#
# it 'ignores null arguments', ->
# obj = { a: 1, b: 2 }
#
# result = up.util.deepMerge(obj, null)
# expect(result).toEqual { a: 1, b: 2 }
#
# reverseResult = up.util.deepMerge(null, obj)
# expect(reverseResult).toEqual { a: 1, b: 2 }
#
# it 'overwrites (and does not concatenate) array values', ->
# obj = { a: ['1', '2'] }
# other = { a: ['3', '4'] }
# obj = up.util.deepMerge(obj, other)
# expect(obj).toEqual { a: ['3', '4'] }
describe 'up.util.memoize', ->
it 'returns a function that calls the memoized function', ->
fun = (a, b) -> a + b
memoized = u.memoize(fun)
expect(memoized(2, 3)).toEqual(5)
it 'returns the cached return value of the first call when called again', ->
spy = jasmine.createSpy().and.returnValue(5)
memoized = u.memoize(spy)
expect(memoized(2, 3)).toEqual(5)
expect(memoized(2, 3)).toEqual(5)
expect(spy.calls.count()).toEqual(1)
['assign', 'assignPolyfill'].forEach (assignVariant) ->
describe "up.util.#{assignVariant}", ->
assign = up.util[assignVariant]
it 'copies the second object into the first object', ->
target = { a: 1 }
source = { b: 2, c: 3 }
assign(target, source)
expect(target).toEqual { a: 1, b: 2, c: 3 }
# Source is unchanged
expect(source).toEqual { b: 2, c: 3 }
it 'copies null property values', ->
target = { a: 1, b: 2 }
source = { b: null }
assign(target, source)
expect(target).toEqual { a: 1, b: null }
it 'copies undefined property values', ->
target = { a: 1, b: 2 }
source = { b: undefined }
assign(target, source)
expect(target).toEqual { a: 1, b: undefined }
it 'returns the first object', ->
target = { a: 1 }
source = { b: 2 }
result = assign(target, source)
expect(result).toBe(target)
it 'takes multiple sources to copy from', ->
target = { a: 1 }
source1 = { b: 2, c: 3 }
source2 = { d: 4, e: 5 }
assign(target, source1, source2)
expect(target).toEqual { a: 1, b: 2, c: 3, d: 4, e: 5 }
|
[
{
"context": "onfig = require '../config'\n\nSERIALIZATION_KEY = 'MODEL'\n# SERIALIZATION_EXPIRE_TIME_MS = 1000 * 10 # 10 ",
"end": 3105,
"score": 0.7463817000389099,
"start": 3100,
"tag": "KEY",
"value": "MODEL"
},
{
"context": " @overlay = new Overlay()\n\n @auth = new Auth {... | src/models/index.coffee | FreeRoamApp/free-roam | 14 | Exoid = require 'exoid'
_isEmpty = require 'lodash/isEmpty'
_isPlainObject = require 'lodash/isPlainObject'
_defaults = require 'lodash/defaults'
_merge = require 'lodash/merge'
_pick = require 'lodash/pick'
_map = require 'lodash/map'
_zipWith = require 'lodash/zipWith'
_differenceWith = require 'lodash/differenceWith'
_isEqual = require 'lodash/isEqual'
_keys = require 'lodash/keys'
RxBehaviorSubject = require('rxjs/BehaviorSubject').BehaviorSubject
RxObservable = require('rxjs/Observable').Observable
require 'rxjs/add/observable/combineLatest'
require 'rxjs/add/observable/of'
require 'rxjs/add/operator/take'
Auth = require './auth'
Agency = require './agency'
Amenity = require './amenity'
AmenityAttachment = require './amenity_attachment'
AmenityReview = require './amenity_review'
AdditionalScript = require './additional_script'
Ban = require './ban'
Campground = require './campground'
CampgroundAttachment = require './campground_attachment'
CampgroundReview = require './campground_review'
Category = require './category'
CellTower = require './cell_tower'
CheckIn = require './check_in'
Connection = require './connection'
ConversationMessage = require './conversation_message'
Conversation = require './conversation'
Coordinate = require './coordinate'
Cookie = require './cookie'
# EarnAction = require './earn_action'
Experiment = require './experiment'
Event = require './event'
Geocoder = require './geocoder'
Gif = require './gif'
GiveawayEntry = require './giveaway_entry'
Group = require './group'
GroupAuditLog = require './group_audit_log'
GroupUser = require './group_user'
GroupRole = require './group_role'
Hazard = require './hazard'
Image = require './image'
Item = require './item'
Language = require './language'
LoginLink = require './login_link'
LocalMap = require './local_map'
Notification = require './notification'
Office = require './office'
OfflineData = require './offline_data'
Overnight = require './overnight'
OvernightAttachment = require './overnight_attachment'
OvernightReview = require './overnight_review'
Payment = require './payment'
Product = require './product'
PlaceAttachmentBase = require './place_attachment_base'
PlaceBase = require './place_base'
PlaceReviewBase = require './place_review_base'
PushToken = require './push_token'
Region = require './region'
StatusBar = require './status_bar'
Subscription = require './subscription'
Thread = require './thread'
Comment = require './comment'
Vote = require './vote'
Time = require './time'
Transaction = require './transaction'
Trip = require './trip'
TripFollower = require './trip_follower'
User = require './user'
UserBlock = require './user_block'
UserData = require './user_data'
UserLocation = require './user_location'
UserRig = require './user_rig'
UserSettings = require './user_settings'
Drawer = require './drawer'
EarnAlert = require './earn_alert'
Overlay = require './overlay'
Tooltip = require './tooltip'
InstallOverlay = require './install_overlay'
Window = require './window'
request = require '../services/request'
config = require '../config'
SERIALIZATION_KEY = 'MODEL'
# SERIALIZATION_EXPIRE_TIME_MS = 1000 * 10 # 10 seconds
module.exports = class Model
constructor: (options) ->
{serverHeaders, io, @portal, language,
initialCookies, setCookie, host} = options
serverHeaders ?= {}
cache = window?[SERIALIZATION_KEY] or {}
window?[SERIALIZATION_KEY] = null
# maybe this means less memory used for long caches?
document?.querySelector('.model')?.innerHTML = ''
# isExpired = if serialization.expires?
# # Because of potential clock skew we check around the value
# delta = Math.abs(Date.now() - serialization.expires)
# delta > SERIALIZATION_EXPIRE_TIME_MS
# else
# true
# cache = if isExpired then {} else serialization
@isFromCache = not _isEmpty cache
userAgent = serverHeaders['user-agent'] or navigator?.userAgent
ioEmit = (event, opts) =>
accessToken = @cookie.get 'accessToken'
io.emit event, _defaults {accessToken, userAgent}, opts
proxy = (url, opts) =>
accessToken = @cookie.get 'accessToken'
proxyHeaders = _pick serverHeaders, [
'cookie'
'user-agent'
'accept-language'
'x-forwarded-for'
]
request url, _merge {
responseType: 'json'
query: if accessToken? then {accessToken} else {}
headers: if _isPlainObject opts?.body
_merge {
# Avoid CORS preflight
'Content-Type': 'text/plain'
}, proxyHeaders
else
proxyHeaders
}, opts
if navigator?.onLine
offlineCache = null
else
offlineCache = try
JSON.parse localStorage?.offlineCache
catch
{}
@initialCache = _defaults offlineCache, cache.exoid
@exoid = new Exoid
ioEmit: ioEmit
io: io
cache: @initialCache
isServerSide: not window?
pushToken = new RxBehaviorSubject null
@cookie = new Cookie {initialCookies, setCookie, host}
@l = new Language {language, @cookie}
@overlay = new Overlay()
@auth = new Auth {@exoid, @cookie, pushToken, @l, userAgent, @portal}
@statusBar = new StatusBar {}
@offlineData = new OfflineData {@exoid, @portal, @statusBar, @l}
@additionalScript = new AdditionalScript()
@agency = new Agency {@auth}
@amenity = new Amenity {@auth, @l, @offlineData}
@amenityAttachment = new AmenityAttachment {@auth}
@amenityReview = new AmenityReview {@auth, @exoid, proxy}
@ban = new Ban {@auth}
@category = new Category {@auth}
@campground = new Campground {@auth, @l, @offlineData}
@campgroundAttachment = new CampgroundAttachment {@auth}
@campgroundReview = new CampgroundReview {@auth, @exoid, proxy}
@comment = new Comment {@auth}
@cellTower = new CellTower {@auth, @l, @offlineData}
@checkIn = new CheckIn {@auth, proxy, @l}
@connection = new Connection {@auth}
@conversationMessage = new ConversationMessage {@auth, proxy, @exoid}
@conversation = new Conversation {@auth}
@coordinate = new Coordinate {@auth}
# @earnAction = new EarnAction {@auth}
@event = new Event {@auth}
@experiment = new Experiment {@cookie}
@geocoder = new Geocoder {@auth}
@giveawayEntry = new GiveawayEntry {@auth}
@gif = new Gif()
@group = new Group {@auth}
@groupAuditLog = new GroupAuditLog {@auth}
@groupUser = new GroupUser {@auth}
@groupRole = new GroupRole {@auth}
@hazard = new Hazard {@auth, @l, @offlineData}
@image = new Image {@additionalScript}
@item = new Item {@auth}
@loginLink = new LoginLink {@auth}
@localMap = new LocalMap {@auth}
@notification = new Notification {@auth}
@office = new Office {@auth}
@overnight = new Overnight {@auth, @l, @offlineData}
@overnightAttachment = new OvernightAttachment {@auth}
@overnightReview = new OvernightReview {@auth, @exoid, proxy}
@payment = new Payment {@auth}
@placeAttachment = new PlaceAttachmentBase {@auth}
@placeBase = new PlaceBase {@auth, @l, @offlineData}
@placeReview = new PlaceReviewBase {@auth}
@product = new Product {@auth}
@pushToken = new PushToken {@auth, pushToken}
@region = new Region {@auth}
@subscription = new Subscription {@auth}
@thread = new Thread {@auth, @l, @group, @exoid, proxy}
@transaction = new Transaction {@auth}
@time = new Time {@auth}
@trip = new Trip {@auth, proxy, @exoid}
@tripFollower = new TripFollower {@auth}
@user = new User {@auth, proxy, @exoid, @cookie, @l, @overlay, @portal, @router}
@userBlock = new UserBlock {@auth}
@userData = new UserData {@auth}
@userLocation = new UserLocation {@auth}
@userRig = new UserRig {@auth}
@userSettings = new UserSettings {@auth}
@vote = new Vote {@auth}
@drawer = new Drawer()
@earnAlert = new EarnAlert()
@installOverlay = new InstallOverlay {@l, @overlay}
@tooltip = new Tooltip()
@portal?.setModels {
@user, @pushToken, @l, @installOverlay, @overlay
}
@window = new Window {@cookie, @experiment, userAgent}
# after page has loaded, refetch all initial (cached) requests to verify they're still up-to-date
validateInitialCache: =>
cache = @initialCache
@initialCache = null
# could listen for postMessage from service worker to see if this is from
# cache, then validate data
requests = _map cache, (result, key) =>
req = try
JSON.parse key
catch
RxObservable.of null
if req.path
@auth.stream req.path, req.body, {ignoreCache: true} #, options
# TODO: seems to use anon cookie for this. not sure how to fix...
# i guess keep initial cookie stored and run using that?
# so need to handle the case where the cookie changes between server-side
# cache and the actual get (when user doesn't exist from exoid, but cookie gets user)
RxObservable.combineLatest(
requests, (vals...) -> vals
)
.take(1).subscribe (responses) =>
responses = _zipWith responses, _keys(cache), (response, req) ->
{req, response}
cacheArray = _map cache, (response, req) ->
{req, response}
# see if our updated responses differ from the cached data.
changedReqs = _differenceWith(responses, cacheArray, _isEqual)
# update with new values
_map changedReqs, ({req, response}) =>
console.log 'OUTDATED EXOID:', req, 'replacing...', response
@exoid.setDataCache req, response
# there's a change this will be invalidated every time
# eg. if we add some sort of timer / visitCount to user.getMe
# i'm not sure if that's a bad thing or not. some people always
# load from cache then update, and this would basically be the same
unless _isEmpty changedReqs
console.log 'invalidating html cache...'
@portal.call 'cache.deleteHtmlCache'
# FIXME TODO invalidate in service worker
wasCached: => @isFromCache
dispose: =>
@time.dispose()
@exoid.disposeAll()
getSerializationStream: =>
@exoid.getCacheStream()
.map (exoidCache) ->
string = JSON.stringify({
exoid: exoidCache
# problem with this is clock skew
# expires: Date.now() + SERIALIZATION_EXPIRE_TIME_MS
}).replace /<\/script/gi, '<\\/script'
"window['#{SERIALIZATION_KEY}']=#{string};"
| 105819 | Exoid = require 'exoid'
_isEmpty = require 'lodash/isEmpty'
_isPlainObject = require 'lodash/isPlainObject'
_defaults = require 'lodash/defaults'
_merge = require 'lodash/merge'
_pick = require 'lodash/pick'
_map = require 'lodash/map'
_zipWith = require 'lodash/zipWith'
_differenceWith = require 'lodash/differenceWith'
_isEqual = require 'lodash/isEqual'
_keys = require 'lodash/keys'
RxBehaviorSubject = require('rxjs/BehaviorSubject').BehaviorSubject
RxObservable = require('rxjs/Observable').Observable
require 'rxjs/add/observable/combineLatest'
require 'rxjs/add/observable/of'
require 'rxjs/add/operator/take'
Auth = require './auth'
Agency = require './agency'
Amenity = require './amenity'
AmenityAttachment = require './amenity_attachment'
AmenityReview = require './amenity_review'
AdditionalScript = require './additional_script'
Ban = require './ban'
Campground = require './campground'
CampgroundAttachment = require './campground_attachment'
CampgroundReview = require './campground_review'
Category = require './category'
CellTower = require './cell_tower'
CheckIn = require './check_in'
Connection = require './connection'
ConversationMessage = require './conversation_message'
Conversation = require './conversation'
Coordinate = require './coordinate'
Cookie = require './cookie'
# EarnAction = require './earn_action'
Experiment = require './experiment'
Event = require './event'
Geocoder = require './geocoder'
Gif = require './gif'
GiveawayEntry = require './giveaway_entry'
Group = require './group'
GroupAuditLog = require './group_audit_log'
GroupUser = require './group_user'
GroupRole = require './group_role'
Hazard = require './hazard'
Image = require './image'
Item = require './item'
Language = require './language'
LoginLink = require './login_link'
LocalMap = require './local_map'
Notification = require './notification'
Office = require './office'
OfflineData = require './offline_data'
Overnight = require './overnight'
OvernightAttachment = require './overnight_attachment'
OvernightReview = require './overnight_review'
Payment = require './payment'
Product = require './product'
PlaceAttachmentBase = require './place_attachment_base'
PlaceBase = require './place_base'
PlaceReviewBase = require './place_review_base'
PushToken = require './push_token'
Region = require './region'
StatusBar = require './status_bar'
Subscription = require './subscription'
Thread = require './thread'
Comment = require './comment'
Vote = require './vote'
Time = require './time'
Transaction = require './transaction'
Trip = require './trip'
TripFollower = require './trip_follower'
User = require './user'
UserBlock = require './user_block'
UserData = require './user_data'
UserLocation = require './user_location'
UserRig = require './user_rig'
UserSettings = require './user_settings'
Drawer = require './drawer'
EarnAlert = require './earn_alert'
Overlay = require './overlay'
Tooltip = require './tooltip'
InstallOverlay = require './install_overlay'
Window = require './window'
request = require '../services/request'
config = require '../config'
SERIALIZATION_KEY = '<KEY>'
# SERIALIZATION_EXPIRE_TIME_MS = 1000 * 10 # 10 seconds
module.exports = class Model
constructor: (options) ->
{serverHeaders, io, @portal, language,
initialCookies, setCookie, host} = options
serverHeaders ?= {}
cache = window?[SERIALIZATION_KEY] or {}
window?[SERIALIZATION_KEY] = null
# maybe this means less memory used for long caches?
document?.querySelector('.model')?.innerHTML = ''
# isExpired = if serialization.expires?
# # Because of potential clock skew we check around the value
# delta = Math.abs(Date.now() - serialization.expires)
# delta > SERIALIZATION_EXPIRE_TIME_MS
# else
# true
# cache = if isExpired then {} else serialization
@isFromCache = not _isEmpty cache
userAgent = serverHeaders['user-agent'] or navigator?.userAgent
ioEmit = (event, opts) =>
accessToken = @cookie.get 'accessToken'
io.emit event, _defaults {accessToken, userAgent}, opts
proxy = (url, opts) =>
accessToken = @cookie.get 'accessToken'
proxyHeaders = _pick serverHeaders, [
'cookie'
'user-agent'
'accept-language'
'x-forwarded-for'
]
request url, _merge {
responseType: 'json'
query: if accessToken? then {accessToken} else {}
headers: if _isPlainObject opts?.body
_merge {
# Avoid CORS preflight
'Content-Type': 'text/plain'
}, proxyHeaders
else
proxyHeaders
}, opts
if navigator?.onLine
offlineCache = null
else
offlineCache = try
JSON.parse localStorage?.offlineCache
catch
{}
@initialCache = _defaults offlineCache, cache.exoid
@exoid = new Exoid
ioEmit: ioEmit
io: io
cache: @initialCache
isServerSide: not window?
pushToken = new RxBehaviorSubject null
@cookie = new Cookie {initialCookies, setCookie, host}
@l = new Language {language, @cookie}
@overlay = new Overlay()
@auth = new Auth {@exoid, @cookie, pushToken, @l, userAgent, @portal}
@statusBar = new StatusBar {}
@offlineData = new OfflineData {@exoid, @portal, @statusBar, @l}
@additionalScript = new AdditionalScript()
@agency = new Agency {@auth}
@amenity = new Amenity {@auth, @l, @offlineData}
@amenityAttachment = new AmenityAttachment {@auth}
@amenityReview = new AmenityReview {@auth, @exoid, proxy}
@ban = new Ban {@auth}
@category = new Category {@auth}
@campground = new Campground {@auth, @l, @offlineData}
@campgroundAttachment = new CampgroundAttachment {@auth}
@campgroundReview = new CampgroundReview {@auth, @exoid, proxy}
@comment = new Comment {@auth}
@cellTower = new CellTower {@auth, @l, @offlineData}
@checkIn = new CheckIn {@auth, proxy, @l}
@connection = new Connection {@auth}
@conversationMessage = new ConversationMessage {@auth, proxy, @exoid}
@conversation = new Conversation {@auth}
@coordinate = new Coordinate {@auth}
# @earnAction = new EarnAction {@auth}
@event = new Event {@auth}
@experiment = new Experiment {@cookie}
@geocoder = new Geocoder {@auth}
@giveawayEntry = new GiveawayEntry {@auth}
@gif = new Gif()
@group = new Group {@auth}
@groupAuditLog = new GroupAuditLog {@auth}
@groupUser = new GroupUser {@auth}
@groupRole = new GroupRole {@auth}
@hazard = new Hazard {@auth, @l, @offlineData}
@image = new Image {@additionalScript}
@item = new Item {@auth}
@loginLink = new LoginLink {@auth}
@localMap = new LocalMap {@auth}
@notification = new Notification {@auth}
@office = new Office {@auth}
@overnight = new Overnight {@auth, @l, @offlineData}
@overnightAttachment = new OvernightAttachment {@auth}
@overnightReview = new OvernightReview {@auth, @exoid, proxy}
@payment = new Payment {@auth}
@placeAttachment = new PlaceAttachmentBase {@auth}
@placeBase = new PlaceBase {@auth, @l, @offlineData}
@placeReview = new PlaceReviewBase {@auth}
@product = new Product {@auth}
@pushToken = new PushToken {@auth, pushToken}
@region = new Region {@auth}
@subscription = new Subscription {@auth}
@thread = new Thread {@auth, @l, @group, @exoid, proxy}
@transaction = new Transaction {@auth}
@time = new Time {@auth}
@trip = new Trip {@auth, proxy, @exoid}
@tripFollower = new TripFollower {@auth}
@user = new User {@auth, proxy, @exoid, @cookie, @l, @overlay, @portal, @router}
@userBlock = new UserBlock {@auth}
@userData = new UserData {@auth}
@userLocation = new UserLocation {@auth}
@userRig = new UserRig {@auth}
@userSettings = new UserSettings {@auth}
@vote = new Vote {@auth}
@drawer = new Drawer()
@earnAlert = new EarnAlert()
@installOverlay = new InstallOverlay {@l, @overlay}
@tooltip = new Tooltip()
@portal?.setModels {
@user, @pushToken, @l, @installOverlay, @overlay
}
@window = new Window {@cookie, @experiment, userAgent}
# after page has loaded, refetch all initial (cached) requests to verify they're still up-to-date
validateInitialCache: =>
cache = @initialCache
@initialCache = null
# could listen for postMessage from service worker to see if this is from
# cache, then validate data
requests = _map cache, (result, key) =>
req = try
JSON.parse key
catch
RxObservable.of null
if req.path
@auth.stream req.path, req.body, {ignoreCache: true} #, options
# TODO: seems to use anon cookie for this. not sure how to fix...
# i guess keep initial cookie stored and run using that?
# so need to handle the case where the cookie changes between server-side
# cache and the actual get (when user doesn't exist from exoid, but cookie gets user)
RxObservable.combineLatest(
requests, (vals...) -> vals
)
.take(1).subscribe (responses) =>
responses = _zipWith responses, _keys(cache), (response, req) ->
{req, response}
cacheArray = _map cache, (response, req) ->
{req, response}
# see if our updated responses differ from the cached data.
changedReqs = _differenceWith(responses, cacheArray, _isEqual)
# update with new values
_map changedReqs, ({req, response}) =>
console.log 'OUTDATED EXOID:', req, 'replacing...', response
@exoid.setDataCache req, response
# there's a change this will be invalidated every time
# eg. if we add some sort of timer / visitCount to user.getMe
# i'm not sure if that's a bad thing or not. some people always
# load from cache then update, and this would basically be the same
unless _isEmpty changedReqs
console.log 'invalidating html cache...'
@portal.call 'cache.deleteHtmlCache'
# FIXME TODO invalidate in service worker
wasCached: => @isFromCache
dispose: =>
@time.dispose()
@exoid.disposeAll()
getSerializationStream: =>
@exoid.getCacheStream()
.map (exoidCache) ->
string = JSON.stringify({
exoid: exoidCache
# problem with this is clock skew
# expires: Date.now() + SERIALIZATION_EXPIRE_TIME_MS
}).replace /<\/script/gi, '<\\/script'
"window['#{SERIALIZATION_KEY}']=#{string};"
| true | Exoid = require 'exoid'
_isEmpty = require 'lodash/isEmpty'
_isPlainObject = require 'lodash/isPlainObject'
_defaults = require 'lodash/defaults'
_merge = require 'lodash/merge'
_pick = require 'lodash/pick'
_map = require 'lodash/map'
_zipWith = require 'lodash/zipWith'
_differenceWith = require 'lodash/differenceWith'
_isEqual = require 'lodash/isEqual'
_keys = require 'lodash/keys'
RxBehaviorSubject = require('rxjs/BehaviorSubject').BehaviorSubject
RxObservable = require('rxjs/Observable').Observable
require 'rxjs/add/observable/combineLatest'
require 'rxjs/add/observable/of'
require 'rxjs/add/operator/take'
Auth = require './auth'
Agency = require './agency'
Amenity = require './amenity'
AmenityAttachment = require './amenity_attachment'
AmenityReview = require './amenity_review'
AdditionalScript = require './additional_script'
Ban = require './ban'
Campground = require './campground'
CampgroundAttachment = require './campground_attachment'
CampgroundReview = require './campground_review'
Category = require './category'
CellTower = require './cell_tower'
CheckIn = require './check_in'
Connection = require './connection'
ConversationMessage = require './conversation_message'
Conversation = require './conversation'
Coordinate = require './coordinate'
Cookie = require './cookie'
# EarnAction = require './earn_action'
Experiment = require './experiment'
Event = require './event'
Geocoder = require './geocoder'
Gif = require './gif'
GiveawayEntry = require './giveaway_entry'
Group = require './group'
GroupAuditLog = require './group_audit_log'
GroupUser = require './group_user'
GroupRole = require './group_role'
Hazard = require './hazard'
Image = require './image'
Item = require './item'
Language = require './language'
LoginLink = require './login_link'
LocalMap = require './local_map'
Notification = require './notification'
Office = require './office'
OfflineData = require './offline_data'
Overnight = require './overnight'
OvernightAttachment = require './overnight_attachment'
OvernightReview = require './overnight_review'
Payment = require './payment'
Product = require './product'
PlaceAttachmentBase = require './place_attachment_base'
PlaceBase = require './place_base'
PlaceReviewBase = require './place_review_base'
PushToken = require './push_token'
Region = require './region'
StatusBar = require './status_bar'
Subscription = require './subscription'
Thread = require './thread'
Comment = require './comment'
Vote = require './vote'
Time = require './time'
Transaction = require './transaction'
Trip = require './trip'
TripFollower = require './trip_follower'
User = require './user'
UserBlock = require './user_block'
UserData = require './user_data'
UserLocation = require './user_location'
UserRig = require './user_rig'
UserSettings = require './user_settings'
Drawer = require './drawer'
EarnAlert = require './earn_alert'
Overlay = require './overlay'
Tooltip = require './tooltip'
InstallOverlay = require './install_overlay'
Window = require './window'
request = require '../services/request'
config = require '../config'
SERIALIZATION_KEY = 'PI:KEY:<KEY>END_PI'
# SERIALIZATION_EXPIRE_TIME_MS = 1000 * 10 # 10 seconds
module.exports = class Model
constructor: (options) ->
{serverHeaders, io, @portal, language,
initialCookies, setCookie, host} = options
serverHeaders ?= {}
cache = window?[SERIALIZATION_KEY] or {}
window?[SERIALIZATION_KEY] = null
# maybe this means less memory used for long caches?
document?.querySelector('.model')?.innerHTML = ''
# isExpired = if serialization.expires?
# # Because of potential clock skew we check around the value
# delta = Math.abs(Date.now() - serialization.expires)
# delta > SERIALIZATION_EXPIRE_TIME_MS
# else
# true
# cache = if isExpired then {} else serialization
@isFromCache = not _isEmpty cache
userAgent = serverHeaders['user-agent'] or navigator?.userAgent
ioEmit = (event, opts) =>
accessToken = @cookie.get 'accessToken'
io.emit event, _defaults {accessToken, userAgent}, opts
proxy = (url, opts) =>
accessToken = @cookie.get 'accessToken'
proxyHeaders = _pick serverHeaders, [
'cookie'
'user-agent'
'accept-language'
'x-forwarded-for'
]
request url, _merge {
responseType: 'json'
query: if accessToken? then {accessToken} else {}
headers: if _isPlainObject opts?.body
_merge {
# Avoid CORS preflight
'Content-Type': 'text/plain'
}, proxyHeaders
else
proxyHeaders
}, opts
if navigator?.onLine
offlineCache = null
else
offlineCache = try
JSON.parse localStorage?.offlineCache
catch
{}
@initialCache = _defaults offlineCache, cache.exoid
@exoid = new Exoid
ioEmit: ioEmit
io: io
cache: @initialCache
isServerSide: not window?
pushToken = new RxBehaviorSubject null
@cookie = new Cookie {initialCookies, setCookie, host}
@l = new Language {language, @cookie}
@overlay = new Overlay()
@auth = new Auth {@exoid, @cookie, pushToken, @l, userAgent, @portal}
@statusBar = new StatusBar {}
@offlineData = new OfflineData {@exoid, @portal, @statusBar, @l}
@additionalScript = new AdditionalScript()
@agency = new Agency {@auth}
@amenity = new Amenity {@auth, @l, @offlineData}
@amenityAttachment = new AmenityAttachment {@auth}
@amenityReview = new AmenityReview {@auth, @exoid, proxy}
@ban = new Ban {@auth}
@category = new Category {@auth}
@campground = new Campground {@auth, @l, @offlineData}
@campgroundAttachment = new CampgroundAttachment {@auth}
@campgroundReview = new CampgroundReview {@auth, @exoid, proxy}
@comment = new Comment {@auth}
@cellTower = new CellTower {@auth, @l, @offlineData}
@checkIn = new CheckIn {@auth, proxy, @l}
@connection = new Connection {@auth}
@conversationMessage = new ConversationMessage {@auth, proxy, @exoid}
@conversation = new Conversation {@auth}
@coordinate = new Coordinate {@auth}
# @earnAction = new EarnAction {@auth}
@event = new Event {@auth}
@experiment = new Experiment {@cookie}
@geocoder = new Geocoder {@auth}
@giveawayEntry = new GiveawayEntry {@auth}
@gif = new Gif()
@group = new Group {@auth}
@groupAuditLog = new GroupAuditLog {@auth}
@groupUser = new GroupUser {@auth}
@groupRole = new GroupRole {@auth}
@hazard = new Hazard {@auth, @l, @offlineData}
@image = new Image {@additionalScript}
@item = new Item {@auth}
@loginLink = new LoginLink {@auth}
@localMap = new LocalMap {@auth}
@notification = new Notification {@auth}
@office = new Office {@auth}
@overnight = new Overnight {@auth, @l, @offlineData}
@overnightAttachment = new OvernightAttachment {@auth}
@overnightReview = new OvernightReview {@auth, @exoid, proxy}
@payment = new Payment {@auth}
@placeAttachment = new PlaceAttachmentBase {@auth}
@placeBase = new PlaceBase {@auth, @l, @offlineData}
@placeReview = new PlaceReviewBase {@auth}
@product = new Product {@auth}
@pushToken = new PushToken {@auth, pushToken}
@region = new Region {@auth}
@subscription = new Subscription {@auth}
@thread = new Thread {@auth, @l, @group, @exoid, proxy}
@transaction = new Transaction {@auth}
@time = new Time {@auth}
@trip = new Trip {@auth, proxy, @exoid}
@tripFollower = new TripFollower {@auth}
@user = new User {@auth, proxy, @exoid, @cookie, @l, @overlay, @portal, @router}
@userBlock = new UserBlock {@auth}
@userData = new UserData {@auth}
@userLocation = new UserLocation {@auth}
@userRig = new UserRig {@auth}
@userSettings = new UserSettings {@auth}
@vote = new Vote {@auth}
@drawer = new Drawer()
@earnAlert = new EarnAlert()
@installOverlay = new InstallOverlay {@l, @overlay}
@tooltip = new Tooltip()
@portal?.setModels {
@user, @pushToken, @l, @installOverlay, @overlay
}
@window = new Window {@cookie, @experiment, userAgent}
# after page has loaded, refetch all initial (cached) requests to verify they're still up-to-date
validateInitialCache: =>
cache = @initialCache
@initialCache = null
# could listen for postMessage from service worker to see if this is from
# cache, then validate data
requests = _map cache, (result, key) =>
req = try
JSON.parse key
catch
RxObservable.of null
if req.path
@auth.stream req.path, req.body, {ignoreCache: true} #, options
# TODO: seems to use anon cookie for this. not sure how to fix...
# i guess keep initial cookie stored and run using that?
# so need to handle the case where the cookie changes between server-side
# cache and the actual get (when user doesn't exist from exoid, but cookie gets user)
RxObservable.combineLatest(
requests, (vals...) -> vals
)
.take(1).subscribe (responses) =>
responses = _zipWith responses, _keys(cache), (response, req) ->
{req, response}
cacheArray = _map cache, (response, req) ->
{req, response}
# see if our updated responses differ from the cached data.
changedReqs = _differenceWith(responses, cacheArray, _isEqual)
# update with new values
_map changedReqs, ({req, response}) =>
console.log 'OUTDATED EXOID:', req, 'replacing...', response
@exoid.setDataCache req, response
# there's a change this will be invalidated every time
# eg. if we add some sort of timer / visitCount to user.getMe
# i'm not sure if that's a bad thing or not. some people always
# load from cache then update, and this would basically be the same
unless _isEmpty changedReqs
console.log 'invalidating html cache...'
@portal.call 'cache.deleteHtmlCache'
# FIXME TODO invalidate in service worker
wasCached: => @isFromCache
dispose: =>
@time.dispose()
@exoid.disposeAll()
getSerializationStream: =>
@exoid.getCacheStream()
.map (exoidCache) ->
string = JSON.stringify({
exoid: exoidCache
# problem with this is clock skew
# expires: Date.now() + SERIALIZATION_EXPIRE_TIME_MS
}).replace /<\/script/gi, '<\\/script'
"window['#{SERIALIZATION_KEY}']=#{string};"
|
[
{
"context": "rink - reduce and simplify vector and mbtiles\n by Michael Strassburger <codepoet@cpan.org>\n\n Downsamples the extent of ",
"end": 83,
"score": 0.9998741149902344,
"start": 63,
"tag": "NAME",
"value": "Michael Strassburger"
},
{
"context": "ify vector and mbtiles\n ... | src/Shrinker.coffee | rastapasta/tileshrink | 27 | ###
tileshrink - reduce and simplify vector and mbtiles
by Michael Strassburger <codepoet@cpan.org>
Downsamples the extent of all layers and simplifies the resulting polylines
###
simplify = require 'simplify-js'
module.exports = class Shrinker
config:
extent: 1024
precision: 1
include: null
shrink: null
clampPoints: true
constructor: (options) ->
@config[option] = options[option] for option of options
shrink: (tile, z, x, y) ->
return if z > @config.shrink
for layer in tile.layers
scale = layer.extent / @config.extent
features = []
for feature in layer.features
geometry = @_scaleAndSimplifyGeometry scale, feature.geometry
if feature.type is "POLYGON"
continue if geometry[0].length < 3
geometry = @_reducePolygon geometry
else if @config.clampPoints and feature.type is "POINT"
geometry = @_clampPoints geometry
continue if not geometry.length or not geometry[0].length
feature.geometry = geometry
features.push feature
layer.features = features
layer.extent = @config.extent
true
_scaleAndSimplifyGeometry: (scale, lines) ->
for line, i in lines
for point in line
point.x = Math.floor point.x/scale
point.y = Math.floor point.y/scale
if line.length > 1
lines[i] = simplify line, @config.precision, true
lines
_reducePolygon: (rings) ->
reduced = [rings[0]]
for ring in rings[1..]
if ring.length > 2
reduced.push ring
reduced
_clampPoints: (outer) ->
clamped = []
for points in outer
filtered = []
for point in points
if 0 <= point.x < @config.extent and
0 <= point.y < @config.extent
filtered.push point
clamped.push filtered if filtered.length
clamped
| 184350 | ###
tileshrink - reduce and simplify vector and mbtiles
by <NAME> <<EMAIL>>
Downsamples the extent of all layers and simplifies the resulting polylines
###
simplify = require 'simplify-js'
module.exports = class Shrinker
config:
extent: 1024
precision: 1
include: null
shrink: null
clampPoints: true
constructor: (options) ->
@config[option] = options[option] for option of options
shrink: (tile, z, x, y) ->
return if z > @config.shrink
for layer in tile.layers
scale = layer.extent / @config.extent
features = []
for feature in layer.features
geometry = @_scaleAndSimplifyGeometry scale, feature.geometry
if feature.type is "POLYGON"
continue if geometry[0].length < 3
geometry = @_reducePolygon geometry
else if @config.clampPoints and feature.type is "POINT"
geometry = @_clampPoints geometry
continue if not geometry.length or not geometry[0].length
feature.geometry = geometry
features.push feature
layer.features = features
layer.extent = @config.extent
true
_scaleAndSimplifyGeometry: (scale, lines) ->
for line, i in lines
for point in line
point.x = Math.floor point.x/scale
point.y = Math.floor point.y/scale
if line.length > 1
lines[i] = simplify line, @config.precision, true
lines
_reducePolygon: (rings) ->
reduced = [rings[0]]
for ring in rings[1..]
if ring.length > 2
reduced.push ring
reduced
_clampPoints: (outer) ->
clamped = []
for points in outer
filtered = []
for point in points
if 0 <= point.x < @config.extent and
0 <= point.y < @config.extent
filtered.push point
clamped.push filtered if filtered.length
clamped
| true | ###
tileshrink - reduce and simplify vector and mbtiles
by PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Downsamples the extent of all layers and simplifies the resulting polylines
###
simplify = require 'simplify-js'
module.exports = class Shrinker
config:
extent: 1024
precision: 1
include: null
shrink: null
clampPoints: true
constructor: (options) ->
@config[option] = options[option] for option of options
shrink: (tile, z, x, y) ->
return if z > @config.shrink
for layer in tile.layers
scale = layer.extent / @config.extent
features = []
for feature in layer.features
geometry = @_scaleAndSimplifyGeometry scale, feature.geometry
if feature.type is "POLYGON"
continue if geometry[0].length < 3
geometry = @_reducePolygon geometry
else if @config.clampPoints and feature.type is "POINT"
geometry = @_clampPoints geometry
continue if not geometry.length or not geometry[0].length
feature.geometry = geometry
features.push feature
layer.features = features
layer.extent = @config.extent
true
_scaleAndSimplifyGeometry: (scale, lines) ->
for line, i in lines
for point in line
point.x = Math.floor point.x/scale
point.y = Math.floor point.y/scale
if line.length > 1
lines[i] = simplify line, @config.precision, true
lines
_reducePolygon: (rings) ->
reduced = [rings[0]]
for ring in rings[1..]
if ring.length > 2
reduced.push ring
reduced
_clampPoints: (outer) ->
clamped = []
for points in outer
filtered = []
for point in points
if 0 <= point.x < @config.extent and
0 <= point.y < @config.extent
filtered.push point
clamped.push filtered if filtered.length
clamped
|
[
{
"context": "config =\n title: 'Runners'\n token: 'CHANGEME'\n org: 'CHANGEME'\n topposition: '650px'\n leftp",
"end": 46,
"score": 0.9874438047409058,
"start": 38,
"tag": "PASSWORD",
"value": "CHANGEME"
}
] | github-runner.coffee | mms-gianni/uebersicht-widget-guthub-runner | 0 | config =
title: 'Runners'
token: 'CHANGEME'
org: 'CHANGEME'
topposition: '650px'
leftposition: '10px'
command: "curl -sS -u :#{config.token} -H \"Accept: application/vnd.github.v3+json\" https://api.github.com/orgs/#{config.org}/actions/runners"
refreshFrequency: 4800
style: """
color: #fff
font-family: Helvetica Neue
background: rgba(#fff, .3)
padding 10px 15px 0px
border-radius: 5px
font-size: 11px
/*bottom: #{config.topposition}*/
top: #{config.topposition}
left: #{config.leftposition}
#header
width: 100%
text-transform: uppercase
font-size: 10px
font-weight: bold
padding-bottom: 8px
#list
float: right
.status
float: right
margin: 10px
.runner
vertical-align: middle
.online
top: 50%
left: 50%
height: 16px
width: 16px
border: 4px rgba(#FFF, 0.25) solid
border-top: 4px #6A0 solid
border-bottom: 4px #6A0 solid
border-radius: 50%
margin: -8px -4px 0 -8px
-webkit-animation: spin1 1s infinite linear
animation: spin1 1s infinite linear
@-webkit-keyframes spin1
from
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
to
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
@keyframes spin1
from
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
to
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
.online-idle
top: 50%
left: 50%
height: 16px
width: 16px
margin: -8px -4px 0 -8px
/*margin: -25px 0 0 -25px*/
border: 4px rgba(#6A0, 1) solid
border-radius: 50%
.offline
top: 50%
left: 50%
height: 16px
width: 16px
margin: -8px -4px 0 -8px
/*margin: -25px 0 0 -25px*/
border: 4px rgba(#000, 1) solid
border-radius: 50%
"""
render: -> """
<div id="header">#{config.title}</div>
<div>
<div id="list">
</div>
"""
update: (output, domEl) ->
runners = JSON.parse(output)
table = $(domEl).find('#list')
# Reset the table
table.html('')
renderBuild = (runner) ->
"""
<div class="status">
<div class="#{ runner.displaystatus }"></div>
<!--<span class="runner">#{ runner.name }</span>-->
</div>
"""
for runner in runners['runners']
if runner.status == 'online'
runner.displaystatus = 'online'
if runner.busy == false
runner.displaystatus = 'online-idle'
else
runner.displaystatus = 'offline'
table.append renderBuild(runner)
| 117719 | config =
title: 'Runners'
token: '<PASSWORD>'
org: 'CHANGEME'
topposition: '650px'
leftposition: '10px'
command: "curl -sS -u :#{config.token} -H \"Accept: application/vnd.github.v3+json\" https://api.github.com/orgs/#{config.org}/actions/runners"
refreshFrequency: 4800
style: """
color: #fff
font-family: Helvetica Neue
background: rgba(#fff, .3)
padding 10px 15px 0px
border-radius: 5px
font-size: 11px
/*bottom: #{config.topposition}*/
top: #{config.topposition}
left: #{config.leftposition}
#header
width: 100%
text-transform: uppercase
font-size: 10px
font-weight: bold
padding-bottom: 8px
#list
float: right
.status
float: right
margin: 10px
.runner
vertical-align: middle
.online
top: 50%
left: 50%
height: 16px
width: 16px
border: 4px rgba(#FFF, 0.25) solid
border-top: 4px #6A0 solid
border-bottom: 4px #6A0 solid
border-radius: 50%
margin: -8px -4px 0 -8px
-webkit-animation: spin1 1s infinite linear
animation: spin1 1s infinite linear
@-webkit-keyframes spin1
from
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
to
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
@keyframes spin1
from
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
to
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
.online-idle
top: 50%
left: 50%
height: 16px
width: 16px
margin: -8px -4px 0 -8px
/*margin: -25px 0 0 -25px*/
border: 4px rgba(#6A0, 1) solid
border-radius: 50%
.offline
top: 50%
left: 50%
height: 16px
width: 16px
margin: -8px -4px 0 -8px
/*margin: -25px 0 0 -25px*/
border: 4px rgba(#000, 1) solid
border-radius: 50%
"""
render: -> """
<div id="header">#{config.title}</div>
<div>
<div id="list">
</div>
"""
update: (output, domEl) ->
runners = JSON.parse(output)
table = $(domEl).find('#list')
# Reset the table
table.html('')
renderBuild = (runner) ->
"""
<div class="status">
<div class="#{ runner.displaystatus }"></div>
<!--<span class="runner">#{ runner.name }</span>-->
</div>
"""
for runner in runners['runners']
if runner.status == 'online'
runner.displaystatus = 'online'
if runner.busy == false
runner.displaystatus = 'online-idle'
else
runner.displaystatus = 'offline'
table.append renderBuild(runner)
| true | config =
title: 'Runners'
token: 'PI:PASSWORD:<PASSWORD>END_PI'
org: 'CHANGEME'
topposition: '650px'
leftposition: '10px'
command: "curl -sS -u :#{config.token} -H \"Accept: application/vnd.github.v3+json\" https://api.github.com/orgs/#{config.org}/actions/runners"
refreshFrequency: 4800
style: """
color: #fff
font-family: Helvetica Neue
background: rgba(#fff, .3)
padding 10px 15px 0px
border-radius: 5px
font-size: 11px
/*bottom: #{config.topposition}*/
top: #{config.topposition}
left: #{config.leftposition}
#header
width: 100%
text-transform: uppercase
font-size: 10px
font-weight: bold
padding-bottom: 8px
#list
float: right
.status
float: right
margin: 10px
.runner
vertical-align: middle
.online
top: 50%
left: 50%
height: 16px
width: 16px
border: 4px rgba(#FFF, 0.25) solid
border-top: 4px #6A0 solid
border-bottom: 4px #6A0 solid
border-radius: 50%
margin: -8px -4px 0 -8px
-webkit-animation: spin1 1s infinite linear
animation: spin1 1s infinite linear
@-webkit-keyframes spin1
from
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
to
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
@keyframes spin1
from
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
-webkit-transform: rotate(0deg)
transform: rotate(0deg)
to
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
-webkit-transform: rotate(359deg)
transform: rotate(359deg)
.online-idle
top: 50%
left: 50%
height: 16px
width: 16px
margin: -8px -4px 0 -8px
/*margin: -25px 0 0 -25px*/
border: 4px rgba(#6A0, 1) solid
border-radius: 50%
.offline
top: 50%
left: 50%
height: 16px
width: 16px
margin: -8px -4px 0 -8px
/*margin: -25px 0 0 -25px*/
border: 4px rgba(#000, 1) solid
border-radius: 50%
"""
render: -> """
<div id="header">#{config.title}</div>
<div>
<div id="list">
</div>
"""
update: (output, domEl) ->
runners = JSON.parse(output)
table = $(domEl).find('#list')
# Reset the table
table.html('')
renderBuild = (runner) ->
"""
<div class="status">
<div class="#{ runner.displaystatus }"></div>
<!--<span class="runner">#{ runner.name }</span>-->
</div>
"""
for runner in runners['runners']
if runner.status == 'online'
runner.displaystatus = 'online'
if runner.busy == false
runner.displaystatus = 'online-idle'
else
runner.displaystatus = 'offline'
table.append renderBuild(runner)
|
[
{
"context": " 0, 0, 1\n 1, 1, 0\n]\nkids =\n brother:\n name: \"Max\"\n age : 11\n sister:\n name: \"Ida\"\n age :",
"end": 156,
"score": 0.9998275637626648,
"start": 153,
"tag": "NAME",
"value": "Max"
},
{
"context": " name: \"Max\"\n age : 11\n sister:\n n... | CoffeeScriptRunTime/CoffeeScripts/ObjectAndArray.coffee | fredericaltorres/DynamicJavaScriptRunTimes.NET | 4 | song = ["do", "re", "mi", "fa", "so"]
singers = {Jagger: "Rock", Elvis: "Roll"}
bitlist = [
1, 0, 1
0, 0, 1
1, 1, 0
]
kids =
brother:
name: "Max"
age : 11
sister:
name: "Ida"
age : 9
printJSON(song)
printJSON(singers)
printJSON(bitlist)
printJSON(kids) | 178568 | song = ["do", "re", "mi", "fa", "so"]
singers = {Jagger: "Rock", Elvis: "Roll"}
bitlist = [
1, 0, 1
0, 0, 1
1, 1, 0
]
kids =
brother:
name: "<NAME>"
age : 11
sister:
name: "<NAME>"
age : 9
printJSON(song)
printJSON(singers)
printJSON(bitlist)
printJSON(kids) | true | song = ["do", "re", "mi", "fa", "so"]
singers = {Jagger: "Rock", Elvis: "Roll"}
bitlist = [
1, 0, 1
0, 0, 1
1, 1, 0
]
kids =
brother:
name: "PI:NAME:<NAME>END_PI"
age : 11
sister:
name: "PI:NAME:<NAME>END_PI"
age : 9
printJSON(song)
printJSON(singers)
printJSON(bitlist)
printJSON(kids) |
[
{
"context": ".\n\n@module joukou-api/persona/graph/routes\n@author Isaac Johnston <isaac.johnston@joukou.com>\n###\n\n_ ",
"end": 778,
"score": 0.9998853802680969,
"start": 764,
"tag": "NAME",
"value": "Isaac Johnston"
},
{
"context": "-api/persona/graph/routes\n@autho... | src/persona/graph/routes.coffee | joukou/joukou-api | 0 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/persona/graph/model|Graph} APIs provide information about the
graphs that an agent has authorization to access.
@module joukou-api/persona/graph/routes
@author Isaac Johnston <isaac.johnston@joukou.com>
###
_ = require( 'lodash' )
uuid = require( 'node-uuid' )
async = require( 'async' )
{ authenticate } = require( '../../authn' )
authz = require( '../../authz' )
hal = require( '../../hal' )
request = require( 'request' )
connection_routes = require( './connection/routes' )
process_routes = require( './process/routes' )
network_routes = require( './network/routes' )
GraphModel = require( './model' )
PersonaModel = require( '../model')
CircleModel = require( '../../circle/model')
Q = require('q')
GraphStateModel = require( '../../agent/graph/state/model' )
{ UnauthorizedError, ForbiddenError, NotFoundError } = require( 'restify' )
module.exports = self =
###*
Registers graph-related routes with the `server`.
@param {joukou-api/server} server
###
registerRoutes: ( server ) ->
server.get( '/persona/:personaKey/graph', authenticate, self.index )
server.post( '/persona/:personaKey/graph', authenticate, self.create )
server.get( '/persona/:personaKey/graph/:graphKey', authenticate, self.retrieve )
connection_routes.registerRoutes( server )
process_routes.registerRoutes( server )
network_routes.registerRoutes( server )
return
###*
Handles a request to search for graphs owned by a certain persona.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {function(Error)} next
###
index: ( req, res, next ) ->
request(
uri: 'http://localhost:8098/mapred'
method: 'POST'
json:
inputs:
module: 'yokozuna'
function: 'mapred_search'
arg: [ 'graph', 'personas.key:' + req.params.personaKey ]
query: [
{
map:
language: 'javascript'
keep: true
source: ( ( value, keyData, arg ) ->
result = Riak.mapValuesJson( value )[ 0 ]
result.key = value.key
return [ result ]
).toString()
}
]
, ( err, reply ) ->
if err
res.send( 503 )
return
representation = {}
representation._embedded = _.reduce( reply.body, ( memo, graph ) ->
memo[ 'joukou:graph' ].push(
name: graph.name
key: graph.key
_links:
self:
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}"
'joukou:persona':
href: "/persona/#{req.params.personaKey}"
'joukou:process-create':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/process"
'joukou:processes':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/process"
'joukou:connection-create':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/connection"
'joukou:connections':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/connection"
)
memo
, { 'joukou:graph': [] } )
res.link( "/persona/#{req.params.personaKey}", 'joukou:persona' )
res.send( 200, representation )
return
)
return
###
@api {post} /persona/:personaKey/graph Creates a Joukou graph
@apiName CreateGraph
@apiGroup Graph
@apiParam {Object} properties
@apiExample CURL Example:
curl -i -X POST https://api.joukou.com/persona/7bcb937e-3938-49c5-a1ce-5eb45f194f2f/graph \
-H 'Content-Type: application/json' \
-d '{ "name": "CRM to Sharepoint Integration" }'
@apiSuccess (201) Created The graph has been created successfully.
@apiError (429) TooManyRequests The client has sent too many requests in a given amount of time.
@apiError (503) ServiceUnavailable There was a temporary failure creating the graph, the client should try again later.
###
create: ( req, res, next ) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
data = {}
data.name = req.body.name
data.personas = [
key: persona.getKey()
]
GraphModel.create( data ).then( ( graph ) ->
graph.save()
)
.then( ( graph ) ->
self = "/persona/#{persona.getKey()}/graph/#{graph.getKey()}"
res.link( self, 'joukou:graph' )
res.header( 'Location', self )
res.send( 201, {} )
)
.fail( ( err ) -> next( err ) )
)
.fail( ( err ) ->
if err instanceof NotFoundError
err = new NotFoundError("Persona '#{req.params.personaKey}' not found")
next( err ) )
###
@api {get} /graph/:graphKey Retrieve the definition of a Joukou graph
@apiName RetrieveGraph
@apiGroup Graph
@apiExample CURL Example:
curl -i -X GET https://api.joukou.com/graph/15269bc7-a6b2-42c5-8805-879f1fe11ec0
@apiSuccess (200) OK The graph definition is sent in the response.
@apiError (401) Unauthorized The request requires user authentication, or authorization has been refused for the supplied credentials.
@apiError (404) NotFound The server did not find a graph definition that matched the provided key.
@apiError (429) TooManyRequests The client has sent too many requests in a given amount of time.
@apiError (503) ServiceUnavailable There was a temporary failure retrieving the graph definition, the client should try again later.
###
retrieve: ( req, res, next ) ->
authz.hasGraph(req.user, req.params.graphKey, req.params.personaKey)
.then( ( { graph, persona } ) ->
graph.getConnections().then( ( connections ) ->
representation = {}
if req.accepts('application/hal+json')
for item in graph.getValue().personas
res.link( "/persona/#{item.key}", 'joukou:persona' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process", 'joukou:process-create', title: 'Add a Process to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/clone", 'joukou:process-clone', title: 'Clone a Process to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process", 'joukou:processes', title: 'List of Processes for this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection", 'joukou:connection-create', title: 'Add a Connection to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection", 'joukou:connections', title: 'List of Connections for this Graph' )
# representation = _.pick( graph.getValue(), [ 'name' ] )
representation._embedded =
'joukou:process': _.reduce( graph.getValue().processes or {}, ( memo, process, processKey ) ->
memo.push(
_links:
self:
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{processKey}"
'joukou:process-update:position':
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{processKey}/position"
#'joukou:circle': TODO
# href: "/persona/#{persona.getKey()}/circle/#{process.circle.key}"
metadata: process.metadata
)
memo
, [] )
'joukou:connection': _.reduce( graph.getValue().connections or [], ( memo, connection, i ) ->
memo.push(
_links:
self:
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection/#{connection.key}"
'joukou:process': [
{
name: 'src' # TODO href
}
{
name: 'tgt' # TODO href
}
]
)
memo
, [] )
representation.properties =
name: graph.getValue().name
metadata: {
key: req.params.graphKey
state: {
x: 0
y: 0
scale: 1
metadata: {}
}
}
processPort = (port) ->
port.process = "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{port.process}"
return port
representation.connections = _.map(connections, (connection) ->
return {
tgt: processPort(connection.tgt)
src: processPort(connection.src)
metadata: {
key: connection.key
}
}
)
representation.processes = {}
representation.outports = {}
representation.inports = {}
promises = _.map(graph.getValue().processes, (process, key) ->
deferred = Q.defer()
if not process.circle
# Skip it
return deferred.resolve()
CircleModel.retrieve(process.circle.key).then((circle) ->
circleValue = circle.getValue()
mapPort = (port) ->
return port
metadata = {
circle: {
key: circle.getKey()
value: {
description: circleValue.description
icon: circleValue.icon
subgraph: circleValue.subgraph
inports: _.map(circleValue.inports, mapPort)
outports: _.map(circleValue.outports, mapPort)
}
}
key: key
image: circleValue.image
label: circleValue.name
}
metadata = _.merge(metadata, process.metadata)
representation.processes["/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{key}"] = {
id: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{key}"
component: circle.getKey()
metadata: metadata
}
deferred.resolve()
).fail(deferred.reject)
return deferred.promise
)
stateDeferred = Q.defer()
setupStateLink = (state) ->
if not req.accepts("application/hal+json")
return
link = "/agent/graph/#{req.params.graphKey}/state"
res.link( link, 'joukou:graph:state', title: 'Update graph state' )
representation._embedded['joukou:graph:state'] = {
x: state.x
y: state.y
scale: state.scale
metadata: state.metadata or {}
_links:
self:
href: link
}
GraphStateModel.retrieveForGraph(
req.user.getKey(),
req.params.graphKey
).then( (model) ->
state = model.getValue()
metadata = representation.properties.metadata
metadata.state = {
x: state.x
y: state.y
scale: state.scale
metadata: state.metadata or {}
}
setupStateLink(state)
stateDeferred.resolve(model)
).fail( (err) ->
state = {
x: 0
y: 0
scale: 1
metadata: {}
}
setupStateLink(state)
stateDeferred.resolve(state)
)
promises.push(stateDeferred.promise)
Q.all(promises).then(->
res.send( 200, representation )
).fail( (err) ->
next( err )
)
return
).fail( ( err ) -> next( err ) )
)
.fail( ( err ) -> next( err ) )
return
| 157246 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/persona/graph/model|Graph} APIs provide information about the
graphs that an agent has authorization to access.
@module joukou-api/persona/graph/routes
@author <NAME> <<EMAIL>>
###
_ = require( 'lodash' )
uuid = require( 'node-uuid' )
async = require( 'async' )
{ authenticate } = require( '../../authn' )
authz = require( '../../authz' )
hal = require( '../../hal' )
request = require( 'request' )
connection_routes = require( './connection/routes' )
process_routes = require( './process/routes' )
network_routes = require( './network/routes' )
GraphModel = require( './model' )
PersonaModel = require( '../model')
CircleModel = require( '../../circle/model')
Q = require('q')
GraphStateModel = require( '../../agent/graph/state/model' )
{ UnauthorizedError, ForbiddenError, NotFoundError } = require( 'restify' )
module.exports = self =
###*
Registers graph-related routes with the `server`.
@param {joukou-api/server} server
###
registerRoutes: ( server ) ->
server.get( '/persona/:personaKey/graph', authenticate, self.index )
server.post( '/persona/:personaKey/graph', authenticate, self.create )
server.get( '/persona/:personaKey/graph/:graphKey', authenticate, self.retrieve )
connection_routes.registerRoutes( server )
process_routes.registerRoutes( server )
network_routes.registerRoutes( server )
return
###*
Handles a request to search for graphs owned by a certain persona.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {function(Error)} next
###
index: ( req, res, next ) ->
request(
uri: 'http://localhost:8098/mapred'
method: 'POST'
json:
inputs:
module: 'yokozuna'
function: 'mapred_search'
arg: [ 'graph', 'personas.key:' + req.params.personaKey ]
query: [
{
map:
language: 'javascript'
keep: true
source: ( ( value, keyData, arg ) ->
result = Riak.mapValuesJson( value )[ 0 ]
result.key = value.key
return [ result ]
).toString()
}
]
, ( err, reply ) ->
if err
res.send( 503 )
return
representation = {}
representation._embedded = _.reduce( reply.body, ( memo, graph ) ->
memo[ 'joukou:graph' ].push(
name: graph.name
key: graph.key
_links:
self:
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}"
'joukou:persona':
href: "/persona/#{req.params.personaKey}"
'joukou:process-create':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/process"
'joukou:processes':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/process"
'joukou:connection-create':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/connection"
'joukou:connections':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/connection"
)
memo
, { 'joukou:graph': [] } )
res.link( "/persona/#{req.params.personaKey}", 'joukou:persona' )
res.send( 200, representation )
return
)
return
###
@api {post} /persona/:personaKey/graph Creates a Joukou graph
@apiName CreateGraph
@apiGroup Graph
@apiParam {Object} properties
@apiExample CURL Example:
curl -i -X POST https://api.joukou.com/persona/7bcb937e-3938-49c5-a1ce-5eb45f194f2f/graph \
-H 'Content-Type: application/json' \
-d '{ "name": "CRM to Sharepoint Integration" }'
@apiSuccess (201) Created The graph has been created successfully.
@apiError (429) TooManyRequests The client has sent too many requests in a given amount of time.
@apiError (503) ServiceUnavailable There was a temporary failure creating the graph, the client should try again later.
###
create: ( req, res, next ) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
data = {}
data.name = req.body.name
data.personas = [
key: persona.getKey()
]
GraphModel.create( data ).then( ( graph ) ->
graph.save()
)
.then( ( graph ) ->
self = "/persona/#{persona.getKey()}/graph/#{graph.getKey()}"
res.link( self, 'joukou:graph' )
res.header( 'Location', self )
res.send( 201, {} )
)
.fail( ( err ) -> next( err ) )
)
.fail( ( err ) ->
if err instanceof NotFoundError
err = new NotFoundError("Persona '#{req.params.personaKey}' not found")
next( err ) )
###
@api {get} /graph/:graphKey Retrieve the definition of a Joukou graph
@apiName RetrieveGraph
@apiGroup Graph
@apiExample CURL Example:
curl -i -X GET https://api.joukou.com/graph/15269bc7-a6b2-42c5-8805-879f1fe11ec0
@apiSuccess (200) OK The graph definition is sent in the response.
@apiError (401) Unauthorized The request requires user authentication, or authorization has been refused for the supplied credentials.
@apiError (404) NotFound The server did not find a graph definition that matched the provided key.
@apiError (429) TooManyRequests The client has sent too many requests in a given amount of time.
@apiError (503) ServiceUnavailable There was a temporary failure retrieving the graph definition, the client should try again later.
###
retrieve: ( req, res, next ) ->
authz.hasGraph(req.user, req.params.graphKey, req.params.personaKey)
.then( ( { graph, persona } ) ->
graph.getConnections().then( ( connections ) ->
representation = {}
if req.accepts('application/hal+json')
for item in graph.getValue().personas
res.link( "/persona/#{item.key}", 'joukou:persona' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process", 'joukou:process-create', title: 'Add a Process to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/clone", 'joukou:process-clone', title: 'Clone a Process to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process", 'joukou:processes', title: 'List of Processes for this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection", 'joukou:connection-create', title: 'Add a Connection to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection", 'joukou:connections', title: 'List of Connections for this Graph' )
# representation = _.pick( graph.getValue(), [ 'name' ] )
representation._embedded =
'joukou:process': _.reduce( graph.getValue().processes or {}, ( memo, process, processKey ) ->
memo.push(
_links:
self:
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{processKey}"
'joukou:process-update:position':
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{processKey}/position"
#'joukou:circle': TODO
# href: "/persona/#{persona.getKey()}/circle/#{process.circle.key}"
metadata: process.metadata
)
memo
, [] )
'joukou:connection': _.reduce( graph.getValue().connections or [], ( memo, connection, i ) ->
memo.push(
_links:
self:
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection/#{connection.key}"
'joukou:process': [
{
name: 'src' # TODO href
}
{
name: 'tgt' # TODO href
}
]
)
memo
, [] )
representation.properties =
name: graph.getValue().name
metadata: {
key: req.params.graphKey
state: {
x: 0
y: 0
scale: 1
metadata: {}
}
}
processPort = (port) ->
port.process = "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{port.process}"
return port
representation.connections = _.map(connections, (connection) ->
return {
tgt: processPort(connection.tgt)
src: processPort(connection.src)
metadata: {
key: connection.key
}
}
)
representation.processes = {}
representation.outports = {}
representation.inports = {}
promises = _.map(graph.getValue().processes, (process, key) ->
deferred = Q.defer()
if not process.circle
# Skip it
return deferred.resolve()
CircleModel.retrieve(process.circle.key).then((circle) ->
circleValue = circle.getValue()
mapPort = (port) ->
return port
metadata = {
circle: {
key: circle.getKey()
value: {
description: circleValue.description
icon: circleValue.icon
subgraph: circleValue.subgraph
inports: _.map(circleValue.inports, mapPort)
outports: _.map(circleValue.outports, mapPort)
}
}
key: key
image: circleValue.image
label: circleValue.name
}
metadata = _.merge(metadata, process.metadata)
representation.processes["/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{key}"] = {
id: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{key}"
component: circle.getKey()
metadata: metadata
}
deferred.resolve()
).fail(deferred.reject)
return deferred.promise
)
stateDeferred = Q.defer()
setupStateLink = (state) ->
if not req.accepts("application/hal+json")
return
link = "/agent/graph/#{req.params.graphKey}/state"
res.link( link, 'joukou:graph:state', title: 'Update graph state' )
representation._embedded['joukou:graph:state'] = {
x: state.x
y: state.y
scale: state.scale
metadata: state.metadata or {}
_links:
self:
href: link
}
GraphStateModel.retrieveForGraph(
req.user.getKey(),
req.params.graphKey
).then( (model) ->
state = model.getValue()
metadata = representation.properties.metadata
metadata.state = {
x: state.x
y: state.y
scale: state.scale
metadata: state.metadata or {}
}
setupStateLink(state)
stateDeferred.resolve(model)
).fail( (err) ->
state = {
x: 0
y: 0
scale: 1
metadata: {}
}
setupStateLink(state)
stateDeferred.resolve(state)
)
promises.push(stateDeferred.promise)
Q.all(promises).then(->
res.send( 200, representation )
).fail( (err) ->
next( err )
)
return
).fail( ( err ) -> next( err ) )
)
.fail( ( err ) -> next( err ) )
return
| true | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/persona/graph/model|Graph} APIs provide information about the
graphs that an agent has authorization to access.
@module joukou-api/persona/graph/routes
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
_ = require( 'lodash' )
uuid = require( 'node-uuid' )
async = require( 'async' )
{ authenticate } = require( '../../authn' )
authz = require( '../../authz' )
hal = require( '../../hal' )
request = require( 'request' )
connection_routes = require( './connection/routes' )
process_routes = require( './process/routes' )
network_routes = require( './network/routes' )
GraphModel = require( './model' )
PersonaModel = require( '../model')
CircleModel = require( '../../circle/model')
Q = require('q')
GraphStateModel = require( '../../agent/graph/state/model' )
{ UnauthorizedError, ForbiddenError, NotFoundError } = require( 'restify' )
module.exports = self =
###*
Registers graph-related routes with the `server`.
@param {joukou-api/server} server
###
registerRoutes: ( server ) ->
server.get( '/persona/:personaKey/graph', authenticate, self.index )
server.post( '/persona/:personaKey/graph', authenticate, self.create )
server.get( '/persona/:personaKey/graph/:graphKey', authenticate, self.retrieve )
connection_routes.registerRoutes( server )
process_routes.registerRoutes( server )
network_routes.registerRoutes( server )
return
###*
Handles a request to search for graphs owned by a certain persona.
@param {http.IncomingMessage} req
@param {http.ServerResponse} res
@param {function(Error)} next
###
index: ( req, res, next ) ->
request(
uri: 'http://localhost:8098/mapred'
method: 'POST'
json:
inputs:
module: 'yokozuna'
function: 'mapred_search'
arg: [ 'graph', 'personas.key:' + req.params.personaKey ]
query: [
{
map:
language: 'javascript'
keep: true
source: ( ( value, keyData, arg ) ->
result = Riak.mapValuesJson( value )[ 0 ]
result.key = value.key
return [ result ]
).toString()
}
]
, ( err, reply ) ->
if err
res.send( 503 )
return
representation = {}
representation._embedded = _.reduce( reply.body, ( memo, graph ) ->
memo[ 'joukou:graph' ].push(
name: graph.name
key: graph.key
_links:
self:
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}"
'joukou:persona':
href: "/persona/#{req.params.personaKey}"
'joukou:process-create':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/process"
'joukou:processes':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/process"
'joukou:connection-create':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/connection"
'joukou:connections':
href: "/persona/#{req.params.personaKey}/graph/#{graph.key}/connection"
)
memo
, { 'joukou:graph': [] } )
res.link( "/persona/#{req.params.personaKey}", 'joukou:persona' )
res.send( 200, representation )
return
)
return
###
@api {post} /persona/:personaKey/graph Creates a Joukou graph
@apiName CreateGraph
@apiGroup Graph
@apiParam {Object} properties
@apiExample CURL Example:
curl -i -X POST https://api.joukou.com/persona/7bcb937e-3938-49c5-a1ce-5eb45f194f2f/graph \
-H 'Content-Type: application/json' \
-d '{ "name": "CRM to Sharepoint Integration" }'
@apiSuccess (201) Created The graph has been created successfully.
@apiError (429) TooManyRequests The client has sent too many requests in a given amount of time.
@apiError (503) ServiceUnavailable There was a temporary failure creating the graph, the client should try again later.
###
create: ( req, res, next ) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
data = {}
data.name = req.body.name
data.personas = [
key: persona.getKey()
]
GraphModel.create( data ).then( ( graph ) ->
graph.save()
)
.then( ( graph ) ->
self = "/persona/#{persona.getKey()}/graph/#{graph.getKey()}"
res.link( self, 'joukou:graph' )
res.header( 'Location', self )
res.send( 201, {} )
)
.fail( ( err ) -> next( err ) )
)
.fail( ( err ) ->
if err instanceof NotFoundError
err = new NotFoundError("Persona '#{req.params.personaKey}' not found")
next( err ) )
###
@api {get} /graph/:graphKey Retrieve the definition of a Joukou graph
@apiName RetrieveGraph
@apiGroup Graph
@apiExample CURL Example:
curl -i -X GET https://api.joukou.com/graph/15269bc7-a6b2-42c5-8805-879f1fe11ec0
@apiSuccess (200) OK The graph definition is sent in the response.
@apiError (401) Unauthorized The request requires user authentication, or authorization has been refused for the supplied credentials.
@apiError (404) NotFound The server did not find a graph definition that matched the provided key.
@apiError (429) TooManyRequests The client has sent too many requests in a given amount of time.
@apiError (503) ServiceUnavailable There was a temporary failure retrieving the graph definition, the client should try again later.
###
retrieve: ( req, res, next ) ->
authz.hasGraph(req.user, req.params.graphKey, req.params.personaKey)
.then( ( { graph, persona } ) ->
graph.getConnections().then( ( connections ) ->
representation = {}
if req.accepts('application/hal+json')
for item in graph.getValue().personas
res.link( "/persona/#{item.key}", 'joukou:persona' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process", 'joukou:process-create', title: 'Add a Process to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/clone", 'joukou:process-clone', title: 'Clone a Process to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process", 'joukou:processes', title: 'List of Processes for this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection", 'joukou:connection-create', title: 'Add a Connection to this Graph' )
res.link( "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection", 'joukou:connections', title: 'List of Connections for this Graph' )
# representation = _.pick( graph.getValue(), [ 'name' ] )
representation._embedded =
'joukou:process': _.reduce( graph.getValue().processes or {}, ( memo, process, processKey ) ->
memo.push(
_links:
self:
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{processKey}"
'joukou:process-update:position':
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{processKey}/position"
#'joukou:circle': TODO
# href: "/persona/#{persona.getKey()}/circle/#{process.circle.key}"
metadata: process.metadata
)
memo
, [] )
'joukou:connection': _.reduce( graph.getValue().connections or [], ( memo, connection, i ) ->
memo.push(
_links:
self:
href: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/connection/#{connection.key}"
'joukou:process': [
{
name: 'src' # TODO href
}
{
name: 'tgt' # TODO href
}
]
)
memo
, [] )
representation.properties =
name: graph.getValue().name
metadata: {
key: req.params.graphKey
state: {
x: 0
y: 0
scale: 1
metadata: {}
}
}
processPort = (port) ->
port.process = "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{port.process}"
return port
representation.connections = _.map(connections, (connection) ->
return {
tgt: processPort(connection.tgt)
src: processPort(connection.src)
metadata: {
key: connection.key
}
}
)
representation.processes = {}
representation.outports = {}
representation.inports = {}
promises = _.map(graph.getValue().processes, (process, key) ->
deferred = Q.defer()
if not process.circle
# Skip it
return deferred.resolve()
CircleModel.retrieve(process.circle.key).then((circle) ->
circleValue = circle.getValue()
mapPort = (port) ->
return port
metadata = {
circle: {
key: circle.getKey()
value: {
description: circleValue.description
icon: circleValue.icon
subgraph: circleValue.subgraph
inports: _.map(circleValue.inports, mapPort)
outports: _.map(circleValue.outports, mapPort)
}
}
key: key
image: circleValue.image
label: circleValue.name
}
metadata = _.merge(metadata, process.metadata)
representation.processes["/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{key}"] = {
id: "/persona/#{persona.getKey()}/graph/#{graph.getKey()}/process/#{key}"
component: circle.getKey()
metadata: metadata
}
deferred.resolve()
).fail(deferred.reject)
return deferred.promise
)
stateDeferred = Q.defer()
setupStateLink = (state) ->
if not req.accepts("application/hal+json")
return
link = "/agent/graph/#{req.params.graphKey}/state"
res.link( link, 'joukou:graph:state', title: 'Update graph state' )
representation._embedded['joukou:graph:state'] = {
x: state.x
y: state.y
scale: state.scale
metadata: state.metadata or {}
_links:
self:
href: link
}
GraphStateModel.retrieveForGraph(
req.user.getKey(),
req.params.graphKey
).then( (model) ->
state = model.getValue()
metadata = representation.properties.metadata
metadata.state = {
x: state.x
y: state.y
scale: state.scale
metadata: state.metadata or {}
}
setupStateLink(state)
stateDeferred.resolve(model)
).fail( (err) ->
state = {
x: 0
y: 0
scale: 1
metadata: {}
}
setupStateLink(state)
stateDeferred.resolve(state)
)
promises.push(stateDeferred.promise)
Q.all(promises).then(->
res.send( 200, representation )
).fail( (err) ->
next( err )
)
return
).fail( ( err ) -> next( err ) )
)
.fail( ( err ) -> next( err ) )
return
|
[
{
"context": "###\nGulp task watch\n@create 2014-10-07\n@author KoutarouYabe <idolm@ster.pw>\n###\n\nmodule.exports = (gulp, plug",
"end": 59,
"score": 0.9998920559883118,
"start": 47,
"tag": "NAME",
"value": "KoutarouYabe"
},
{
"context": "sk watch\n@create 2014-10-07\n@author Kouta... | tasks/config/watch.coffee | moorvin/Sea-Fight | 1 | ###
Gulp task watch
@create 2014-10-07
@author KoutarouYabe <idolm@ster.pw>
###
module.exports = (gulp, plugins, growl, path)->
gulp.task "watch:assets", ->
plugins.livereload = plugins.livereload.listen()
plugins.watch ["assets/**/*.sass", "assets/**/*.scss"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "sass", "reload"
plugins.watch ["assets/**/*.coffee"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "coffee","concat:js", "uglify:js", "reload"
plugins.watch ["assets/images/**/*"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "images", "reload"
plugins.watch [
"assets/**/**.!(coffee|less|sass|scss)"
"!assets/images{,/**}"
], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence(
["copy", 'concat:template'],
'concat:js',
'uglify:js',
"reload"
)
gulp.task "reload", ->
plugins.util.log "page was reloaded"
gulp
.src "assets/**/*"
.pipe plugins.livereload
quiet: true
gulp.task "watch", ->
plugins.sequence(
["build", "server"]
"watch:assets"
)
| 96274 | ###
Gulp task watch
@create 2014-10-07
@author <NAME> <<EMAIL>>
###
module.exports = (gulp, plugins, growl, path)->
gulp.task "watch:assets", ->
plugins.livereload = plugins.livereload.listen()
plugins.watch ["assets/**/*.sass", "assets/**/*.scss"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "sass", "reload"
plugins.watch ["assets/**/*.coffee"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "coffee","concat:js", "uglify:js", "reload"
plugins.watch ["assets/images/**/*"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "images", "reload"
plugins.watch [
"assets/**/**.!(coffee|less|sass|scss)"
"!assets/images{,/**}"
], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence(
["copy", 'concat:template'],
'concat:js',
'uglify:js',
"reload"
)
gulp.task "reload", ->
plugins.util.log "page was reloaded"
gulp
.src "assets/**/*"
.pipe plugins.livereload
quiet: true
gulp.task "watch", ->
plugins.sequence(
["build", "server"]
"watch:assets"
)
| true | ###
Gulp task watch
@create 2014-10-07
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
module.exports = (gulp, plugins, growl, path)->
gulp.task "watch:assets", ->
plugins.livereload = plugins.livereload.listen()
plugins.watch ["assets/**/*.sass", "assets/**/*.scss"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "sass", "reload"
plugins.watch ["assets/**/*.coffee"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "coffee","concat:js", "uglify:js", "reload"
plugins.watch ["assets/images/**/*"], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence "images", "reload"
plugins.watch [
"assets/**/**.!(coffee|less|sass|scss)"
"!assets/images{,/**}"
], (file)->
plugins.util.log file.path.yellow + " was changed"
plugins.sequence(
["copy", 'concat:template'],
'concat:js',
'uglify:js',
"reload"
)
gulp.task "reload", ->
plugins.util.log "page was reloaded"
gulp
.src "assets/**/*"
.pipe plugins.livereload
quiet: true
gulp.task "watch", ->
plugins.sequence(
["build", "server"]
"watch:assets"
)
|
[
{
"context": "#\n# Zen Photon Garden.\n#\n# Copyright (c) 2013 Micah Elizabeth Scott <micah@scanlime.org>\n#\n# Permission is hereby g",
"end": 71,
"score": 0.9998819828033447,
"start": 50,
"tag": "NAME",
"value": "Micah Elizabeth Scott"
},
{
"context": ".\n#\n# Copyright (c)... | example /html/src/zen-widgets.coffee | amane312/photon_generator | 0 | #
# Zen Photon Garden.
#
# Copyright (c) 2013 Micah Elizabeth Scott <micah@scanlime.org>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
class VSlider
# Events
beginChange: () ->
endChange: () ->
valueChanged: (v) ->
constructor: (@button, @track) ->
@button
.mousedown (e) =>
return unless e.which == 1
return if @touchDragging
e.preventDefault()
@beginDrag e.pageY
.bind 'touchstart', (e) =>
touches = e.originalEvent.changedTouches
e.preventDefault()
@touchDragging = true
@beginDrag touches[0].pageY
.bind 'touchmove', (e) =>
return unless @dragging
touches = e.originalEvent.changedTouches
e.preventDefault()
@updateDrag touches[0].pageY
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
$(window)
.mousemove (e) =>
return unless @dragging
return if @touchDragging
e.preventDefault()
@updateDrag e.pageY
.mouseup (e) =>
return unless @dragging
return if @touchDragging
@endDrag()
beginDrag: (pageY) ->
@button.uiActive true
@dragging = true
@beginChange()
@updateDrag pageY
updateDrag: (pageY) ->
h = @button.innerHeight()
y = pageY - @button.parent().offset().top - h/2
value = y / (@track.innerHeight() - h)
value = 1 - Math.min(1, Math.max(0, value))
$('body').css cursor: 'pointer'
@setValue(value)
@valueChanged(value)
endDrag: ->
@dragging = false
@touchDragging = false
@button.uiActive false
$('body').css cursor: 'auto'
@endChange()
setValue: (@value) ->
y = (@track.innerHeight() - @button.innerHeight()) * (1 - @value)
@button.css top: y
class HSlider
# Events
beginChange: () ->
endChange: () ->
valueChanged: (v) ->
constructor: (@button) ->
@button
.mousedown (e) =>
return unless e.which == 1
return if @touchDragging
e.preventDefault()
@beginDrag e.pageX
.bind 'touchstart', (e) =>
touches = e.originalEvent.changedTouches
e.preventDefault()
@touchDragging = true
@beginDrag touches[0].pageX
.bind 'touchmove', (e) =>
return unless @dragging
touches = e.originalEvent.changedTouches
e.preventDefault()
@updateDrag touches[0].pageX
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
$(window)
.mousemove (e) =>
return unless @dragging
return if @touchDragging
@updateDrag(e.pageX)
e.preventDefault()
.mouseup (e) =>
return if @touchDragging
@endDrag()
beginDrag: (pageX) ->
@dragging = true
@beginChange()
@updateDrag pageX
updateDrag: (pageX) ->
w = @button.innerWidth()
x = pageX - @button.parent().offset().left
value = Math.min(1, Math.max(0, x / w))
$('body').css cursor: 'pointer'
@setValue(value)
@valueChanged(value)
endDrag: ->
@dragging = false
@touchDragging = false
$('body').css cursor: 'auto'
@endChange()
setValue: (@value) ->
w = @button.innerWidth()
@button.children('.ui-hslider').width(w * @value)
class Button
# Events
onClick: () ->
constructor: (@button) ->
@button
.mousedown (e) =>
return unless e.which == 1
e.preventDefault()
@beginDrag()
.click (e) =>
@endDrag()
@onClick e
.bind 'touchstart', (e) =>
# Touches time out; long-touch is not interpreted as a click.
e.preventDefault()
@timer = window.setTimeout (() => @endDrag()), 500
@beginDrag()
.bind 'touchmove', (e) =>
return unless @dragging
e.preventDefault()
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
@onClick e
$(window)
.mouseup (e) =>
return unless @dragging
@endDrag()
click: (handler) ->
@onClick = handler
return this
beginDrag: ->
@button.uiActive true
@dragging = true
$('body').css cursor: 'pointer'
endDrag: ->
@button.uiActive false
@dragging = false
$('body').css cursor: 'auto'
if @timer
window.clearTimeout @timer
@timer = null
hotkey: (key) ->
# We only use 'keydown' here... for keys that are also used by the browser UI,
# keyup and keypress don't work for all keys and platforms we care about.
$(document).bind 'keydown', key, (e) =>
@button.uiActive(true)
setTimeout (() => @button.uiActive(false)), 100
@onClick(e)
return this
$.fn.uiActive = (n) ->
if n
@addClass('ui-active')
@removeClass('ui-inactive')
else
@removeClass('ui-active')
@addClass('ui-inactive')
return this
$.fn.button = () ->
return new Button this
| 89424 | #
# Zen Photon Garden.
#
# Copyright (c) 2013 <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
class VSlider
# Events
beginChange: () ->
endChange: () ->
valueChanged: (v) ->
constructor: (@button, @track) ->
@button
.mousedown (e) =>
return unless e.which == 1
return if @touchDragging
e.preventDefault()
@beginDrag e.pageY
.bind 'touchstart', (e) =>
touches = e.originalEvent.changedTouches
e.preventDefault()
@touchDragging = true
@beginDrag touches[0].pageY
.bind 'touchmove', (e) =>
return unless @dragging
touches = e.originalEvent.changedTouches
e.preventDefault()
@updateDrag touches[0].pageY
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
$(window)
.mousemove (e) =>
return unless @dragging
return if @touchDragging
e.preventDefault()
@updateDrag e.pageY
.mouseup (e) =>
return unless @dragging
return if @touchDragging
@endDrag()
beginDrag: (pageY) ->
@button.uiActive true
@dragging = true
@beginChange()
@updateDrag pageY
updateDrag: (pageY) ->
h = @button.innerHeight()
y = pageY - @button.parent().offset().top - h/2
value = y / (@track.innerHeight() - h)
value = 1 - Math.min(1, Math.max(0, value))
$('body').css cursor: 'pointer'
@setValue(value)
@valueChanged(value)
endDrag: ->
@dragging = false
@touchDragging = false
@button.uiActive false
$('body').css cursor: 'auto'
@endChange()
setValue: (@value) ->
y = (@track.innerHeight() - @button.innerHeight()) * (1 - @value)
@button.css top: y
class HSlider
# Events
beginChange: () ->
endChange: () ->
valueChanged: (v) ->
constructor: (@button) ->
@button
.mousedown (e) =>
return unless e.which == 1
return if @touchDragging
e.preventDefault()
@beginDrag e.pageX
.bind 'touchstart', (e) =>
touches = e.originalEvent.changedTouches
e.preventDefault()
@touchDragging = true
@beginDrag touches[0].pageX
.bind 'touchmove', (e) =>
return unless @dragging
touches = e.originalEvent.changedTouches
e.preventDefault()
@updateDrag touches[0].pageX
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
$(window)
.mousemove (e) =>
return unless @dragging
return if @touchDragging
@updateDrag(e.pageX)
e.preventDefault()
.mouseup (e) =>
return if @touchDragging
@endDrag()
beginDrag: (pageX) ->
@dragging = true
@beginChange()
@updateDrag pageX
updateDrag: (pageX) ->
w = @button.innerWidth()
x = pageX - @button.parent().offset().left
value = Math.min(1, Math.max(0, x / w))
$('body').css cursor: 'pointer'
@setValue(value)
@valueChanged(value)
endDrag: ->
@dragging = false
@touchDragging = false
$('body').css cursor: 'auto'
@endChange()
setValue: (@value) ->
w = @button.innerWidth()
@button.children('.ui-hslider').width(w * @value)
class Button
# Events
onClick: () ->
constructor: (@button) ->
@button
.mousedown (e) =>
return unless e.which == 1
e.preventDefault()
@beginDrag()
.click (e) =>
@endDrag()
@onClick e
.bind 'touchstart', (e) =>
# Touches time out; long-touch is not interpreted as a click.
e.preventDefault()
@timer = window.setTimeout (() => @endDrag()), 500
@beginDrag()
.bind 'touchmove', (e) =>
return unless @dragging
e.preventDefault()
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
@onClick e
$(window)
.mouseup (e) =>
return unless @dragging
@endDrag()
click: (handler) ->
@onClick = handler
return this
beginDrag: ->
@button.uiActive true
@dragging = true
$('body').css cursor: 'pointer'
endDrag: ->
@button.uiActive false
@dragging = false
$('body').css cursor: 'auto'
if @timer
window.clearTimeout @timer
@timer = null
hotkey: (key) ->
# We only use 'keydown' here... for keys that are also used by the browser UI,
# keyup and keypress don't work for all keys and platforms we care about.
$(document).bind 'keydown', key, (e) =>
@button.uiActive(true)
setTimeout (() => @button.uiActive(false)), 100
@onClick(e)
return this
$.fn.uiActive = (n) ->
if n
@addClass('ui-active')
@removeClass('ui-inactive')
else
@removeClass('ui-active')
@addClass('ui-inactive')
return this
$.fn.button = () ->
return new Button this
| true | #
# Zen Photon Garden.
#
# Copyright (c) 2013 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
class VSlider
# Events
beginChange: () ->
endChange: () ->
valueChanged: (v) ->
constructor: (@button, @track) ->
@button
.mousedown (e) =>
return unless e.which == 1
return if @touchDragging
e.preventDefault()
@beginDrag e.pageY
.bind 'touchstart', (e) =>
touches = e.originalEvent.changedTouches
e.preventDefault()
@touchDragging = true
@beginDrag touches[0].pageY
.bind 'touchmove', (e) =>
return unless @dragging
touches = e.originalEvent.changedTouches
e.preventDefault()
@updateDrag touches[0].pageY
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
$(window)
.mousemove (e) =>
return unless @dragging
return if @touchDragging
e.preventDefault()
@updateDrag e.pageY
.mouseup (e) =>
return unless @dragging
return if @touchDragging
@endDrag()
beginDrag: (pageY) ->
@button.uiActive true
@dragging = true
@beginChange()
@updateDrag pageY
updateDrag: (pageY) ->
h = @button.innerHeight()
y = pageY - @button.parent().offset().top - h/2
value = y / (@track.innerHeight() - h)
value = 1 - Math.min(1, Math.max(0, value))
$('body').css cursor: 'pointer'
@setValue(value)
@valueChanged(value)
endDrag: ->
@dragging = false
@touchDragging = false
@button.uiActive false
$('body').css cursor: 'auto'
@endChange()
setValue: (@value) ->
y = (@track.innerHeight() - @button.innerHeight()) * (1 - @value)
@button.css top: y
class HSlider
# Events
beginChange: () ->
endChange: () ->
valueChanged: (v) ->
constructor: (@button) ->
@button
.mousedown (e) =>
return unless e.which == 1
return if @touchDragging
e.preventDefault()
@beginDrag e.pageX
.bind 'touchstart', (e) =>
touches = e.originalEvent.changedTouches
e.preventDefault()
@touchDragging = true
@beginDrag touches[0].pageX
.bind 'touchmove', (e) =>
return unless @dragging
touches = e.originalEvent.changedTouches
e.preventDefault()
@updateDrag touches[0].pageX
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
$(window)
.mousemove (e) =>
return unless @dragging
return if @touchDragging
@updateDrag(e.pageX)
e.preventDefault()
.mouseup (e) =>
return if @touchDragging
@endDrag()
beginDrag: (pageX) ->
@dragging = true
@beginChange()
@updateDrag pageX
updateDrag: (pageX) ->
w = @button.innerWidth()
x = pageX - @button.parent().offset().left
value = Math.min(1, Math.max(0, x / w))
$('body').css cursor: 'pointer'
@setValue(value)
@valueChanged(value)
endDrag: ->
@dragging = false
@touchDragging = false
$('body').css cursor: 'auto'
@endChange()
setValue: (@value) ->
w = @button.innerWidth()
@button.children('.ui-hslider').width(w * @value)
class Button
# Events
onClick: () ->
constructor: (@button) ->
@button
.mousedown (e) =>
return unless e.which == 1
e.preventDefault()
@beginDrag()
.click (e) =>
@endDrag()
@onClick e
.bind 'touchstart', (e) =>
# Touches time out; long-touch is not interpreted as a click.
e.preventDefault()
@timer = window.setTimeout (() => @endDrag()), 500
@beginDrag()
.bind 'touchmove', (e) =>
return unless @dragging
e.preventDefault()
.bind 'touchend', (e) =>
return unless @dragging
e.preventDefault()
@endDrag()
@onClick e
$(window)
.mouseup (e) =>
return unless @dragging
@endDrag()
click: (handler) ->
@onClick = handler
return this
beginDrag: ->
@button.uiActive true
@dragging = true
$('body').css cursor: 'pointer'
endDrag: ->
@button.uiActive false
@dragging = false
$('body').css cursor: 'auto'
if @timer
window.clearTimeout @timer
@timer = null
hotkey: (key) ->
# We only use 'keydown' here... for keys that are also used by the browser UI,
# keyup and keypress don't work for all keys and platforms we care about.
$(document).bind 'keydown', key, (e) =>
@button.uiActive(true)
setTimeout (() => @button.uiActive(false)), 100
@onClick(e)
return this
$.fn.uiActive = (n) ->
if n
@addClass('ui-active')
@removeClass('ui-inactive')
else
@removeClass('ui-active')
@addClass('ui-inactive')
return this
$.fn.button = () ->
return new Button this
|
[
{
"context": "T /cache', ->\n beforeEach ->\n @publicKey = '''-----BEGIN PUBLIC KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAILmZ+FAnHRH5uxDCjuZNf4/NO1+RlnB\nrgGbCwRSmrezo4kBnAcOEx54m18toGFLI40oHFazEgvOM3F1N3jxelkCAwEAAQ==\n-----END PUBLIC KEY-----'''\n @privateKey = '''-----BEGIN RSA PRIVATE KE",
"end... | test/integration/get-cache-spec.coffee | octoblu/meshblu-ref-cache-service | 0 | {beforeEach, afterEach, describe, it} = global
{expect} = require 'chai'
sinon = require 'sinon'
request = require 'request'
enableDestroy = require 'server-destroy'
Server = require '../../src/server'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
UUID = require 'uuid'
describe 'GET /cache', ->
beforeEach ->
@publicKey = '''-----BEGIN PUBLIC KEY-----
MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAILmZ+FAnHRH5uxDCjuZNf4/NO1+RlnB
rgGbCwRSmrezo4kBnAcOEx54m18toGFLI40oHFazEgvOM3F1N3jxelkCAwEAAQ==
-----END PUBLIC KEY-----'''
@privateKey = '''-----BEGIN RSA PRIVATE KEY-----
MIIBOgIBAAJBAILmZ+FAnHRH5uxDCjuZNf4/NO1+RlnBrgGbCwRSmrezo4kBnAcO
Ex54m18toGFLI40oHFazEgvOM3F1N3jxelkCAwEAAQJATs8rYbmFuJiFll8ybPls
QXuKgSYScv2hpsPS2TJmhgxQHYNFGc3DDRTRkHpLLxLWOvHw2pJ8EnlLIB2Wv6Tv
0QIhAP4MaMWkcCJNewGkrMUSiPLkMY0MDpja8rKoHTWsL9oNAiEAg+fSrLY6zB7u
xw1jselN6/qJXeGtGtduDu5cL6ztin0CIQDO5lBV1ow0g6GQPwsuHOBH4KyyUIV6
26YY9m2Djs4R6QIgRYAJVi0yL8kAoOriI6S9BOBeLpQxJFpsR/u5oPkps/UCICC+
keYaKc587IGMob72txxUbtNLXfQoU2o4+262ojUd
-----END RSA PRIVATE KEY-----'''
beforeEach (done) ->
@namespace = "test:ref-cache:#{UUID.v4()}"
@redisUri = 'localhost'
@logFn = sinon.spy()
serverOptions = {
port: undefined
disableLogging: true
@logFn
@publicKey
@redisUri
@namespace
}
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
beforeEach (done) ->
@client = new RedisNS @namespace, new Redis @redisUri, dropBufferSupport: true
@client.on 'ready', done
afterEach ->
@server.destroy()
describe 'posting a single key', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/some/path', done
return
beforeEach (done) ->
uploadOptions =
headers:
'X-MESHBLU-UUID': '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
uri: '/cache?key=some.path'
baseUrl: "http://localhost:#{@serverPort}"
json:
some:
path: 'foo'
httpSignature:
keyId: 'meshblu-webhook-key'
key: @privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/some/path'
json: true
request.post uploadOptions, (error, uploadResponse, uploadBody) =>
done error if error?
console.log uploadBody if uploadResponse.statusCode > 499
request.get options, (error, @response, @body) =>
console.log @body if @response.statusCode > 499
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should get the cache file', ->
expect(@body).to.equal 'foo'
describe 'getting the whole device', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/_', done
return
beforeEach (done) ->
uploadOptions =
headers:
'X-MESHBLU-UUID': '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
uri: '/cache?key=_'
baseUrl: "http://localhost:#{@serverPort}"
json:
some:
path: 'foo'
httpSignature:
keyId: 'meshblu-webhook-key'
key: @privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
json: true
request.post uploadOptions, (error, uploadResponse, uploadBody) =>
done error if error?
request.get options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should get the cache file', ->
expect(@body).to.deep.equal some: path: 'foo'
describe 'when device does not exist', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/_', done
return
beforeEach (done) ->
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
json: true
request.get options, (error, @response, @body) =>
console.log @body if @response.statusCode > 499
done error
it 'should return a 404', ->
expect(@response.statusCode).to.equal 404
| 199765 | {beforeEach, afterEach, describe, it} = global
{expect} = require 'chai'
sinon = require 'sinon'
request = require 'request'
enableDestroy = require 'server-destroy'
Server = require '../../src/server'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
UUID = require 'uuid'
describe 'GET /cache', ->
beforeEach ->
@publicKey = '''-----<KEY>'''
@privateKey = '''-----<KEY>
<KEY>Ud
-----END RSA PRIVATE KEY-----'''
beforeEach (done) ->
@namespace = "test:ref-cache:#{UUID.v4()}"
@redisUri = 'localhost'
@logFn = sinon.spy()
serverOptions = {
port: undefined
disableLogging: true
@logFn
@publicKey
@redisUri
@namespace
}
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
beforeEach (done) ->
@client = new RedisNS @namespace, new Redis @redisUri, dropBufferSupport: true
@client.on 'ready', done
afterEach ->
@server.destroy()
describe 'posting a single key', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/some/path', done
return
beforeEach (done) ->
uploadOptions =
headers:
'X-MESHBLU-UUID': '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
uri: '/cache?key=some.path'
baseUrl: "http://localhost:#{@serverPort}"
json:
some:
path: 'foo'
httpSignature:
keyId: '<KEY>'
key: @privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/some/path'
json: true
request.post uploadOptions, (error, uploadResponse, uploadBody) =>
done error if error?
console.log uploadBody if uploadResponse.statusCode > 499
request.get options, (error, @response, @body) =>
console.log @body if @response.statusCode > 499
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should get the cache file', ->
expect(@body).to.equal 'foo'
describe 'getting the whole device', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/_', done
return
beforeEach (done) ->
uploadOptions =
headers:
'X-MESHBLU-UUID': '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
uri: '/cache?key=_'
baseUrl: "http://localhost:#{@serverPort}"
json:
some:
path: 'foo'
httpSignature:
keyId: '<KEY>'
key: @privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
json: true
request.post uploadOptions, (error, uploadResponse, uploadBody) =>
done error if error?
request.get options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should get the cache file', ->
expect(@body).to.deep.equal some: path: 'foo'
describe 'when device does not exist', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/_', done
return
beforeEach (done) ->
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
json: true
request.get options, (error, @response, @body) =>
console.log @body if @response.statusCode > 499
done error
it 'should return a 404', ->
expect(@response.statusCode).to.equal 404
| true | {beforeEach, afterEach, describe, it} = global
{expect} = require 'chai'
sinon = require 'sinon'
request = require 'request'
enableDestroy = require 'server-destroy'
Server = require '../../src/server'
Redis = require 'ioredis'
RedisNS = require '@octoblu/redis-ns'
UUID = require 'uuid'
describe 'GET /cache', ->
beforeEach ->
@publicKey = '''-----PI:KEY:<KEY>END_PI'''
@privateKey = '''-----PI:KEY:<KEY>END_PI
PI:KEY:<KEY>END_PIUd
-----END RSA PRIVATE KEY-----'''
beforeEach (done) ->
@namespace = "test:ref-cache:#{UUID.v4()}"
@redisUri = 'localhost'
@logFn = sinon.spy()
serverOptions = {
port: undefined
disableLogging: true
@logFn
@publicKey
@redisUri
@namespace
}
@server = new Server serverOptions
@server.run =>
@serverPort = @server.address().port
done()
beforeEach (done) ->
@client = new RedisNS @namespace, new Redis @redisUri, dropBufferSupport: true
@client.on 'ready', done
afterEach ->
@server.destroy()
describe 'posting a single key', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/some/path', done
return
beforeEach (done) ->
uploadOptions =
headers:
'X-MESHBLU-UUID': '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
uri: '/cache?key=some.path'
baseUrl: "http://localhost:#{@serverPort}"
json:
some:
path: 'foo'
httpSignature:
keyId: 'PI:KEY:<KEY>END_PI'
key: @privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/some/path'
json: true
request.post uploadOptions, (error, uploadResponse, uploadBody) =>
done error if error?
console.log uploadBody if uploadResponse.statusCode > 499
request.get options, (error, @response, @body) =>
console.log @body if @response.statusCode > 499
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should get the cache file', ->
expect(@body).to.equal 'foo'
describe 'getting the whole device', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/_', done
return
beforeEach (done) ->
uploadOptions =
headers:
'X-MESHBLU-UUID': '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
uri: '/cache?key=_'
baseUrl: "http://localhost:#{@serverPort}"
json:
some:
path: 'foo'
httpSignature:
keyId: 'PI:KEY:<KEY>END_PI'
key: @privateKey
headers: [ 'date', 'X-MESHBLU-UUID' ]
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
json: true
request.post uploadOptions, (error, uploadResponse, uploadBody) =>
done error if error?
request.get options, (error, @response, @body) =>
done error
it 'should return a 200', ->
expect(@response.statusCode).to.equal 200
it 'should get the cache file', ->
expect(@body).to.deep.equal some: path: 'foo'
describe 'when device does not exist', ->
beforeEach (done) ->
@client.del '87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1/_', done
return
beforeEach (done) ->
options =
baseUrl: "http://localhost:#{@serverPort}"
uri: '/cache/87c32ca0-ae2b-4983-bcd4-9ce5500fe3c1'
json: true
request.get options, (error, @response, @body) =>
console.log @body if @response.statusCode > 499
done error
it 'should return a 404', ->
expect(@response.statusCode).to.equal 404
|
[
{
"context": "end(\"<option jpg-val='#{jpg_url}' username-val='#{default_username}' password-val='#{default_password}' selected='se",
"end": 2436,
"score": 0.9821484684944153,
"start": 2420,
"tag": "USERNAME",
"value": "default_username"
},
{
"context": " username-val='#{default_user... | data/coffeescript/202ca33b972f16f13410cf05e393146a_add_camera.js.coffee | maxim5/code-inspector | 5 | #= require jquery
#= require jquery_ujs
#= require bootstrap
#= require ladda/spin.min.js
#= require ladda/ladda.min.js
Evercam_MEDIA_URL = 'https://media.evercam.io/v1/'
Evercam_API_URL = 'https://api.evercam.io/v1/'
Dasboard_URL = 'https://dash.evercam.io'
API_ID = ''
API_Key = ''
iframeWindow = undefined
gotSnapshot = false
loader = null
sortByKey = (array, key) ->
array.sort (a, b) ->
x = a[key]
y = b[key]
(if (x < y) then -1 else ((if (x > y) then 1 else 0)))
loadVendors = ->
data = {}
onError = (jqXHR, status, error) ->
false
onSuccess = (result, status, jqXHR) ->
vendors = sortByKey(result.vendors, "name")
for vendor in vendors
$("#camera-vendor").append("<option value='#{vendor.id}'>#{vendor.name}</option>")
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/json; charset=utf-8"
type: 'GET'
url: "#{Evercam_API_URL}vendors"
jQuery.ajax(settings)
true
loadVendorModels = (vendor_id) ->
$("#camera-model option").remove()
$("#camera-model").prop("disabled", true)
if vendor_id is ""
$("#camera-model").append('<option value="">Unknown / not specified</option>');
$("#camera-snapshot-url").val('')
$("#vemdor-image").attr("src", "/assets/plain.png")
$("#model-image").attr("src", "/assets/plain.png")
return
$("#camera-model").append('<option value="">Loading...</option>');
data = {}
data.vendor_id = vendor_id
data.limit = 400
onError = (jqXHR, status, error) ->
false
onSuccess = (result, status, jqXHR) ->
$("#camera-model option").remove()
if result.models.length == 0
$("#camera-model").append('<option value="">No Model Found</option>');
return
models = sortByKey(result.models, "name")
for model in models
jpg_url = if model.defaults.snapshots and model.defaults.snapshots.jpg.toLowerCase() isnt "unknown" then model.defaults.snapshots.jpg else ''
default_username = if model.defaults.auth != null and model.defaults.auth != undefined then model.defaults.auth.basic.username else ''
default_password = if model.defaults.auth != null and model.defaults.auth != undefined then model.defaults.auth.basic.password else ''
if model.name.toLowerCase().indexOf('default') isnt -1
$("#camera-model").prepend("<option jpg-val='#{jpg_url}' username-val='#{default_username}' password-val='#{default_password}' selected='selected' value='#{model.id}'>#{model.name}</option>")
hasModelImage($("#camera-vendor").val(), model.id)
else
$("#camera-model").append("<option jpg-val='#{jpg_url}' username-val='#{default_username}' password-val='#{default_password}' value='#{model.id}'>#{model.name}</option>")
$("#camera-model").removeAttr("disabled")
if $("#camera-model").find(":selected").attr("jpg-val") isnt 'Unknown'
selected_option = $("#camera-model").find(":selected")
cleanAndSetJpegUrl selected_option.attr("jpg-val")
$("#default-username").text(selected_option.attr("username-val"))
$("#default-password").text(selected_option.attr("password-val"))
$("#camera-snapshot-url").removeClass("invalid").addClass("valid")
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/json; charset=utf-8"
type: 'GET'
url: "#{Evercam_API_URL}models"
jQuery.ajax(settings)
true
hasModelImage = (vendor_id, model_id) ->
img = new Image()
image_url = "https://evercam-public-assets.s3.amazonaws.com/#{vendor_id}/#{model_id}/thumbnail.jpg"
img.onload = ->
$("#model-image").attr("src", image_url)
img.onerror = ->
$("#model-image").attr("src", "/assets/plain.png")
img.src = image_url
handleVendorModelEvents = ->
$("#camera-vendor").on "change", ->
img = new Image()
image_url = "https://evercam-public-assets.s3.amazonaws.com/#{$(this).val()}/logo.jpg"
img.onload = ->
$("#vemdor-image").attr("src", image_url)
img.onerror = ->
$("#vemdor-image").attr("src", "/assets/plain.png")
img.src = image_url
loadVendorModels($(this).val())
$("#camera-model").on "change", ->
selected_option = $(this).find(":selected")
hasModelImage($("#camera-vendor").val(), $(this).val())
snapshot_url = selected_option.attr("jpg-val")
$("#default-username").text(selected_option.attr("username-val"))
$("#default-password").text(selected_option.attr("password-val"))
if snapshot_url isnt 'Unknown'
cleanAndSetJpegUrl snapshot_url
cleanAndSetJpegUrl = (jpeg_url) ->
if jpeg_url.indexOf('/') == 0
jpeg_url = jpeg_url.substr(1)
$("#camera-snapshot-url").val jpeg_url
useAuthentication = ->
$("#required-authentication").on 'click', ->
if $(this).is(":checked")
$("#authentication").removeClass("hide")
else
$("#authentication").addClass("hide")
handleInputEvents = ->
$("#camera-url").on 'keyup', (e) ->
if validate_hostname($(this).val())
$(this).removeClass("invalid").addClass("valid")
else
$(this).removeClass("valid").addClass("invalid")
validAllInformation()
$("#camera-url").on 'focus', (e) ->
$(".info-box .info-header").text("EXTERNAL IP / URL")
$(".info-box .info-text").text("Put the public URL or IP address of your camera. You will need to have setup port forwarding for your camera.")
$(".external-url").on 'click', ->
$(".info-box .info-header").text("EXTERNAL IP / URL")
$(".info-box .info-text").text("Put the public URL or IP address of your camera.")
$("#camera-port").on 'keyup', (e) ->
if validateInt($(this).val())
$(this).removeClass("invalid").addClass("valid")
else
$(this).removeClass("valid").addClass("invalid")
validAllInformation()
$("#camera-port").on 'focus', (e) ->
$(".info-box .info-header").text("EXTERNAL PORT")
$(".info-box .info-text").text("The port should be a 2-5 digit number. The default external port is 80.")
$(".port").on 'click', ->
$(".info-box .info-header").text("EXTERNAL PORT")
$(".info-box .info-text").text("The port should be a 2-5 digit number. The default external port is 80.")
$("#camera-snapshot-url").on 'keyup', (e) ->
if $(this).val() is ''
$(this).removeClass("valid").addClass("invalid")
else
$(this).removeClass("invalid").addClass("valid")
validAllInformation()
$("#camera-snapshot-url").on 'focus', (e) ->
$(".info-box .info-header").text("SNAPSHOT URL")
$(".info-box .info-text").text("If you know your Camera Vendor & Model we can work this out for you. You can also enter it manually for your camera.")
$(".snapshot-url").on 'click', ->
$(".info-box .info-header").text("SNAPSHOT URL")
$(".info-box .info-text").text("If you know your Camera Vendor & Model we can work this out for you. You can also enter it manually for your camera.")
$("#camera-name").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$("#camera-id").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$("#user-email").on 'keyup', (e) ->
if validateEmail($(this).val())
$(this).removeClass("invalid").addClass("valid")
else
$(this).removeClass("valid").addClass("invalid")
$("#user-password").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$("#username").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$(".default-username").on 'click', ->
$("#camera-username").val($("#default-username").text())
$(".default-password").on 'click', ->
$("#camera-password").val($("#default-password").text())
validate_hostname = (str) ->
ValidIpAddressRegex = /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/
ValidHostnameRegex = /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/
ValidIpAddressRegex.test(str) or ValidHostnameRegex.test(str)
validateInt = (value) ->
reg = /^(0|[0-9][1-9]|[1-9][0-9]*)$/
reg.test value
validateEmail = (email) ->
reg = /^([A-Za-z0-9_\-\.])+\@([A-Za-z0-9_\-\.])+\.([A-Za-z]{2,4})$/
#remove all white space from value before validating
emailtrimed = email.replace(RegExp(' ', 'gi'), '')
reg.test emailtrimed
validAllInformation = ->
if $("#camera-port") is ''
if $("#camera-url").hasClass('valid') && $("#camera-snapshot-url").hasClass('valid')
$(".test-image").removeClass('hide')
$(".help-texts").addClass('hide')
else
$(".test-image").addClass('hide')
$(".help-texts").removeClass('hide')
else
if $("#camera-url").hasClass('valid') && $("#camera-port").hasClass('valid') && $("#camera-snapshot-url").hasClass('valid')
$(".test-image").removeClass('hide')
$(".help-texts").addClass('hide')
else
$(".test-image").addClass('hide')
$(".help-texts").removeClass('hide')
testSnapshot = ->
$("#test-snapshot").on 'click', ->
initLadda(this)
port = $("#camera-port").val() unless $("#camera-port").val() is ''
data = {}
data.external_url = "http://#{$('#camera-url').val()}:#{port}"
data.jpg_url = $('#camera-snapshot-url').val()
data.cam_username = $("#camera-username").val() unless $("#camera-username").val() is ''
data.cam_password = $("#camera-password").val() unless $("#camera-password").val() is ''
data.vendor_id = $("#camera-vendor").val() unless $("#camera-vendor").val() is ''
onError = (jqXHR, status, error) ->
$(".snapshot-msg").html(jqXHR.responseJSON.message)
$(".snapshot-msg").removeClass("msg-success").addClass("msg-error")
$(".snapshot-msg").show()
if loader isnt null
loader.stop()
onSuccess = (result, status, jqXHR) ->
if result.status is 'ok'
$("#testimg").attr('src', result.data)
$(".snapshot-msg").html("We got a snapshot!")
$(".snapshot-msg").removeClass("msg-error").addClass("msg-success")
$(".snapshot-msg").show()
$("#test-snapshot").hide()
$("#continue-step2").show()
gotSnapshot = true
if loader isnt null
loader.stop()
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Evercam_MEDIA_URL}cameras/test"
jQuery.ajax(settings)
handleContinueBtn = ->
$("#continue-step2").on 'click', ->
switchTab("camera-details", "camera-info")
$("#continue-step3").on 'click', ->
if $("#camera-name").val() is ''
$("#camera-name").removeClass("valid").addClass("invalid")
return
if $("#camera-id").val() is ''
$("#camera-id").removeClass("valid").addClass("invalid")
return
$("#camera-name").removeClass("invalid").addClass("valid")
switchTab("camera-info", "user-create")
autoCreateCameraId = ->
$("#camera-name").on 'keyup', ->
$("#camera-id").val $(this).val().replace(RegExp(" ", "g"), "-").toLowerCase()
hasCameraInfo = ->
if $("#camera-url").val() is '' && $("#camera-snapshot-url").val() is ''
$("#camera-url").removeClass("valid").addClass("invalid")
$("#camera-snapshot-url").removeClass("valid").addClass("invalid")
switchTab("user-create", "camera-details")
return false
if $("#camera-name").val() is '' && $("#camera-id").val() is ''
$("#camera-name").removeClass("valid").addClass("invalid")
$("#camera-id").removeClass("valid").addClass("invalid")
switchTab("user-create", "camera-info")
return false
true
autoLogInDashboard = () ->
data = {
'session[login]': $("#username").val()
'session[password]': $("#user-password").val()
'session[widget]': 'login-from-widget'
'authenticity_token': $("#authenticity_token").val()
}
onError = (jqXHR, status, error) ->
false
onSuccess = (result, status, jqXHR) ->
parent.location.href = "#{Dasboard_URL}"
true
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Dasboard_URL}/sessions"
jQuery.ajax(settings)
initLadda = (control_id) ->
loader = Ladda.create(control_id)
loader.start()
progress = 0
interval = setInterval(->
progress = Math.min(progress + 0.025, 1)
loader.setProgress(progress)
if (progress == 1)
loader.stop()
clearInterval(interval)
, 200)
createUserAccount = ->
$("#create-account").on 'click', ->
if $("#username").val() is ''
$("#username").removeClass("valid").addClass("invalid")
return
if $("#user-email").val() is '' || !validateEmail($("#user-email").val())
$("#user-email").removeClass("valid").addClass("invalid")
return
if $("#user-password").val() is ''
$("#user-password").removeClass("valid").addClass("invalid")
return
if !hasCameraInfo()
return
initLadda(this)
if API_ID isnt '' && API_Key isnt ''
createCamera(API_ID, API_Key)
return
data = {}
data.firstname = $("#username").val()
data.lastname = $("#username").val()
data.username = $("#username").val()
data.email = $("#user-email").val()
data.password = $("#user-password").val()
data.token = $("#app_token").val()
onError = (jqXHR, status, error) ->
$("#message-user-create").text(jqXHR.responseJSON.message)
$("#message-user-create").removeClass("hide")
if loader isnt null
loader.stop()
onSuccess = (result, status, jqXHR) ->
getAPICredentials()
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Evercam_API_URL}users"
jQuery.ajax(settings)
getAPICredentials = ->
data = {}
data.password = $("#user-password").val()
onError = (jqXHR, status, error) ->
if loader isnt null
loader.stop()
false
onSuccess = (result, status, jqXHR) ->
API_ID = result.api_id
API_Key = result.api_key
createCamera(result.api_id, result.api_key)
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/json; charset=utf-8"
type: 'GET'
url: "#{Evercam_API_URL}users/#{$("#user-email").val()}/credentials"
jQuery.ajax(settings)
createCamera = (api_id, api_key) ->
data = {}
data.name = $("#camera-name").val()
data.vendor = $("#camera-vendor").val() unless $("#camera-vendor").val() is ''
data.model = $('#camera-model').val() unless $("#camera-model").val() is ''
data.is_public = false
data.is_online = true
data.cam_username = $("#camera-username").val() unless $("#camera-username").val() is ''
data.cam_password = $("#camera-password").val() unless $("#camera-password").val() is ''
data.external_host = $("#camera-url").val()
data.external_http_port = $("#camera-port").val() unless $("#camera-port").val() is ''
data.jpg_url = $("#camera-snapshot-url").val()
onError = (jqXHR, status, error) ->
$("#message-camera-info").text(jqXHR.responseJSON.message)
$("#message-camera-info").removeClass("hide")
$("#message-user-create").addClass("hide")
switchTab("user-create", "camera-info")
if loader isnt null
loader.stop()
onSuccess = (result, status, jqXHR) ->
parent.location.href = "#{Dasboard_URL}/v1/cameras?api_id=#{api_id}&api_key=#{api_key}"
onDuplicateError = (xhr) ->
switchTab("user-create", "camera-info")
$("#message-camera-info").text(xhr.responseText.message)
$("#message-camera-info").removeClass("hide")
$("#message-user-create").addClass("hide")
if loader isnt null
loader.stop()
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
statusCode: {409: onDuplicateError, 400: onDuplicateError },
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Evercam_API_URL}cameras?api_id=#{api_id}&api_key=#{api_key}"
jQuery.ajax(settings)
clearForm = ->
$("#camera-id").val('')
$("#camera-id").removeClass('valid').removeClass("invalid")
$("#camera-name").val('')
$("#camera-name").removeClass('valid').removeClass("invalid")
$("#user-email").val('')
$("#user-email").removeClass('valid').removeClass("invalid")
$("#username").val('')
$("#username").removeClass('valid').removeClass("invalid")
$("#user-password").val('')
$("#user-password").removeClass('valid').removeClass("invalid")
$("#camera-username").val('')
$("#camera-password").val('')
$("#camera-port").val('')
$("#camera-port").removeClass('valid').removeClass("invalid")
$("#camera-url").val('')
$("#camera-url").removeClass('valid').removeClass("invalid")
$("#camera-snapshot-url").val('')
$("#camera-snapshot-url").removeClass('valid').removeClass("invalid")
$("#camera-vendor").val('')
$("#camera-model option").remove()
$("#camera-model").append('<option value="">Unknown / Not specified</option>');
switchTab("user-create", "camera-details")
$("#required-authentication").removeAttr("checked")
$("#authentication").addClass("hide")
$("#message-camera-info").addClass("hide")
$("#message-user-create").addClass("hide")
$("#testimg").attr('src', '')
$(".snapshot-msg").hide()
$("#test-snapshot").show()
$("#continue-step2").hide()
API_ID = ''
API_Key = ''
onClickTabs = ->
$(".nav-steps li").on 'click', ->
if !gotSnapshot
return
previousTab = $(".nav-steps li.active").attr("href")
$(".nav-steps li").removeClass('active')
currentTab = $(this).attr("href")
$(this).addClass('active')
$("#{previousTab}").fadeOut(300, ->
$("#{currentTab}").fadeIn(300)
)
switchTab = (hideTab, showTab) ->
$(".nav-steps li").removeClass('active')
$("##{hideTab}").fadeOut(300, ->
$("##{showTab}").fadeIn(300)
)
$("#li-#{showTab}").addClass('active')
initAddCamera = ->
url = window.location.origin
embedCode = '<div evercam="add-camera-public"></div>' + '<script type="text/javascript" src="' + url + '/widgets/add.camera.js"></script>'
$('#code').html embedCode
$('.placeholder').empty()
iframe = jQuery('<iframe />').css(
'overflow': 'hidden'
'width': '100%'
'height': '420px').attr(
'src': '/widgets/cameras/public/add'
'frameborder': '0'
scrolling: 'no').appendTo('.placeholder')
return
resizeIframe = (iframeControl) ->
iframeWindow = iframeControl
iframeControl.style.height = iframeControl.contentWindow.document.body.scrollHeight + 'px'
return
handleWindowResize = ->
$(window).resize ->
if !iframeWindow
return
resizeIframe iframeWindow
return
window.initializeAddCameraPublic = ->
useAuthentication()
loadVendors()
handleVendorModelEvents()
handleInputEvents()
testSnapshot()
handleContinueBtn()
createUserAccount()
onClickTabs()
window.initializeAddCamera = ->
initAddCamera();
$("#code").on "click", ->
this.select();
handleWindowResize()
$(window, document, undefined).ready ->
wskCheckbox = do ->
wskCheckboxes = []
SPACE_KEY = 32
addEventHandler = (elem, eventType, handler) ->
if elem.addEventListener
elem.addEventListener eventType, handler, false
else if elem.attachEvent
elem.attachEvent 'on' + eventType, handler
return
clickHandler = (e) ->
e.stopPropagation()
if @className.indexOf('checked') < 0
@className += ' checked'
else
@className = 'chk-span'
return
keyHandler = (e) ->
e.stopPropagation()
if e.keyCode == SPACE_KEY
clickHandler.call this, e
# Also update the checkbox state.
cbox = document.getElementById(@parentNode.getAttribute('for'))
cbox.checked = !cbox.checked
return
clickHandlerLabel = (e) ->
id = @getAttribute('for')
i = wskCheckboxes.length
while i--
if wskCheckboxes[i].id == id
if wskCheckboxes[i].checkbox.className.indexOf('checked') < 0
wskCheckboxes[i].checkbox.className += ' checked'
else
wskCheckboxes[i].checkbox.className = 'chk-span'
break
return
findCheckBoxes = ->
labels = document.getElementsByTagName('label')
i = labels.length
while i--
posCheckbox = document.getElementById(labels[i].getAttribute('for'))
if posCheckbox != null and posCheckbox.type == 'checkbox'
text = labels[i].innerText
span = document.createElement('span')
span.className = 'chk-span'
span.tabIndex = i
labels[i].insertBefore span, labels[i].firstChild
addEventHandler span, 'click', clickHandler
addEventHandler span, 'keyup', keyHandler
addEventHandler labels[i], 'click', clickHandlerLabel
wskCheckboxes.push
'checkbox': span
'id': labels[i].getAttribute('for')
return
{ init: findCheckBoxes }
wskCheckbox.init()
return
| 20068 | #= require jquery
#= require jquery_ujs
#= require bootstrap
#= require ladda/spin.min.js
#= require ladda/ladda.min.js
# Evercam service endpoints used throughout the widget.
Evercam_MEDIA_URL = 'https://media.evercam.io/v1/'
Evercam_API_URL = 'https://api.evercam.io/v1/'
# NOTE(review): "Dasboard" is a typo, but renaming would touch every reference.
Dasboard_URL = 'https://dash.evercam.io'
# API credentials cached after user creation so a failed camera-create
# can be retried without registering the user a second time.
API_ID = ''
API_Key = ''
# The embedded add-camera iframe, remembered for window-resize re-fitting.
iframeWindow = undefined
# Becomes true after a successful snapshot test; unlocks tab navigation.
gotSnapshot = false
# Active Ladda spinner instance, or null when idle.
loader = null
# Sort `array` in place, ascending by the given property; returns the array.
sortByKey = (array, key) ->
  array.sort (left, right) ->
    return -1 if left[key] < right[key]
    return 1 if left[key] > right[key]
    0
# Fetch the vendor list from the Evercam API and fill the vendor dropdown
# in alphabetical order. Request errors are silently ignored.
loadVendors = ->
  fillDropdown = (result, status, jqXHR) ->
    for vendor in sortByKey(result.vendors, "name")
      $("#camera-vendor").append("<option value='#{vendor.id}'>#{vendor.name}</option>")
  jQuery.ajax
    cache: false
    data: {}
    dataType: 'json'
    error: (jqXHR, status, error) -> false
    success: fillDropdown
    contentType: "application/json; charset=utf-8"
    type: 'GET'
    url: "#{Evercam_API_URL}vendors"
  true
# Populate the model dropdown for `vendor_id` from the Evercam API.
# A model whose name contains "default" is prepended and pre-selected, and
# its defaults (snapshot jpg url, basic-auth username/password) are copied
# into the form fields. Passing '' resets the dropdown and both images.
loadVendorModels = (vendor_id) ->
  $("#camera-model option").remove()
  $("#camera-model").prop("disabled", true)
  if vendor_id is ""
    $("#camera-model").append('<option value="">Unknown / not specified</option>');
    $("#camera-snapshot-url").val('')
    $("#vemdor-image").attr("src", "/assets/plain.png")
    $("#model-image").attr("src", "/assets/plain.png")
    return
  $("#camera-model").append('<option value="">Loading...</option>');
  data = {}
  data.vendor_id = vendor_id
  data.limit = 400
  onError = (jqXHR, status, error) ->
    false
  onSuccess = (result, status, jqXHR) ->
    $("#camera-model option").remove()
    if result.models.length == 0
      $("#camera-model").append('<option value="">No Model Found</option>');
      return
    models = sortByKey(result.models, "name")
    for model in models
      jpg_url = if model.defaults.snapshots and model.defaults.snapshots.jpg.toLowerCase() isnt "unknown" then model.defaults.snapshots.jpg else ''
      default_username = if model.defaults.auth != null and model.defaults.auth != undefined then model.defaults.auth.basic.username else ''
      default_password = if model.defaults.auth != null and model.defaults.auth != undefined then model.defaults.auth.basic.password else ''
      if model.name.toLowerCase().indexOf('default') isnt -1
        # FIX: the password-val attribute had lost its interpolation; restore
        # #{default_password} so the form can offer the model's default.
        $("#camera-model").prepend("<option jpg-val='#{jpg_url}' username-val='#{default_username}' password-val='#{default_password}' selected='selected' value='#{model.id}'>#{model.name}</option>")
        hasModelImage($("#camera-vendor").val(), model.id)
      else
        $("#camera-model").append("<option jpg-val='#{jpg_url}' username-val='#{default_username}' password-val='#{default_password}' value='#{model.id}'>#{model.name}</option>")
    $("#camera-model").removeAttr("disabled")
    if $("#camera-model").find(":selected").attr("jpg-val") isnt 'Unknown'
      selected_option = $("#camera-model").find(":selected")
      cleanAndSetJpegUrl selected_option.attr("jpg-val")
      $("#default-username").text(selected_option.attr("username-val"))
      $("#default-password").text(selected_option.attr("password-val"))
      $("#camera-snapshot-url").removeClass("invalid").addClass("valid")
  settings =
    cache: false
    data: data
    dataType: 'json'
    error: onError
    success: onSuccess
    contentType: "application/json; charset=utf-8"
    type: 'GET'
    url: "#{Evercam_API_URL}models"
  jQuery.ajax(settings)
  true
# Probe S3 for the model's thumbnail; show it on success, or fall back to
# the plain placeholder when the image does not exist.
hasModelImage = (vendor_id, model_id) ->
  thumb = "https://evercam-public-assets.s3.amazonaws.com/#{vendor_id}/#{model_id}/thumbnail.jpg"
  probe = new Image()
  probe.onload = -> $("#model-image").attr("src", thumb)
  probe.onerror = -> $("#model-image").attr("src", "/assets/plain.png")
  probe.src = thumb
# React to vendor/model dropdown changes: swap in the matching vendor logo
# and model thumbnail, and propagate the model's snapshot defaults into
# the form fields.
handleVendorModelEvents = ->
  $("#camera-vendor").on "change", ->
    vendorId = $(this).val()
    logo = "https://evercam-public-assets.s3.amazonaws.com/#{vendorId}/logo.jpg"
    probe = new Image()
    probe.onload = -> $("#vemdor-image").attr("src", logo)
    probe.onerror = -> $("#vemdor-image").attr("src", "/assets/plain.png")
    probe.src = logo
    loadVendorModels(vendorId)
  $("#camera-model").on "change", ->
    chosen = $(this).find(":selected")
    hasModelImage($("#camera-vendor").val(), $(this).val())
    jpg = chosen.attr("jpg-val")
    $("#default-username").text(chosen.attr("username-val"))
    $("#default-password").text(chosen.attr("password-val"))
    cleanAndSetJpegUrl jpg if jpg isnt 'Unknown'
# Strip a single leading slash from jpeg_url and write the result into the
# snapshot-url input.
cleanAndSetJpegUrl = (jpeg_url) ->
  jpeg_url = jpeg_url.substr(1) if jpeg_url.indexOf('/') is 0
  $("#camera-snapshot-url").val jpeg_url
# Show the camera-credentials fieldset only while the "requires
# authentication" checkbox is ticked.
useAuthentication = ->
  $("#required-authentication").on 'click', ->
    # toggleClass(name, state): remove 'hide' when checked, add it when not.
    $("#authentication").toggleClass "hide", not $(this).is(":checked")
# Attach validation and contextual-help handlers to every form input.
# Fields toggle the `valid`/`invalid` CSS classes as the user types, and
# focusing (or clicking the matching label) swaps the info box's header
# and help text. Connection fields also re-run validAllInformation() so
# the "test snapshot" button appears as soon as the step is complete.
handleInputEvents = ->
  $("#camera-url").on 'keyup', (e) ->
    if validate_hostname($(this).val())
      $(this).removeClass("invalid").addClass("valid")
    else
      $(this).removeClass("valid").addClass("invalid")
    validAllInformation()
  $("#camera-url").on 'focus', (e) ->
    $(".info-box .info-header").text("EXTERNAL IP / URL")
    $(".info-box .info-text").text("Put the public URL or IP address of your camera. You will need to have setup port forwarding for your camera.")
  $(".external-url").on 'click', ->
    $(".info-box .info-header").text("EXTERNAL IP / URL")
    $(".info-box .info-text").text("Put the public URL or IP address of your camera.")
  $("#camera-port").on 'keyup', (e) ->
    if validateInt($(this).val())
      $(this).removeClass("invalid").addClass("valid")
    else
      $(this).removeClass("valid").addClass("invalid")
    validAllInformation()
  $("#camera-port").on 'focus', (e) ->
    $(".info-box .info-header").text("EXTERNAL PORT")
    $(".info-box .info-text").text("The port should be a 2-5 digit number. The default external port is 80.")
  $(".port").on 'click', ->
    $(".info-box .info-header").text("EXTERNAL PORT")
    $(".info-box .info-text").text("The port should be a 2-5 digit number. The default external port is 80.")
  $("#camera-snapshot-url").on 'keyup', (e) ->
    # Snapshot url only needs to be non-empty; no format check here.
    if $(this).val() is ''
      $(this).removeClass("valid").addClass("invalid")
    else
      $(this).removeClass("invalid").addClass("valid")
    validAllInformation()
  $("#camera-snapshot-url").on 'focus', (e) ->
    $(".info-box .info-header").text("SNAPSHOT URL")
    $(".info-box .info-text").text("If you know your Camera Vendor & Model we can work this out for you. You can also enter it manually for your camera.")
  $(".snapshot-url").on 'click', ->
    $(".info-box .info-header").text("SNAPSHOT URL")
    $(".info-box .info-text").text("If you know your Camera Vendor & Model we can work this out for you. You can also enter it manually for your camera.")
  # Name/id/password/username fields are marked valid on any keystroke;
  # their emptiness is checked again at step-advance time.
  $("#camera-name").on 'keyup', (e) ->
    $(this).removeClass("invalid").addClass("valid")
  $("#camera-id").on 'keyup', (e) ->
    $(this).removeClass("invalid").addClass("valid")
  $("#user-email").on 'keyup', (e) ->
    if validateEmail($(this).val())
      $(this).removeClass("invalid").addClass("valid")
    else
      $(this).removeClass("valid").addClass("invalid")
  $("#user-password").on 'keyup', (e) ->
    $(this).removeClass("invalid").addClass("valid")
  $("#username").on 'keyup', (e) ->
    $(this).removeClass("invalid").addClass("valid")
  # Copy the model's default credentials into the camera credential inputs.
  $(".default-username").on 'click', ->
    $("#camera-username").val($("#default-username").text())
  $(".default-password").on 'click', ->
    $("#camera-password").val($("#default-password").text())
# True when `str` is a dotted-quad IPv4 address (each octet 0-255) or a
# hostname of dot-separated alphanumeric/hyphen labels with no leading or
# trailing hyphen.
validate_hostname = (str) ->
  ipPattern = /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/
  hostPattern = /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/
  return true if ipPattern.test(str)
  hostPattern.test(str)
# True when `value` is a non-negative decimal integer without leading
# zeros ("0", "80", "8080"). FIX: the old alternation included
# [0-9][1-9], which accepted leading-zero two-digit strings like "01"
# while rejecting "010" — inconsistent and clearly unintended.
validateInt = (value) ->
  /^(0|[1-9][0-9]*)$/.test value
# True when `email` (with literal spaces stripped) matches a simple
# user@domain.tld shape with a 2-4 letter TLD.
validateEmail = (email) ->
  stripped = email.replace(RegExp(' ', 'gi'), '')
  /^([A-Za-z0-9_\-\.])+\@([A-Za-z0-9_\-\.])+\.([A-Za-z]{2,4})$/.test stripped
# Show the "test snapshot" button only when every required field on the
# camera-details step carries the `valid` class; otherwise show the help
# texts. The port is optional: when it is empty only host + snapshot url
# are required.
validAllInformation = ->
  # FIX: the original compared the jQuery object itself to '' (always
  # false, so the port was always treated as required); compare .val().
  if $("#camera-port").val() is ''
    allValid = $("#camera-url").hasClass('valid') && $("#camera-snapshot-url").hasClass('valid')
  else
    allValid = $("#camera-url").hasClass('valid') && $("#camera-port").hasClass('valid') && $("#camera-snapshot-url").hasClass('valid')
  if allValid
    $(".test-image").removeClass('hide')
    $(".help-texts").addClass('hide')
  else
    $(".test-image").addClass('hide')
    $(".help-texts").removeClass('hide')
# Wire the "Test Snapshot" button: POST the entered connection details to
# the media API and preview the returned snapshot. On success the test
# button is replaced by the continue button and tab navigation unlocks.
testSnapshot = ->
  $("#test-snapshot").on 'click', ->
    initLadda(this)
    port = $("#camera-port").val() unless $("#camera-port").val() is ''
    data = {}
    data.external_url = "http://#{$('#camera-url').val()}:#{port}"
    data.jpg_url = $('#camera-snapshot-url').val()
    # Optional credentials / vendor are only sent when filled in.
    data.cam_username = $("#camera-username").val() unless $("#camera-username").val() is ''
    # FIX: restore the "#camera-password" selector that was garbled; the
    # guard on this line shows which field was intended.
    data.cam_password = $("#camera-password").val() unless $("#camera-password").val() is ''
    data.vendor_id = $("#camera-vendor").val() unless $("#camera-vendor").val() is ''
    onError = (jqXHR, status, error) ->
      $(".snapshot-msg").html(jqXHR.responseJSON.message)
      $(".snapshot-msg").removeClass("msg-success").addClass("msg-error")
      $(".snapshot-msg").show()
      if loader isnt null
        loader.stop()
    onSuccess = (result, status, jqXHR) ->
      if result.status is 'ok'
        $("#testimg").attr('src', result.data)
        $(".snapshot-msg").html("We got a snapshot!")
        $(".snapshot-msg").removeClass("msg-error").addClass("msg-success")
        $(".snapshot-msg").show()
        $("#test-snapshot").hide()
        $("#continue-step2").show()
        gotSnapshot = true
      if loader isnt null
        loader.stop()
    settings =
      cache: false
      data: data
      dataType: 'json'
      error: onError
      success: onSuccess
      contentType: "application/x-www-form-urlencoded"
      type: 'POST'
      url: "#{Evercam_MEDIA_URL}cameras/test"
    jQuery.ajax(settings)
# Wire the step-advance buttons: step 1 -> 2 unconditionally, step 2 -> 3
# only once both camera name and id are filled in.
handleContinueBtn = ->
  $("#continue-step2").on 'click', ->
    switchTab("camera-details", "camera-info")
  $("#continue-step3").on 'click', ->
    for field in ["camera-name", "camera-id"]
      if $("##{field}").val() is ''
        $("##{field}").removeClass("valid").addClass("invalid")
        return
    $("#camera-name").removeClass("invalid").addClass("valid")
    switchTab("camera-info", "user-create")
# Derive the camera id from the name as the user types: lowercase, with
# spaces replaced by hyphens.
autoCreateCameraId = ->
  $("#camera-name").on 'keyup', ->
    slug = $(this).val().replace(/ /g, "-").toLowerCase()
    $("#camera-id").val slug
# Guard for account creation: returns false (jumping back to the offending
# step and marking its fields invalid) when the camera connection details
# or the camera name/id are still empty; true otherwise.
hasCameraInfo = ->
  if $("#camera-url").val() is '' && $("#camera-snapshot-url").val() is ''
    for id in ["camera-url", "camera-snapshot-url"]
      $("##{id}").removeClass("valid").addClass("invalid")
    switchTab("user-create", "camera-details")
    return false
  if $("#camera-name").val() is '' && $("#camera-id").val() is ''
    for id in ["camera-name", "camera-id"]
      $("##{id}").removeClass("valid").addClass("invalid")
    switchTab("user-create", "camera-info")
    return false
  true
# Log the freshly created user into the dashboard by POSTing the widget
# credentials to its sessions endpoint; on success the parent window is
# redirected to the dashboard.
autoLogInDashboard = () ->
  data = {
    'session[login]': $("#username").val()
    # FIX: restore the garbled selector; the account password field used
    # throughout this file is #user-password (see createUserAccount).
    'session[password]': $("#user-password").val()
    'session[widget]': 'login-from-widget'
    'authenticity_token': $("#authenticity_token").val()
  }
  onError = (jqXHR, status, error) ->
    false
  onSuccess = (result, status, jqXHR) ->
    parent.location.href = "#{Dasboard_URL}"
    true
  settings =
    cache: false
    data: data
    dataType: 'json'
    error: onError
    success: onSuccess
    contentType: "application/x-www-form-urlencoded"
    type: 'POST'
    url: "#{Dasboard_URL}/sessions"
  jQuery.ajax(settings)
# Start a Ladda spinner on `control_id` and animate its progress bar to
# completion (+0.025 every 200ms, ~8s total). The instance is stored in
# the file-level `loader` so ajax callbacks can stop it early.
initLadda = (control_id) ->
  loader = Ladda.create(control_id)
  loader.start()
  progress = 0
  timer = setInterval(->
    progress = Math.min(progress + 0.025, 1)
    loader.setProgress(progress)
    if progress >= 1
      loader.stop()
      clearInterval(timer)
  , 200)
# Wire the "Create Account" button. Validates the user fields and the
# earlier camera steps, then either reuses already-fetched API credentials
# to (re)create the camera, or registers a new Evercam user first.
createUserAccount = ->
  $("#create-account").on 'click', ->
    if $("#username").val() is ''
      $("#username").removeClass("valid").addClass("invalid")
      return
    if $("#user-email").val() is '' || !validateEmail($("#user-email").val())
      $("#user-email").removeClass("valid").addClass("invalid")
      return
    if $("#user-password").val() is ''
      $("#user-password").removeClass("valid").addClass("invalid")
      return
    if !hasCameraInfo()
      return
    initLadda(this)
    # A previous attempt may already have registered the user and fetched
    # credentials; in that case only the camera creation failed — retry it.
    if API_ID isnt '' && API_Key isnt ''
      createCamera(API_ID, API_Key)
      return
    data = {}
    data.firstname = $("#username").val()
    data.lastname = $("#username").val()
    data.username = $("#username").val()
    data.email = $("#user-email").val()
    # FIX: restore the garbled "$("#user-password")" expression — the
    # field validated above is the one that must be submitted.
    data.password = $("#user-password").val()
    data.token = $("#app_token").val()
    onError = (jqXHR, status, error) ->
      $("#message-user-create").text(jqXHR.responseJSON.message)
      $("#message-user-create").removeClass("hide")
      if loader isnt null
        loader.stop()
    onSuccess = (result, status, jqXHR) ->
      getAPICredentials()
    settings =
      cache: false
      data: data
      dataType: 'json'
      error: onError
      success: onSuccess
      contentType: "application/x-www-form-urlencoded"
      type: 'POST'
      url: "#{Evercam_API_URL}users"
    jQuery.ajax(settings)
# Fetch the new user's API id/key (authenticated with the password they
# just chose), cache them in API_ID/API_Key, then create the camera.
getAPICredentials = ->
  data = {}
  # FIX: restore the garbled "#user-password" selector (the account
  # password field, consistent with createUserAccount).
  data.password = $("#user-password").val()
  onError = (jqXHR, status, error) ->
    if loader isnt null
      loader.stop()
    false
  onSuccess = (result, status, jqXHR) ->
    API_ID = result.api_id
    API_Key = result.api_key
    createCamera(result.api_id, result.api_key)
  settings =
    cache: false
    data: data
    dataType: 'json'
    error: onError
    success: onSuccess
    contentType: "application/json; charset=utf-8"
    type: 'GET'
    url: "#{Evercam_API_URL}users/#{$("#user-email").val()}/credentials"
  jQuery.ajax(settings)
# Create the camera via the Evercam API using the supplied credentials,
# then redirect the parent window to the dashboard. 409 (duplicate id)
# and 400 responses bounce the user back to the camera-info step with the
# server's message.
createCamera = (api_id, api_key) ->
  data = {}
  data.name = $("#camera-name").val()
  data.vendor = $("#camera-vendor").val() unless $("#camera-vendor").val() is ''
  data.model = $('#camera-model').val() unless $("#camera-model").val() is ''
  data.is_public = false
  data.is_online = true
  data.cam_username = $("#camera-username").val() unless $("#camera-username").val() is ''
  # FIX: restore the garbled "$("#camera-password").val()" expression —
  # the guard on this line shows which field was intended.
  data.cam_password = $("#camera-password").val() unless $("#camera-password").val() is ''
  data.external_host = $("#camera-url").val()
  data.external_http_port = $("#camera-port").val() unless $("#camera-port").val() is ''
  data.jpg_url = $("#camera-snapshot-url").val()
  onError = (jqXHR, status, error) ->
    $("#message-camera-info").text(jqXHR.responseJSON.message)
    $("#message-camera-info").removeClass("hide")
    $("#message-user-create").addClass("hide")
    switchTab("user-create", "camera-info")
    if loader isnt null
      loader.stop()
  onSuccess = (result, status, jqXHR) ->
    parent.location.href = "#{Dasboard_URL}/v1/cameras?api_id=#{api_id}&api_key=#{api_key}"
  onDuplicateError = (xhr) ->
    switchTab("user-create", "camera-info")
    # FIX: responseText is a plain string and has no .message; use the
    # parsed responseJSON, consistent with onError above.
    $("#message-camera-info").text(xhr.responseJSON.message)
    $("#message-camera-info").removeClass("hide")
    $("#message-user-create").addClass("hide")
    if loader isnt null
      loader.stop()
  settings =
    cache: false
    data: data
    dataType: 'json'
    error: onError
    success: onSuccess
    statusCode: {409: onDuplicateError, 400: onDuplicateError },
    contentType: "application/x-www-form-urlencoded"
    type: 'POST'
    url: "#{Evercam_API_URL}cameras?api_id=#{api_id}&api_key=#{api_key}"
  jQuery.ajax(settings)
# Reset the whole wizard back to its initial state: blank every field,
# clear validation classes, rebuild the model dropdown, return to the
# first step, and forget any cached API credentials.
clearForm = ->
  $("#camera-id").val('')
  $("#camera-id").removeClass('valid').removeClass("invalid")
  $("#camera-name").val('')
  $("#camera-name").removeClass('valid').removeClass("invalid")
  $("#user-email").val('')
  $("#user-email").removeClass('valid').removeClass("invalid")
  $("#username").val('')
  $("#username").removeClass('valid').removeClass("invalid")
  $("#user-password").val('')
  $("#user-password").removeClass('valid').removeClass("invalid")
  $("#camera-username").val('')
  $("#camera-password").val('')
  $("#camera-port").val('')
  $("#camera-port").removeClass('valid').removeClass("invalid")
  $("#camera-url").val('')
  $("#camera-url").removeClass('valid').removeClass("invalid")
  $("#camera-snapshot-url").val('')
  $("#camera-snapshot-url").removeClass('valid').removeClass("invalid")
  $("#camera-vendor").val('')
  # Rebuild the model dropdown with only the placeholder entry.
  $("#camera-model option").remove()
  $("#camera-model").append('<option value="">Unknown / Not specified</option>');
  switchTab("user-create", "camera-details")
  $("#required-authentication").removeAttr("checked")
  $("#authentication").addClass("hide")
  $("#message-camera-info").addClass("hide")
  $("#message-user-create").addClass("hide")
  # Restore the snapshot-test UI to its pristine state.
  $("#testimg").attr('src', '')
  $(".snapshot-msg").hide()
  $("#test-snapshot").show()
  $("#continue-step2").hide()
  API_ID = ''
  API_Key = ''
# Allow free navigation between wizard tabs — but only once a snapshot
# test has succeeded (gotSnapshot).
onClickTabs = ->
  $(".nav-steps li").on 'click', ->
    # Tabs stay locked until the user has proven the camera works.
    if !gotSnapshot
      return
    previousTab = $(".nav-steps li.active").attr("href")
    $(".nav-steps li").removeClass('active')
    currentTab = $(this).attr("href")
    $(this).addClass('active')
    # NOTE(review): unlike switchTab, no '#' is prepended here — this
    # assumes the li's href attribute already carries the full selector;
    # confirm against the markup.
    $("#{previousTab}").fadeOut(300, ->
      $("#{currentTab}").fadeIn(300)
    )
# Fade out the pane `hideTab`, fade in `showTab`, and move the `active`
# class to the matching nav entry (nav ids follow the li-<tab> pattern).
switchTab = (hideTab, showTab) ->
  $(".nav-steps li").removeClass('active')
  $("#li-#{showTab}").addClass('active')
  $("##{hideTab}").fadeOut 300, ->
    $("##{showTab}").fadeIn 300
# Render the embeddable widget page: show the copy-paste embed snippet in
# #code and load the public add-camera form into an iframe inside
# .placeholder.
initAddCamera = ->
  origin = window.location.origin
  snippet = '<div evercam="add-camera-public"></div>' +
    '<script type="text/javascript" src="' + origin + '/widgets/add.camera.js"></script>'
  $('#code').html snippet
  $('.placeholder').empty()
  frame = jQuery('<iframe />')
  frame.css
    'overflow': 'hidden'
    'width': '100%'
    'height': '420px'
  frame.attr
    'src': '/widgets/cameras/public/add'
    'frameborder': '0'
    scrolling: 'no'
  frame.appendTo('.placeholder')
  return
# Remember the iframe (so window-resize passes can re-fit it later) and
# grow it to its document's full scroll height.
resizeIframe = (iframeControl) ->
  iframeWindow = iframeControl
  fullHeight = iframeControl.contentWindow.document.body.scrollHeight
  iframeControl.style.height = "#{fullHeight}px"
  return
# Re-fit the embedded iframe whenever the browser window is resized.
handleWindowResize = ->
  $(window).resize ->
    resizeIframe(iframeWindow) if iframeWindow
    return
  return
# Entry point for the public add-camera form (runs inside the iframe):
# wires every handler defined above.
window.initializeAddCameraPublic = ->
  useAuthentication()
  loadVendors()
  handleVendorModelEvents()
  handleInputEvents()
  testSnapshot()
  handleContinueBtn()
  createUserAccount()
  onClickTabs()
# Entry point for the host page: renders the embed snippet + iframe,
# selects the snippet text on click, and keeps the iframe sized to fit.
window.initializeAddCamera = ->
  initAddCamera();
  $("#code").on "click", ->
    this.select();
  handleWindowResize()
# On DOM ready, decorate every checkbox that has a <label for=...> with a
# styled span (class chk-span) that mirrors the checkbox's checked state.
# Kept byte-identical apart from comments: the toggle logic is spread
# across three cooperating handlers and depends on exact ordering.
$(window, document, undefined).ready ->
  wskCheckbox = do ->
    # All decorated checkboxes: {checkbox: <span>, id: <input id>}.
    wskCheckboxes = []
    SPACE_KEY = 32
    # Cross-browser event binding (attachEvent covers legacy IE).
    addEventHandler = (elem, eventType, handler) ->
      if elem.addEventListener
        elem.addEventListener eventType, handler, false
      else if elem.attachEvent
        elem.attachEvent 'on' + eventType, handler
      return
    # Toggle the visual 'checked' class on the styled span itself.
    clickHandler = (e) ->
      e.stopPropagation()
      if @className.indexOf('checked') < 0
        @className += ' checked'
      else
        @className = 'chk-span'
      return
    # Space key on the focused span toggles both the span's class and the
    # real checkbox (identified via the parent label's `for` attribute).
    keyHandler = (e) ->
      e.stopPropagation()
      if e.keyCode == SPACE_KEY
        clickHandler.call this, e
        # Also update the checkbox state.
        cbox = document.getElementById(@parentNode.getAttribute('for'))
        cbox.checked = !cbox.checked
      return
    # Clicking the label toggles the span's class; the browser's native
    # label behaviour toggles the actual checkbox.
    clickHandlerLabel = (e) ->
      id = @getAttribute('for')
      i = wskCheckboxes.length
      while i--
        if wskCheckboxes[i].id == id
          if wskCheckboxes[i].checkbox.className.indexOf('checked') < 0
            wskCheckboxes[i].checkbox.className += ' checked'
          else
            wskCheckboxes[i].checkbox.className = 'chk-span'
          break
      return
    # Scan all labels, insert a styled span before each checkbox label's
    # text, and register the three handlers above.
    findCheckBoxes = ->
      labels = document.getElementsByTagName('label')
      i = labels.length
      while i--
        posCheckbox = document.getElementById(labels[i].getAttribute('for'))
        if posCheckbox != null and posCheckbox.type == 'checkbox'
          text = labels[i].innerText
          span = document.createElement('span')
          span.className = 'chk-span'
          span.tabIndex = i
          labels[i].insertBefore span, labels[i].firstChild
          addEventHandler span, 'click', clickHandler
          addEventHandler span, 'keyup', keyHandler
          addEventHandler labels[i], 'click', clickHandlerLabel
          wskCheckboxes.push
            'checkbox': span
            'id': labels[i].getAttribute('for')
      return
    { init: findCheckBoxes }
  wskCheckbox.init()
  return
| true | #= require jquery
#= require jquery_ujs
#= require bootstrap
#= require ladda/spin.min.js
#= require ladda/ladda.min.js
Evercam_MEDIA_URL = 'https://media.evercam.io/v1/'
Evercam_API_URL = 'https://api.evercam.io/v1/'
Dasboard_URL = 'https://dash.evercam.io'
API_ID = ''
API_Key = ''
iframeWindow = undefined
gotSnapshot = false
loader = null
sortByKey = (array, key) ->
array.sort (a, b) ->
x = a[key]
y = b[key]
(if (x < y) then -1 else ((if (x > y) then 1 else 0)))
loadVendors = ->
data = {}
onError = (jqXHR, status, error) ->
false
onSuccess = (result, status, jqXHR) ->
vendors = sortByKey(result.vendors, "name")
for vendor in vendors
$("#camera-vendor").append("<option value='#{vendor.id}'>#{vendor.name}</option>")
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/json; charset=utf-8"
type: 'GET'
url: "#{Evercam_API_URL}vendors"
jQuery.ajax(settings)
true
# Populate the model dropdown for `vendor_id` from the Evercam API.
# A model whose name contains "default" is prepended and pre-selected, and
# its defaults (snapshot jpg url, basic-auth username/password) are copied
# into the form fields. Passing '' resets the dropdown and both images.
loadVendorModels = (vendor_id) ->
  $("#camera-model option").remove()
  $("#camera-model").prop("disabled", true)
  if vendor_id is ""
    $("#camera-model").append('<option value="">Unknown / not specified</option>');
    $("#camera-snapshot-url").val('')
    $("#vemdor-image").attr("src", "/assets/plain.png")
    $("#model-image").attr("src", "/assets/plain.png")
    return
  $("#camera-model").append('<option value="">Loading...</option>');
  data = {}
  data.vendor_id = vendor_id
  data.limit = 400
  onError = (jqXHR, status, error) ->
    false
  onSuccess = (result, status, jqXHR) ->
    $("#camera-model option").remove()
    if result.models.length == 0
      $("#camera-model").append('<option value="">No Model Found</option>');
      return
    models = sortByKey(result.models, "name")
    for model in models
      jpg_url = if model.defaults.snapshots and model.defaults.snapshots.jpg.toLowerCase() isnt "unknown" then model.defaults.snapshots.jpg else ''
      default_username = if model.defaults.auth != null and model.defaults.auth != undefined then model.defaults.auth.basic.username else ''
      default_password = if model.defaults.auth != null and model.defaults.auth != undefined then model.defaults.auth.basic.password else ''
      if model.name.toLowerCase().indexOf('default') isnt -1
        # FIX: the password-val attribute had lost its interpolation; restore
        # #{default_password} so the form can offer the model's default.
        $("#camera-model").prepend("<option jpg-val='#{jpg_url}' username-val='#{default_username}' password-val='#{default_password}' selected='selected' value='#{model.id}'>#{model.name}</option>")
        hasModelImage($("#camera-vendor").val(), model.id)
      else
        $("#camera-model").append("<option jpg-val='#{jpg_url}' username-val='#{default_username}' password-val='#{default_password}' value='#{model.id}'>#{model.name}</option>")
    $("#camera-model").removeAttr("disabled")
    if $("#camera-model").find(":selected").attr("jpg-val") isnt 'Unknown'
      selected_option = $("#camera-model").find(":selected")
      cleanAndSetJpegUrl selected_option.attr("jpg-val")
      $("#default-username").text(selected_option.attr("username-val"))
      $("#default-password").text(selected_option.attr("password-val"))
      $("#camera-snapshot-url").removeClass("invalid").addClass("valid")
  settings =
    cache: false
    data: data
    dataType: 'json'
    error: onError
    success: onSuccess
    contentType: "application/json; charset=utf-8"
    type: 'GET'
    url: "#{Evercam_API_URL}models"
  jQuery.ajax(settings)
  true
hasModelImage = (vendor_id, model_id) ->
img = new Image()
image_url = "https://evercam-public-assets.s3.amazonaws.com/#{vendor_id}/#{model_id}/thumbnail.jpg"
img.onload = ->
$("#model-image").attr("src", image_url)
img.onerror = ->
$("#model-image").attr("src", "/assets/plain.png")
img.src = image_url
handleVendorModelEvents = ->
$("#camera-vendor").on "change", ->
img = new Image()
image_url = "https://evercam-public-assets.s3.amazonaws.com/#{$(this).val()}/logo.jpg"
img.onload = ->
$("#vemdor-image").attr("src", image_url)
img.onerror = ->
$("#vemdor-image").attr("src", "/assets/plain.png")
img.src = image_url
loadVendorModels($(this).val())
$("#camera-model").on "change", ->
selected_option = $(this).find(":selected")
hasModelImage($("#camera-vendor").val(), $(this).val())
snapshot_url = selected_option.attr("jpg-val")
$("#default-username").text(selected_option.attr("username-val"))
$("#default-password").text(selected_option.attr("password-val"))
if snapshot_url isnt 'Unknown'
cleanAndSetJpegUrl snapshot_url
cleanAndSetJpegUrl = (jpeg_url) ->
if jpeg_url.indexOf('/') == 0
jpeg_url = jpeg_url.substr(1)
$("#camera-snapshot-url").val jpeg_url
useAuthentication = ->
$("#required-authentication").on 'click', ->
if $(this).is(":checked")
$("#authentication").removeClass("hide")
else
$("#authentication").addClass("hide")
handleInputEvents = ->
$("#camera-url").on 'keyup', (e) ->
if validate_hostname($(this).val())
$(this).removeClass("invalid").addClass("valid")
else
$(this).removeClass("valid").addClass("invalid")
validAllInformation()
$("#camera-url").on 'focus', (e) ->
$(".info-box .info-header").text("EXTERNAL IP / URL")
$(".info-box .info-text").text("Put the public URL or IP address of your camera. You will need to have setup port forwarding for your camera.")
$(".external-url").on 'click', ->
$(".info-box .info-header").text("EXTERNAL IP / URL")
$(".info-box .info-text").text("Put the public URL or IP address of your camera.")
$("#camera-port").on 'keyup', (e) ->
if validateInt($(this).val())
$(this).removeClass("invalid").addClass("valid")
else
$(this).removeClass("valid").addClass("invalid")
validAllInformation()
$("#camera-port").on 'focus', (e) ->
$(".info-box .info-header").text("EXTERNAL PORT")
$(".info-box .info-text").text("The port should be a 2-5 digit number. The default external port is 80.")
$(".port").on 'click', ->
$(".info-box .info-header").text("EXTERNAL PORT")
$(".info-box .info-text").text("The port should be a 2-5 digit number. The default external port is 80.")
$("#camera-snapshot-url").on 'keyup', (e) ->
if $(this).val() is ''
$(this).removeClass("valid").addClass("invalid")
else
$(this).removeClass("invalid").addClass("valid")
validAllInformation()
$("#camera-snapshot-url").on 'focus', (e) ->
$(".info-box .info-header").text("SNAPSHOT URL")
$(".info-box .info-text").text("If you know your Camera Vendor & Model we can work this out for you. You can also enter it manually for your camera.")
$(".snapshot-url").on 'click', ->
$(".info-box .info-header").text("SNAPSHOT URL")
$(".info-box .info-text").text("If you know your Camera Vendor & Model we can work this out for you. You can also enter it manually for your camera.")
$("#camera-name").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$("#camera-id").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$("#user-email").on 'keyup', (e) ->
if validateEmail($(this).val())
$(this).removeClass("invalid").addClass("valid")
else
$(this).removeClass("valid").addClass("invalid")
$("#user-password").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$("#username").on 'keyup', (e) ->
$(this).removeClass("invalid").addClass("valid")
$(".default-username").on 'click', ->
$("#camera-username").val($("#default-username").text())
$(".default-password").on 'click', ->
$("#camera-password").val($("#default-password").text())
validate_hostname = (str) ->
ValidIpAddressRegex = /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/
ValidHostnameRegex = /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/
ValidIpAddressRegex.test(str) or ValidHostnameRegex.test(str)
# True when `value` is a non-negative decimal integer without leading
# zeros ("0", "80", "8080"). FIX: the old alternation included
# [0-9][1-9], which accepted leading-zero two-digit strings like "01"
# while rejecting "010" — inconsistent and clearly unintended.
validateInt = (value) ->
  /^(0|[1-9][0-9]*)$/.test value
validateEmail = (email) ->
reg = /^([A-Za-z0-9_\-\.])+\@([A-Za-z0-9_\-\.])+\.([A-Za-z]{2,4})$/
#remove all white space from value before validating
emailtrimed = email.replace(RegExp(' ', 'gi'), '')
reg.test emailtrimed
validAllInformation = ->
if $("#camera-port") is ''
if $("#camera-url").hasClass('valid') && $("#camera-snapshot-url").hasClass('valid')
$(".test-image").removeClass('hide')
$(".help-texts").addClass('hide')
else
$(".test-image").addClass('hide')
$(".help-texts").removeClass('hide')
else
if $("#camera-url").hasClass('valid') && $("#camera-port").hasClass('valid') && $("#camera-snapshot-url").hasClass('valid')
$(".test-image").removeClass('hide')
$(".help-texts").addClass('hide')
else
$(".test-image").addClass('hide')
$(".help-texts").removeClass('hide')
testSnapshot = ->
$("#test-snapshot").on 'click', ->
initLadda(this)
port = $("#camera-port").val() unless $("#camera-port").val() is ''
data = {}
data.external_url = "http://#{$('#camera-url').val()}:#{port}"
data.jpg_url = $('#camera-snapshot-url').val()
data.cam_username = $("#camera-username").val() unless $("#camera-username").val() is ''
data.cam_password = $("#PI:PASSWORD:<PASSWORD>END_PI").val() unless $("#camera-password").val() is ''
data.vendor_id = $("#camera-vendor").val() unless $("#camera-vendor").val() is ''
onError = (jqXHR, status, error) ->
$(".snapshot-msg").html(jqXHR.responseJSON.message)
$(".snapshot-msg").removeClass("msg-success").addClass("msg-error")
$(".snapshot-msg").show()
if loader isnt null
loader.stop()
onSuccess = (result, status, jqXHR) ->
if result.status is 'ok'
$("#testimg").attr('src', result.data)
$(".snapshot-msg").html("We got a snapshot!")
$(".snapshot-msg").removeClass("msg-error").addClass("msg-success")
$(".snapshot-msg").show()
$("#test-snapshot").hide()
$("#continue-step2").show()
gotSnapshot = true
if loader isnt null
loader.stop()
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Evercam_MEDIA_URL}cameras/test"
jQuery.ajax(settings)
handleContinueBtn = ->
$("#continue-step2").on 'click', ->
switchTab("camera-details", "camera-info")
$("#continue-step3").on 'click', ->
if $("#camera-name").val() is ''
$("#camera-name").removeClass("valid").addClass("invalid")
return
if $("#camera-id").val() is ''
$("#camera-id").removeClass("valid").addClass("invalid")
return
$("#camera-name").removeClass("invalid").addClass("valid")
switchTab("camera-info", "user-create")
autoCreateCameraId = ->
$("#camera-name").on 'keyup', ->
$("#camera-id").val $(this).val().replace(RegExp(" ", "g"), "-").toLowerCase()
hasCameraInfo = ->
if $("#camera-url").val() is '' && $("#camera-snapshot-url").val() is ''
$("#camera-url").removeClass("valid").addClass("invalid")
$("#camera-snapshot-url").removeClass("valid").addClass("invalid")
switchTab("user-create", "camera-details")
return false
if $("#camera-name").val() is '' && $("#camera-id").val() is ''
$("#camera-name").removeClass("valid").addClass("invalid")
$("#camera-id").removeClass("valid").addClass("invalid")
switchTab("user-create", "camera-info")
return false
true
autoLogInDashboard = () ->
data = {
'session[login]': $("#username").val()
'session[password]': $("#PI:PASSWORD:<PASSWORD>END_PI").val()
'session[widget]': 'login-from-widget'
'authenticity_token': $("#authenticity_token").val()
}
onError = (jqXHR, status, error) ->
false
onSuccess = (result, status, jqXHR) ->
parent.location.href = "#{Dasboard_URL}"
true
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Dasboard_URL}/sessions"
jQuery.ajax(settings)
initLadda = (control_id) ->
loader = Ladda.create(control_id)
loader.start()
progress = 0
interval = setInterval(->
progress = Math.min(progress + 0.025, 1)
loader.setProgress(progress)
if (progress == 1)
loader.stop()
clearInterval(interval)
, 200)
createUserAccount = ->
$("#create-account").on 'click', ->
if $("#username").val() is ''
$("#username").removeClass("valid").addClass("invalid")
return
if $("#user-email").val() is '' || !validateEmail($("#user-email").val())
$("#user-email").removeClass("valid").addClass("invalid")
return
if $("#user-password").val() is ''
$("#user-password").removeClass("valid").addClass("invalid")
return
if !hasCameraInfo()
return
initLadda(this)
if API_ID isnt '' && API_Key isnt ''
createCamera(API_ID, API_Key)
return
data = {}
data.firstname = $("#username").val()
data.lastname = $("#username").val()
data.username = $("#username").val()
data.email = $("#user-email").val()
data.password = PI:PASSWORD:<PASSWORD>END_PI").val()
data.token = $("#app_token").val()
onError = (jqXHR, status, error) ->
$("#message-user-create").text(jqXHR.responseJSON.message)
$("#message-user-create").removeClass("hide")
if loader isnt null
loader.stop()
onSuccess = (result, status, jqXHR) ->
getAPICredentials()
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Evercam_API_URL}users"
jQuery.ajax(settings)
getAPICredentials = ->
data = {}
data.password = $("#PI:PASSWORD:<PASSWORD>END_PI").val()
onError = (jqXHR, status, error) ->
if loader isnt null
loader.stop()
false
onSuccess = (result, status, jqXHR) ->
API_ID = result.api_id
API_Key = result.api_key
createCamera(result.api_id, result.api_key)
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
contentType: "application/json; charset=utf-8"
type: 'GET'
url: "#{Evercam_API_URL}users/#{$("#user-email").val()}/credentials"
jQuery.ajax(settings)
createCamera = (api_id, api_key) ->
data = {}
data.name = $("#camera-name").val()
data.vendor = $("#camera-vendor").val() unless $("#camera-vendor").val() is ''
data.model = $('#camera-model').val() unless $("#camera-model").val() is ''
data.is_public = false
data.is_online = true
data.cam_username = $("#camera-username").val() unless $("#camera-username").val() is ''
data.cam_password = PI:PASSWORD:<PASSWORD>END_PI").PI:PASSWORD:<PASSWORD>END_PI() unless $("#camera-password").val() is ''
data.external_host = $("#camera-url").val()
data.external_http_port = $("#camera-port").val() unless $("#camera-port").val() is ''
data.jpg_url = $("#camera-snapshot-url").val()
onError = (jqXHR, status, error) ->
$("#message-camera-info").text(jqXHR.responseJSON.message)
$("#message-camera-info").removeClass("hide")
$("#message-user-create").addClass("hide")
switchTab("user-create", "camera-info")
if loader isnt null
loader.stop()
onSuccess = (result, status, jqXHR) ->
parent.location.href = "#{Dasboard_URL}/v1/cameras?api_id=#{api_id}&api_key=#{api_key}"
onDuplicateError = (xhr) ->
switchTab("user-create", "camera-info")
$("#message-camera-info").text(xhr.responseText.message)
$("#message-camera-info").removeClass("hide")
$("#message-user-create").addClass("hide")
if loader isnt null
loader.stop()
settings =
cache: false
data: data
dataType: 'json'
error: onError
success: onSuccess
statusCode: {409: onDuplicateError, 400: onDuplicateError },
contentType: "application/x-www-form-urlencoded"
type: 'POST'
url: "#{Evercam_API_URL}cameras?api_id=#{api_id}&api_key=#{api_key}"
jQuery.ajax(settings)
clearForm = ->
$("#camera-id").val('')
$("#camera-id").removeClass('valid').removeClass("invalid")
$("#camera-name").val('')
$("#camera-name").removeClass('valid').removeClass("invalid")
$("#user-email").val('')
$("#user-email").removeClass('valid').removeClass("invalid")
$("#username").val('')
$("#username").removeClass('valid').removeClass("invalid")
$("#user-password").val('')
$("#user-password").removeClass('valid').removeClass("invalid")
$("#camera-username").val('')
$("#camera-password").val('')
$("#camera-port").val('')
$("#camera-port").removeClass('valid').removeClass("invalid")
$("#camera-url").val('')
$("#camera-url").removeClass('valid').removeClass("invalid")
$("#camera-snapshot-url").val('')
$("#camera-snapshot-url").removeClass('valid').removeClass("invalid")
$("#camera-vendor").val('')
$("#camera-model option").remove()
$("#camera-model").append('<option value="">Unknown / Not specified</option>');
switchTab("user-create", "camera-details")
$("#required-authentication").removeAttr("checked")
$("#authentication").addClass("hide")
$("#message-camera-info").addClass("hide")
$("#message-user-create").addClass("hide")
$("#testimg").attr('src', '')
$(".snapshot-msg").hide()
$("#test-snapshot").show()
$("#continue-step2").hide()
API_ID = ''
API_Key = ''
onClickTabs = ->
$(".nav-steps li").on 'click', ->
if !gotSnapshot
return
previousTab = $(".nav-steps li.active").attr("href")
$(".nav-steps li").removeClass('active')
currentTab = $(this).attr("href")
$(this).addClass('active')
$("#{previousTab}").fadeOut(300, ->
$("#{currentTab}").fadeIn(300)
)
switchTab = (hideTab, showTab) ->
$(".nav-steps li").removeClass('active')
$("##{hideTab}").fadeOut(300, ->
$("##{showTab}").fadeIn(300)
)
$("#li-#{showTab}").addClass('active')
initAddCamera = ->
url = window.location.origin
embedCode = '<div evercam="add-camera-public"></div>' + '<script type="text/javascript" src="' + url + '/widgets/add.camera.js"></script>'
$('#code').html embedCode
$('.placeholder').empty()
iframe = jQuery('<iframe />').css(
'overflow': 'hidden'
'width': '100%'
'height': '420px').attr(
'src': '/widgets/cameras/public/add'
'frameborder': '0'
scrolling: 'no').appendTo('.placeholder')
return
resizeIframe = (iframeControl) ->
iframeWindow = iframeControl
iframeControl.style.height = iframeControl.contentWindow.document.body.scrollHeight + 'px'
return
handleWindowResize = ->
$(window).resize ->
if !iframeWindow
return
resizeIframe iframeWindow
return
window.initializeAddCameraPublic = ->
useAuthentication()
loadVendors()
handleVendorModelEvents()
handleInputEvents()
testSnapshot()
handleContinueBtn()
createUserAccount()
onClickTabs()
window.initializeAddCamera = ->
initAddCamera();
$("#code").on "click", ->
this.select();
handleWindowResize()
$(window, document, undefined).ready ->
wskCheckbox = do ->
wskCheckboxes = []
SPACE_KEY = 32
addEventHandler = (elem, eventType, handler) ->
if elem.addEventListener
elem.addEventListener eventType, handler, false
else if elem.attachEvent
elem.attachEvent 'on' + eventType, handler
return
clickHandler = (e) ->
e.stopPropagation()
if @className.indexOf('checked') < 0
@className += ' checked'
else
@className = 'chk-span'
return
keyHandler = (e) ->
e.stopPropagation()
if e.keyCode == SPACE_KEY
clickHandler.call this, e
# Also update the checkbox state.
cbox = document.getElementById(@parentNode.getAttribute('for'))
cbox.checked = !cbox.checked
return
clickHandlerLabel = (e) ->
id = @getAttribute('for')
i = wskCheckboxes.length
while i--
if wskCheckboxes[i].id == id
if wskCheckboxes[i].checkbox.className.indexOf('checked') < 0
wskCheckboxes[i].checkbox.className += ' checked'
else
wskCheckboxes[i].checkbox.className = 'chk-span'
break
return
findCheckBoxes = ->
labels = document.getElementsByTagName('label')
i = labels.length
while i--
posCheckbox = document.getElementById(labels[i].getAttribute('for'))
if posCheckbox != null and posCheckbox.type == 'checkbox'
text = labels[i].innerText
span = document.createElement('span')
span.className = 'chk-span'
span.tabIndex = i
labels[i].insertBefore span, labels[i].firstChild
addEventHandler span, 'click', clickHandler
addEventHandler span, 'keyup', keyHandler
addEventHandler labels[i], 'click', clickHandlerLabel
wskCheckboxes.push
'checkbox': span
'id': labels[i].getAttribute('for')
return
{ init: findCheckBoxes }
wskCheckbox.init()
return
|
[
{
"context": "datoms[ idx ] = lets d, ( d ) ->\n d.$key = \"#{sigil}#{p.tagname}\"\n d.id = p.id if p.id?\n ",
"end": 3783,
"score": 0.9524312019348145,
"start": 3772,
"tag": "KEY",
"value": "\"#{sigil}#{"
},
{
"context": "s d, ( d ) ->\n d.$key = \"#{sig... | dev/paragate/src/old-intertext-code/mkts.coffee | loveencounterflow/hengist | 0 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'INTERTEXT/MKTS'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
{ jr, } = CND
#...........................................................................................................
DATOM = new ( require 'datom' ).Datom { dirty: false, }
{ new_datom
lets
select } = DATOM.export()
types = require './types'
{ isa
validate
# cast
# declare
# declare_cast
# check
# sad
# is_sad
# is_happy
type_of } = types
#...........................................................................................................
HtmlParser = require 'atlas-html-stream'
assign = Object.assign
HTML = null
#-----------------------------------------------------------------------------------------------------------
@html_from_datoms = ( P... ) => ( HTML ?= ( require '..' ).HTML ).html_from_datoms P...
@$html_from_datoms = ( P... ) => ( HTML ?= ( require '..' ).HTML ).$html_from_datoms P...
#===========================================================================================================
# PARSING
#-----------------------------------------------------------------------------------------------------------
@_find_next_tag = ( text, prv_idx = 0 ) ->
idx_0 = text.indexOf '<', prv_idx
idx_1 = text.indexOf '>', prv_idx
if ( idx_0 < 0 )
return [ null, null, ] if ( idx_1 < 0 )
throw new Error "Syntax error: closing but no opening bracket"
throw new Error "Syntax error: opening but no closing bracket" if ( idx_1 < 0 )
throw new Error "Syntax error: closing before opening bracket" if ( idx_1 < idx_0 )
throw new Error "Syntax error: closing bracket too close to opening bracket" if ( idx_1 < idx_0 + 2 )
throw new Error "Syntax error: whitespace not allowed here" if /\s/.test text[ idx_0 + 1 ]
idx_2 = text.indexOf '<', idx_0 + 1
throw new Error "Syntax error: additional opening bracket" if ( 0 < idx_2 < idx_1 )
return [ idx_0, idx_1, ]
#-----------------------------------------------------------------------------------------------------------
@_analyze_compact_tag_syntax = ( datoms ) ->
###
compact syntax for HTMLish tags:
`<div#c432.foo.bar>...</div>` => `<div id=c432 class='foo bar'>...</div>`
`<p.noindent>...</p>` => `<p class=noindent>...</p>`
positional arguments (not yet implemented):
`<columns=2>` => `<columns count=2/>` => `<columns count=2></columns>` ?=> `<mkts-columns count=2></mkts-columns>`
`<multiply =2 =3>` (???)
NB Svelte uses capitalized names, allows self-closing tags(!): `<Mytag/>`
###
HTML ?= ( require '..' ).HTML
for d, idx in datoms
sigil = d.$key[ 0 ]
compact_tagname = d.$key[ 1 .. ]
p = HTML._parse_compact_tagname compact_tagname
continue if ( p.tagname is compact_tagname ) and ( not p.id? ) and ( not p.class? )
datoms[ idx ] = lets d, ( d ) ->
d.$key = "#{sigil}#{p.tagname}"
d.id = p.id if p.id?
d.class = p.class if p.class?
return datoms
#-----------------------------------------------------------------------------------------------------------
@datoms_from_html = ( text ) ->
R = []
prv_idx = 0
prv_idx_1 = -1
HTML ?= ( require '..' ).HTML
#.........................................................................................................
loop
#.......................................................................................................
try
[ idx_0, idx_1, ] = @_find_next_tag text, prv_idx
catch error
throw error unless /Syntax error/.test error.message
source = text[ prv_idx .. ]
R.push new_datom '~error', {
message: "#{error.message}: #{jr source}",
type: 'mkts-syntax-html',
source: source, }
return R
#.......................................................................................................
if idx_0 > prv_idx_1 + 1
R.push new_datom '^text', { text: text[ prv_idx_1 + 1 ... idx_0 ].toString(), }
break unless idx_0?
tags = @_analyze_compact_tag_syntax HTML.datoms_from_html text[ idx_0 .. idx_1 ]
if text[ idx_1 - 1 ] is '/'
R.push d = lets tags[ 0 ], ( d ) -> d.$key = '^' + d.$key[ 1 .. ]
else
R.push tags...
prv_idx_1 = idx_1
prv_idx = idx_1 + 1
#.........................................................................................................
# debug '7776^', rpr { prv_idx, prv_idx_1, idx_0, idx_1, length: text.length, }
if prv_idx < text.length
R.push new_datom '^text', { text: text[ prv_idx_1 + 1 .. ].toString(), }
return R
#-----------------------------------------------------------------------------------------------------------
@$datoms_from_html = ->
{ $, } = ( require 'steampipes' ).export()
return $ ( buffer_or_text, send ) =>
send d for d in @datoms_from_html buffer_or_text
return null
############################################################################################################
if module is require.main then do => # await do =>
help 'ok'
###
# #-----------------------------------------------------------------------------------------------------------
# @html5_block_level_tagnames = new Set """address article aside blockquote dd details dialog div dl dt
# fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 header hgroup hr li main nav ol p pre section table
# td th ul""".split /\s+/
# #-----------------------------------------------------------------------------------------------------------
# @_new_datom = ( name, data, text ) ->
# return new_datom '^text', { text, } if text?
# #.........................................................................................................
# is_block = @html5_block_level_tagnames.has name
# unless data?
# return new_datom '>' + name unless is_block
# return new_datom '>' + name, { is_block, }
# #.........................................................................................................
# has_keys = false
# for key, value of data
# has_keys = true
# data[ key ] = true if value is ''
# #.........................................................................................................
# unless has_keys
# return new_datom '<' + name unless is_block
# return new_datom '<' + name, { is_block, }
# #.........................................................................................................
# return new_datom '<' + name, { data, } unless is_block
# return new_datom '<' + name, { data, is_block, }
#-----------------------------------------------------------------------------------------------------------
# @_new_parse_method = ( piecemeal ) ->
# R = null
# parser = new HtmlParser { preserveWS: true, }
# #.........................................................................................................
# parser.on 'data', ( { name, data, text, } ) => R.push @_new_datom name, data, text
# parser.on 'error', ( error ) -> throw error
# # parser.on 'end', -> R.push new_datom '^stop'
# #.........................................................................................................
# R = ( html ) =>
# R = []
# parser.write html
# unless piecemeal
# parser.flushText()
# parser.reset()
# return R
# #.........................................................................................................
# R.flush = -> parser.flushText()
# R.reset = -> parser.reset()
# return R
#-----------------------------------------------------------------------------------------------------------
class Htmlparser extends Multimix
# @extend object_with_class_properties
@include L
#---------------------------------------------------------------------------------------------------------
constructor: ( @settings = null ) ->
super()
### | 24039 |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'INTERTEXT/MKTS'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
{ jr, } = CND
#...........................................................................................................
DATOM = new ( require 'datom' ).Datom { dirty: false, }
{ new_datom
lets
select } = DATOM.export()
types = require './types'
{ isa
validate
# cast
# declare
# declare_cast
# check
# sad
# is_sad
# is_happy
type_of } = types
#...........................................................................................................
HtmlParser = require 'atlas-html-stream'
assign = Object.assign
HTML = null
#-----------------------------------------------------------------------------------------------------------
@html_from_datoms = ( P... ) => ( HTML ?= ( require '..' ).HTML ).html_from_datoms P...
@$html_from_datoms = ( P... ) => ( HTML ?= ( require '..' ).HTML ).$html_from_datoms P...
#===========================================================================================================
# PARSING
#-----------------------------------------------------------------------------------------------------------
@_find_next_tag = ( text, prv_idx = 0 ) ->
idx_0 = text.indexOf '<', prv_idx
idx_1 = text.indexOf '>', prv_idx
if ( idx_0 < 0 )
return [ null, null, ] if ( idx_1 < 0 )
throw new Error "Syntax error: closing but no opening bracket"
throw new Error "Syntax error: opening but no closing bracket" if ( idx_1 < 0 )
throw new Error "Syntax error: closing before opening bracket" if ( idx_1 < idx_0 )
throw new Error "Syntax error: closing bracket too close to opening bracket" if ( idx_1 < idx_0 + 2 )
throw new Error "Syntax error: whitespace not allowed here" if /\s/.test text[ idx_0 + 1 ]
idx_2 = text.indexOf '<', idx_0 + 1
throw new Error "Syntax error: additional opening bracket" if ( 0 < idx_2 < idx_1 )
return [ idx_0, idx_1, ]
#-----------------------------------------------------------------------------------------------------------
@_analyze_compact_tag_syntax = ( datoms ) ->
###
compact syntax for HTMLish tags:
`<div#c432.foo.bar>...</div>` => `<div id=c432 class='foo bar'>...</div>`
`<p.noindent>...</p>` => `<p class=noindent>...</p>`
positional arguments (not yet implemented):
`<columns=2>` => `<columns count=2/>` => `<columns count=2></columns>` ?=> `<mkts-columns count=2></mkts-columns>`
`<multiply =2 =3>` (???)
NB Svelte uses capitalized names, allows self-closing tags(!): `<Mytag/>`
###
HTML ?= ( require '..' ).HTML
for d, idx in datoms
sigil = d.$key[ 0 ]
compact_tagname = d.$key[ 1 .. ]
p = HTML._parse_compact_tagname compact_tagname
continue if ( p.tagname is compact_tagname ) and ( not p.id? ) and ( not p.class? )
datoms[ idx ] = lets d, ( d ) ->
d.$key = <KEY>p.tagname<KEY>}"
d.id = p.id if p.id?
d.class = p.class if p.class?
return datoms
#-----------------------------------------------------------------------------------------------------------
@datoms_from_html = ( text ) ->
R = []
prv_idx = 0
prv_idx_1 = -1
HTML ?= ( require '..' ).HTML
#.........................................................................................................
loop
#.......................................................................................................
try
[ idx_0, idx_1, ] = @_find_next_tag text, prv_idx
catch error
throw error unless /Syntax error/.test error.message
source = text[ prv_idx .. ]
R.push new_datom '~error', {
message: "#{error.message}: #{jr source}",
type: 'mkts-syntax-html',
source: source, }
return R
#.......................................................................................................
if idx_0 > prv_idx_1 + 1
R.push new_datom '^text', { text: text[ prv_idx_1 + 1 ... idx_0 ].toString(), }
break unless idx_0?
tags = @_analyze_compact_tag_syntax HTML.datoms_from_html text[ idx_0 .. idx_1 ]
if text[ idx_1 - 1 ] is '/'
R.push d = lets tags[ 0 ], ( d ) -> d.$key = '^' + d.$key[ 1 .. ]
else
R.push tags...
prv_idx_1 = idx_1
prv_idx = idx_1 + 1
#.........................................................................................................
# debug '7776^', rpr { prv_idx, prv_idx_1, idx_0, idx_1, length: text.length, }
if prv_idx < text.length
R.push new_datom '^text', { text: text[ prv_idx_1 + 1 .. ].toString(), }
return R
#-----------------------------------------------------------------------------------------------------------
@$datoms_from_html = ->
{ $, } = ( require 'steampipes' ).export()
return $ ( buffer_or_text, send ) =>
send d for d in @datoms_from_html buffer_or_text
return null
############################################################################################################
if module is require.main then do => # await do =>
help 'ok'
###
# #-----------------------------------------------------------------------------------------------------------
# @html5_block_level_tagnames = new Set """address article aside blockquote dd details dialog div dl dt
# fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 header hgroup hr li main nav ol p pre section table
# td th ul""".split /\s+/
# #-----------------------------------------------------------------------------------------------------------
# @_new_datom = ( name, data, text ) ->
# return new_datom '^text', { text, } if text?
# #.........................................................................................................
# is_block = @html5_block_level_tagnames.has name
# unless data?
# return new_datom '>' + name unless is_block
# return new_datom '>' + name, { is_block, }
# #.........................................................................................................
# has_keys = false
# for key, value of data
# has_keys = true
# data[ key ] = true if value is ''
# #.........................................................................................................
# unless has_keys
# return new_datom '<' + name unless is_block
# return new_datom '<' + name, { is_block, }
# #.........................................................................................................
# return new_datom '<' + name, { data, } unless is_block
# return new_datom '<' + name, { data, is_block, }
#-----------------------------------------------------------------------------------------------------------
# @_new_parse_method = ( piecemeal ) ->
# R = null
# parser = new HtmlParser { preserveWS: true, }
# #.........................................................................................................
# parser.on 'data', ( { name, data, text, } ) => R.push @_new_datom name, data, text
# parser.on 'error', ( error ) -> throw error
# # parser.on 'end', -> R.push new_datom '^stop'
# #.........................................................................................................
# R = ( html ) =>
# R = []
# parser.write html
# unless piecemeal
# parser.flushText()
# parser.reset()
# return R
# #.........................................................................................................
# R.flush = -> parser.flushText()
# R.reset = -> parser.reset()
# return R
#-----------------------------------------------------------------------------------------------------------
class Htmlparser extends Multimix
# @extend object_with_class_properties
@include L
#---------------------------------------------------------------------------------------------------------
constructor: ( @settings = null ) ->
super()
### | true |
'use strict'
############################################################################################################
CND = require 'cnd'
rpr = CND.rpr
badge = 'INTERTEXT/MKTS'
log = CND.get_logger 'plain', badge
info = CND.get_logger 'info', badge
whisper = CND.get_logger 'whisper', badge
alert = CND.get_logger 'alert', badge
debug = CND.get_logger 'debug', badge
warn = CND.get_logger 'warn', badge
help = CND.get_logger 'help', badge
urge = CND.get_logger 'urge', badge
echo = CND.echo.bind CND
{ jr, } = CND
#...........................................................................................................
DATOM = new ( require 'datom' ).Datom { dirty: false, }
{ new_datom
lets
select } = DATOM.export()
types = require './types'
{ isa
validate
# cast
# declare
# declare_cast
# check
# sad
# is_sad
# is_happy
type_of } = types
#...........................................................................................................
HtmlParser = require 'atlas-html-stream'
assign = Object.assign
HTML = null
#-----------------------------------------------------------------------------------------------------------
@html_from_datoms = ( P... ) => ( HTML ?= ( require '..' ).HTML ).html_from_datoms P...
@$html_from_datoms = ( P... ) => ( HTML ?= ( require '..' ).HTML ).$html_from_datoms P...
#===========================================================================================================
# PARSING
#-----------------------------------------------------------------------------------------------------------
@_find_next_tag = ( text, prv_idx = 0 ) ->
idx_0 = text.indexOf '<', prv_idx
idx_1 = text.indexOf '>', prv_idx
if ( idx_0 < 0 )
return [ null, null, ] if ( idx_1 < 0 )
throw new Error "Syntax error: closing but no opening bracket"
throw new Error "Syntax error: opening but no closing bracket" if ( idx_1 < 0 )
throw new Error "Syntax error: closing before opening bracket" if ( idx_1 < idx_0 )
throw new Error "Syntax error: closing bracket too close to opening bracket" if ( idx_1 < idx_0 + 2 )
throw new Error "Syntax error: whitespace not allowed here" if /\s/.test text[ idx_0 + 1 ]
idx_2 = text.indexOf '<', idx_0 + 1
throw new Error "Syntax error: additional opening bracket" if ( 0 < idx_2 < idx_1 )
return [ idx_0, idx_1, ]
#-----------------------------------------------------------------------------------------------------------
@_analyze_compact_tag_syntax = ( datoms ) ->
###
compact syntax for HTMLish tags:
`<div#c432.foo.bar>...</div>` => `<div id=c432 class='foo bar'>...</div>`
`<p.noindent>...</p>` => `<p class=noindent>...</p>`
positional arguments (not yet implemented):
`<columns=2>` => `<columns count=2/>` => `<columns count=2></columns>` ?=> `<mkts-columns count=2></mkts-columns>`
`<multiply =2 =3>` (???)
NB Svelte uses capitalized names, allows self-closing tags(!): `<Mytag/>`
###
HTML ?= ( require '..' ).HTML
for d, idx in datoms
sigil = d.$key[ 0 ]
compact_tagname = d.$key[ 1 .. ]
p = HTML._parse_compact_tagname compact_tagname
continue if ( p.tagname is compact_tagname ) and ( not p.id? ) and ( not p.class? )
datoms[ idx ] = lets d, ( d ) ->
d.$key = PI:KEY:<KEY>END_PIp.tagnamePI:KEY:<KEY>END_PI}"
d.id = p.id if p.id?
d.class = p.class if p.class?
return datoms
#-----------------------------------------------------------------------------------------------------------
@datoms_from_html = ( text ) ->
R = []
prv_idx = 0
prv_idx_1 = -1
HTML ?= ( require '..' ).HTML
#.........................................................................................................
loop
#.......................................................................................................
try
[ idx_0, idx_1, ] = @_find_next_tag text, prv_idx
catch error
throw error unless /Syntax error/.test error.message
source = text[ prv_idx .. ]
R.push new_datom '~error', {
message: "#{error.message}: #{jr source}",
type: 'mkts-syntax-html',
source: source, }
return R
#.......................................................................................................
if idx_0 > prv_idx_1 + 1
R.push new_datom '^text', { text: text[ prv_idx_1 + 1 ... idx_0 ].toString(), }
break unless idx_0?
tags = @_analyze_compact_tag_syntax HTML.datoms_from_html text[ idx_0 .. idx_1 ]
if text[ idx_1 - 1 ] is '/'
R.push d = lets tags[ 0 ], ( d ) -> d.$key = '^' + d.$key[ 1 .. ]
else
R.push tags...
prv_idx_1 = idx_1
prv_idx = idx_1 + 1
#.........................................................................................................
# debug '7776^', rpr { prv_idx, prv_idx_1, idx_0, idx_1, length: text.length, }
if prv_idx < text.length
R.push new_datom '^text', { text: text[ prv_idx_1 + 1 .. ].toString(), }
return R
#-----------------------------------------------------------------------------------------------------------
@$datoms_from_html = ->
{ $, } = ( require 'steampipes' ).export()
return $ ( buffer_or_text, send ) =>
send d for d in @datoms_from_html buffer_or_text
return null
############################################################################################################
if module is require.main then do => # await do =>
help 'ok'
###
# #-----------------------------------------------------------------------------------------------------------
# @html5_block_level_tagnames = new Set """address article aside blockquote dd details dialog div dl dt
# fieldset figcaption figure footer form h1 h2 h3 h4 h5 h6 header hgroup hr li main nav ol p pre section table
# td th ul""".split /\s+/
# #-----------------------------------------------------------------------------------------------------------
# @_new_datom = ( name, data, text ) ->
# return new_datom '^text', { text, } if text?
# #.........................................................................................................
# is_block = @html5_block_level_tagnames.has name
# unless data?
# return new_datom '>' + name unless is_block
# return new_datom '>' + name, { is_block, }
# #.........................................................................................................
# has_keys = false
# for key, value of data
# has_keys = true
# data[ key ] = true if value is ''
# #.........................................................................................................
# unless has_keys
# return new_datom '<' + name unless is_block
# return new_datom '<' + name, { is_block, }
# #.........................................................................................................
# return new_datom '<' + name, { data, } unless is_block
# return new_datom '<' + name, { data, is_block, }
#-----------------------------------------------------------------------------------------------------------
# @_new_parse_method = ( piecemeal ) ->
# R = null
# parser = new HtmlParser { preserveWS: true, }
# #.........................................................................................................
# parser.on 'data', ( { name, data, text, } ) => R.push @_new_datom name, data, text
# parser.on 'error', ( error ) -> throw error
# # parser.on 'end', -> R.push new_datom '^stop'
# #.........................................................................................................
# R = ( html ) =>
# R = []
# parser.write html
# unless piecemeal
# parser.flushText()
# parser.reset()
# return R
# #.........................................................................................................
# R.flush = -> parser.flushText()
# R.reset = -> parser.reset()
# return R
#-----------------------------------------------------------------------------------------------------------
class Htmlparser extends Multimix
# @extend object_with_class_properties
@include L
#---------------------------------------------------------------------------------------------------------
constructor: ( @settings = null ) ->
super()
### |
[
{
"context": " \"Model Sin\", ()->\n sin = new Sin\n name: \"name\"\n icon: \"icon\"\n explanation: \"text\"\n ",
"end": 114,
"score": 0.7901666760444641,
"start": 110,
"tag": "NAME",
"value": "name"
}
] | public/coffeescripts/tests/models/sin.coffee | googleheim/ablass | 0 | define ["sin", "sin_view"], (Sin, SinItemView)->
describe "Model Sin", ()->
sin = new Sin
name: "name"
icon: "icon"
explanation: "text"
id: 23
sin_view = new SinItemView( {model: sin} )
it "should have attributes", ()->
expect(sin.get("name")).toBe("name")
it "should parse a template with given attributes", ()->
sin_view.render()
expect(sin_view.$el).toBeTruthy()
expect(sin_view.$el.find("div.name").html()).toBe("name")
expect(sin_view.$el.find("div.icon").html()).toMatch(/<img/)
expect(sin_view.$el.find("div.explanation").html()).toBe("text")
| 220744 | define ["sin", "sin_view"], (Sin, SinItemView)->
describe "Model Sin", ()->
sin = new Sin
name: "<NAME>"
icon: "icon"
explanation: "text"
id: 23
sin_view = new SinItemView( {model: sin} )
it "should have attributes", ()->
expect(sin.get("name")).toBe("name")
it "should parse a template with given attributes", ()->
sin_view.render()
expect(sin_view.$el).toBeTruthy()
expect(sin_view.$el.find("div.name").html()).toBe("name")
expect(sin_view.$el.find("div.icon").html()).toMatch(/<img/)
expect(sin_view.$el.find("div.explanation").html()).toBe("text")
| true | define ["sin", "sin_view"], (Sin, SinItemView)->
describe "Model Sin", ()->
sin = new Sin
name: "PI:NAME:<NAME>END_PI"
icon: "icon"
explanation: "text"
id: 23
sin_view = new SinItemView( {model: sin} )
it "should have attributes", ()->
expect(sin.get("name")).toBe("name")
it "should parse a template with given attributes", ()->
sin_view.render()
expect(sin_view.$el).toBeTruthy()
expect(sin_view.$el.find("div.name").html()).toBe("name")
expect(sin_view.$el.find("div.icon").html()).toMatch(/<img/)
expect(sin_view.$el.find("div.explanation").html()).toBe("text")
|
[
{
"context": "#\t> File Name: index.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@gmail.com\n#\t> Created Time: W",
"end": 42,
"score": 0.9991358518600464,
"start": 40,
"tag": "USERNAME",
"value": "LY"
},
{
"context": "> File Name: index.coffee\n#\t> Author: LY\n#\t> Mail: ly.franky@... | server/routes/index.coffee | wiiliamking/miac-website | 0 | # > File Name: index.coffee
# > Author: LY
# > Mail: ly.franky@gmail.com
# > Created Time: Wednesday, November 19, 2014 AM10:44:10 CST
express = require 'express'
router = express.Router()
router.get '/', (req, res)->
res.render('index')
module.exports = router
| 225996 | # > File Name: index.coffee
# > Author: LY
# > Mail: <EMAIL>
# > Created Time: Wednesday, November 19, 2014 AM10:44:10 CST
express = require 'express'
router = express.Router()
router.get '/', (req, res)->
res.render('index')
module.exports = router
| true | # > File Name: index.coffee
# > Author: LY
# > Mail: PI:EMAIL:<EMAIL>END_PI
# > Created Time: Wednesday, November 19, 2014 AM10:44:10 CST
express = require 'express'
router = express.Router()
router.get '/', (req, res)->
res.render('index')
module.exports = router
|
[
{
"context": "png\" />\n <div className=\"team-member-name\">Paola Bouley</div>\n <div className=\"team-member-title\">",
"end": 374,
"score": 0.9997668266296387,
"start": 362,
"tag": "NAME",
"value": "Paola Bouley"
},
{
"context": "png\" />\n <div className=\"te... | app/pages/about/team.cjsx | zooniverse/wildcam-gorongosa-facebook | 7 | React = require 'react'
module.exports = React.createClass
displayName: 'Team'
render: ->
<div className="secondary-page">
<h2 className="team-page-header">Lion Research Project Team</h2>
<div className="team-list">
<div className="team-member">
<img src="assets/about/team/paola.png" />
<div className="team-member-name">Paola Bouley</div>
<div className="team-member-title">Lion Project Director & Co-founder</div>
</div>
<div className="team-member">
<img src="assets/about/team/rui.png" />
<div className="team-member-name">Rui Branco</div>
<div className="team-member-title">Wildlife Veterinarian & Co-founder</div>
</div>
<div className="team-member">
<img src="assets/about/team/celina.png" />
<div className="team-member-name">Celina Dias</div>
<div className="team-member-title">Lion Researcher</div>
</div>
<div className="team-member">
<img src="assets/about/team/domingas.png" />
<div className="team-member-name">Domingas Alexis</div>
<div className="team-member-title">Assistant Lion Researcher</div>
</div>
<div className="team-member">
<img src="assets/about/team/tonga.png" />
<div className="team-member-name">Tonga Torcida</div>
<div className="team-member-title">Assistant Researcher (Seasonal)</div>
</div>
<div className="team-member">
<img src="assets/about/team/fernandinho.png" />
<div className="team-member-name">Fernandinho Pedro</div>
<div className="team-member-title">Assistant Researcher (Seasonal)</div>
</div>
<div className="team-member">
<img src="assets/about/team/isaquel_nginga.png" />
<div className="team-member-name">Isaquel Nginga</div>
<div className="team-member-title">Lion Project Intern</div>
</div>
</div>
</div>
| 224762 | React = require 'react'
module.exports = React.createClass
displayName: 'Team'
render: ->
<div className="secondary-page">
<h2 className="team-page-header">Lion Research Project Team</h2>
<div className="team-list">
<div className="team-member">
<img src="assets/about/team/paola.png" />
<div className="team-member-name"><NAME></div>
<div className="team-member-title">Lion Project Director & Co-founder</div>
</div>
<div className="team-member">
<img src="assets/about/team/rui.png" />
<div className="team-member-name"><NAME></div>
<div className="team-member-title">Wildlife Veterinarian & Co-founder</div>
</div>
<div className="team-member">
<img src="assets/about/team/celina.png" />
<div className="team-member-name"><NAME></div>
<div className="team-member-title">Lion Researcher</div>
</div>
<div className="team-member">
<img src="assets/about/team/domingas.png" />
<div className="team-member-name"><NAME></div>
<div className="team-member-title">Assistant Lion Researcher</div>
</div>
<div className="team-member">
<img src="assets/about/team/tonga.png" />
<div className="team-member-name"><NAME></div>
<div className="team-member-title">Assistant Researcher (Seasonal)</div>
</div>
<div className="team-member">
<img src="assets/about/team/fernandinho.png" />
<div className="team-member-name"><NAME></div>
<div className="team-member-title">Assistant Researcher (Seasonal)</div>
</div>
<div className="team-member">
<img src="assets/about/team/isaquel_nginga.png" />
<div className="team-member-name"><NAME></div>
<div className="team-member-title">Lion Project Intern</div>
</div>
</div>
</div>
| true | React = require 'react'
module.exports = React.createClass
displayName: 'Team'
render: ->
<div className="secondary-page">
<h2 className="team-page-header">Lion Research Project Team</h2>
<div className="team-list">
<div className="team-member">
<img src="assets/about/team/paola.png" />
<div className="team-member-name">PI:NAME:<NAME>END_PI</div>
<div className="team-member-title">Lion Project Director & Co-founder</div>
</div>
<div className="team-member">
<img src="assets/about/team/rui.png" />
<div className="team-member-name">PI:NAME:<NAME>END_PI</div>
<div className="team-member-title">Wildlife Veterinarian & Co-founder</div>
</div>
<div className="team-member">
<img src="assets/about/team/celina.png" />
<div className="team-member-name">PI:NAME:<NAME>END_PI</div>
<div className="team-member-title">Lion Researcher</div>
</div>
<div className="team-member">
<img src="assets/about/team/domingas.png" />
<div className="team-member-name">PI:NAME:<NAME>END_PI</div>
<div className="team-member-title">Assistant Lion Researcher</div>
</div>
<div className="team-member">
<img src="assets/about/team/tonga.png" />
<div className="team-member-name">PI:NAME:<NAME>END_PI</div>
<div className="team-member-title">Assistant Researcher (Seasonal)</div>
</div>
<div className="team-member">
<img src="assets/about/team/fernandinho.png" />
<div className="team-member-name">PI:NAME:<NAME>END_PI</div>
<div className="team-member-title">Assistant Researcher (Seasonal)</div>
</div>
<div className="team-member">
<img src="assets/about/team/isaquel_nginga.png" />
<div className="team-member-name">PI:NAME:<NAME>END_PI</div>
<div className="team-member-title">Lion Project Intern</div>
</div>
</div>
</div>
|
[
{
"context": "ed from CourseEnrollView\n\n data =\n name: 'Single Player'\n seats: 9999\n courseID: courseID\n ",
"end": 5433,
"score": 0.9983364343643188,
"start": 5420,
"tag": "NAME",
"value": "Single Player"
}
] | app/views/courses/CoursesView.coffee | SaintRamzes/codecombat | 1 | app = require 'core/application'
AuthModal = require 'views/core/AuthModal'
CocoCollection = require 'collections/CocoCollection'
Course = require 'models/Course'
CourseInstance = require 'models/CourseInstance'
RootView = require 'views/core/RootView'
template = require 'templates/courses/courses'
utils = require 'core/utils'
# TODO: Hour of Code (HoC) integration is a mess
module.exports = class CoursesView extends RootView
id: 'courses-view'
template: template
events:
'click .btn-buy': 'onClickBuy'
'click .btn-enroll': 'onClickEnroll'
'click .btn-enter': 'onClickEnter'
'click .btn-hoc-student-continue': 'onClickHOCStudentContinue'
'click .btn-student': 'onClickStudent'
'click .btn-teacher': 'onClickTeacher'
constructor: (options) ->
super(options)
@setUpHourOfCode()
@praise = utils.getCoursePraise()
@studentMode = Backbone.history.getFragment()?.indexOf('courses/students') >= 0
@courses = new CocoCollection([], { url: "/db/course", model: Course})
@supermodel.loadCollection(@courses, 'courses')
@courseInstances = new CocoCollection([], { url: "/db/user/#{me.id}/course_instances", model: CourseInstance})
@listenToOnce @courseInstances, 'sync', @onCourseInstancesLoaded
@supermodel.loadCollection(@courseInstances, 'course_instances')
if prepaidCode = utils.getQueryVariable('_ppc', false)
if me.isAnonymous()
@state = 'ppc_logged_out'
else
@studentMode = true
@courseEnrollByURL(prepaidCode)
setUpHourOfCode: ->
# If we are coming in at /hoc, then we show the landing page.
# If we have ?hoc=true (for the step after the landing page), then we show any HoC-specific instructions.
# If we haven't tracked this player as an hourOfCode player yet, and it's a new account, we do that now.
@hocLandingPage = Backbone.history.getFragment()?.indexOf('hoc') >= 0
@hocMode = utils.getQueryVariable('hoc', false)
elapsed = new Date() - new Date(me.get('dateCreated'))
if not me.get('hourOfCode') and (@hocLandingPage or @hocMode) and elapsed < 5 * 60 * 1000
me.set('hourOfCode', true)
me.patch()
$('body').append($('<img src="https://code.org/api/hour/begin_codecombat.png" style="visibility: hidden;">'))
application.tracker?.trackEvent 'Hour of Code Begin'
if me.get('hourOfCode') and elapsed < 24 * 60 * 60 * 1000
@hocMode = true # If they really just arrived, make sure we're still in hocMode even if they lost ?hoc=true.
getRenderData: ->
context = super()
context.courses = @courses.models ? []
context.enrolledCourses = @enrolledCourses ? {}
context.hocLandingPage = @hocLandingPage
context.hocMode = @hocMode
context.instances = @courseInstances.models ? []
context.praise = @praise
context.state = @state
context.stateMessage = @stateMessage
context.studentMode = @studentMode
context
afterRender: ->
super()
@setupCoursesFAQPopover()
onCourseInstancesLoaded: ->
@enrolledCourses = {}
@enrolledCourses[courseInstance.get('courseID')] = true for courseInstance in @courseInstances.models
setupCoursesFAQPopover: ->
popoverTitle = "<h3>" + $.i18n.t('courses.faq') + "<button type='button' class='close' onclick='$('.courses-faq').popover('hide');'>×</button></h3>"
popoverContent = "<p><strong>" + $.i18n.t('courses.question') + "</strong> " + $.i18n.t('courses.question1') + "</p>"
popoverContent += "<p><strong>" + $.i18n.t('courses.answer') + "</strong> " + $.i18n.t('courses.answer1') + "</p>"
popoverContent += "<p>" + $.i18n.t('courses.answer2') + "</p>"
@$el.find('.courses-faq').popover(
animation: true
html: true
placement: 'top'
trigger: 'click'
title: popoverTitle
content: popoverContent
container: @$el
).on 'shown.bs.popover', =>
application.tracker?.trackEvent 'Subscription payment methods hover'
onClickBuy: (e) ->
$('.continue-dialog').modal('hide')
courseID = $(e.target).data('course-id')
route = "/courses/enroll/#{courseID}"
viewClass = require 'views/courses/CourseEnrollView'
viewArgs = [{}, courseID]
navigationEvent = route: route, viewClass: viewClass, viewArgs: viewArgs
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickEnroll: (e) ->
return @openModalView new AuthModal() if me.isAnonymous()
courseID = $(e.target).data('course-id')
prepaidCode = ($(".code-input[data-course-id=#{courseID}]").val() ? '').trim()
@courseEnrollByModal(prepaidCode)
onClickEnter: (e) ->
$('.continue-dialog').modal('hide')
courseID = $(e.target).data('course-id')
courseInstanceID = $(".select-session[data-course-id=#{courseID}]").val()
route = "/courses/#{courseID}/#{courseInstanceID}"
viewClass = require 'views/courses/CourseDetailsView'
viewArgs = [{}, courseID, courseInstanceID]
navigationEvent = route: route, viewClass: viewClass, viewArgs: viewArgs
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickHOCStudentContinue: (e) ->
$('.continue-dialog').modal('hide')
if e
courseID = $(e.target).data('course-id')
else
courseID = '560f1a9f22961295f9427742'
@state = 'enrolling'
@stateMessage = undefined
@render?()
# TODO: Copied from CourseEnrollView
data =
name: 'Single Player'
seats: 9999
courseID: courseID
hourOfCode: true
jqxhr = $.post('/db/course_instance/-/create', data)
jqxhr.done (data, textStatus, jqXHR) =>
application.tracker?.trackEvent 'Finished HoC student course creation', {courseID: courseID}
# TODO: handle fetch errors
me.fetch(cache: false).always =>
courseID = courseID
route = "/courses/#{courseID}"
viewArgs = [{}, courseID]
if data?.length > 0
courseInstanceID = data[0]._id
route += "/#{courseInstanceID}"
viewArgs[0].courseInstanceID = courseInstanceID
Backbone.Mediator.publish 'router:navigate',
route: route
viewClass: 'views/courses/CourseDetailsView'
viewArgs: viewArgs
jqxhr.fail (xhr, textStatus, errorThrown) =>
console.error 'Got an error purchasing a course:', textStatus, errorThrown
application.tracker?.trackEvent 'Failed HoC student course creation', status: textStatus
if xhr.status is 402
@state = 'declined'
@stateMessage = arguments[2]
else
@state = 'unknown_error'
@stateMessage = "#{xhr.status}: #{xhr.responseText}"
@render?()
onClickStudent: (e) ->
if @supermodel.finished() and @hocLandingPage
# Automatically enroll in first course
@onClickHOCStudentContinue()
return
route = "/courses/students"
route += "?hoc=true" if @hocLandingPage or @hocMode
viewClass = require 'views/courses/CoursesView'
navigationEvent = route: route, viewClass: viewClass, viewArgs: []
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickTeacher: (e) ->
route = "/courses/teachers"
route += "?hoc=true" if @hocLandingPage or @hocMode
viewClass = require 'views/courses/CoursesView'
navigationEvent = route: route, viewClass: viewClass, viewArgs: []
Backbone.Mediator.publish 'router:navigate', navigationEvent
courseEnrollByURL: (prepaidCode) ->
@state = 'enrolling'
@render?()
$.ajax({
method: 'POST'
url: '/db/course_instance/-/redeem_prepaid'
data: prepaidCode: prepaidCode
context: @
success: @onRedeemPrepaidSuccess
error: (xhr, textStatus, errorThrown) ->
console.error 'Got an error redeeming a course prepaid code:', textStatus, errorThrown
application.tracker?.trackEvent 'Failed to redeem course prepaid code by url', status: textStatus
@state = 'unknown_error'
@stateMessage = "Failed to redeem code: #{xhr.responseText}"
@render?()
})
courseEnrollByModal: (prepaidCode) ->
@state = 'enrolling-by-modal'
@renderSelectors '.student-dialog-state-row'
$.ajax({
method: 'POST'
url: '/db/course_instance/-/redeem_prepaid'
data: prepaidCode: prepaidCode
context: @
success: ->
$('.continue-dialog').modal('hide')
@onRedeemPrepaidSuccess(arguments...)
error: (jqxhr, textStatus, errorThrown) ->
application.tracker?.trackEvent 'Failed to redeem course prepaid code by modal', status: textStatus
@state = 'unknown_error'
if jqxhr.status is 422
@stateMessage = 'Please enter a code.'
else if jqxhr.status is 404
@stateMessage = 'Code not found.'
else
@stateMessage = "#{jqxhr.responseText}"
@renderSelectors '.student-dialog-state-row'
})
onRedeemPrepaidSuccess: (data, textStatus, jqxhr) ->
prepaidID = data[0]?.prepaidID
application.tracker?.trackEvent 'Redeemed course prepaid code', {prepaidCode: prepaidID}
me.fetch(cache: false).always =>
if data?.length > 0 && data[0].courseID && data[0]._id
courseID = data[0].courseID
courseInstanceID = data[0]._id
route = "/courses/#{courseID}/#{courseInstanceID}"
viewArgs = [{}, courseID, courseInstanceID]
Backbone.Mediator.publish 'router:navigate',
route: route
viewClass: 'views/courses/CourseDetailsView'
viewArgs: viewArgs
else
@state = 'unknown_error'
@stateMessage = "Database error."
@render?()
| 50812 | app = require 'core/application'
AuthModal = require 'views/core/AuthModal'
CocoCollection = require 'collections/CocoCollection'
Course = require 'models/Course'
CourseInstance = require 'models/CourseInstance'
RootView = require 'views/core/RootView'
template = require 'templates/courses/courses'
utils = require 'core/utils'
# TODO: Hour of Code (HoC) integration is a mess
module.exports = class CoursesView extends RootView
id: 'courses-view'
template: template
events:
'click .btn-buy': 'onClickBuy'
'click .btn-enroll': 'onClickEnroll'
'click .btn-enter': 'onClickEnter'
'click .btn-hoc-student-continue': 'onClickHOCStudentContinue'
'click .btn-student': 'onClickStudent'
'click .btn-teacher': 'onClickTeacher'
constructor: (options) ->
super(options)
@setUpHourOfCode()
@praise = utils.getCoursePraise()
@studentMode = Backbone.history.getFragment()?.indexOf('courses/students') >= 0
@courses = new CocoCollection([], { url: "/db/course", model: Course})
@supermodel.loadCollection(@courses, 'courses')
@courseInstances = new CocoCollection([], { url: "/db/user/#{me.id}/course_instances", model: CourseInstance})
@listenToOnce @courseInstances, 'sync', @onCourseInstancesLoaded
@supermodel.loadCollection(@courseInstances, 'course_instances')
if prepaidCode = utils.getQueryVariable('_ppc', false)
if me.isAnonymous()
@state = 'ppc_logged_out'
else
@studentMode = true
@courseEnrollByURL(prepaidCode)
setUpHourOfCode: ->
# If we are coming in at /hoc, then we show the landing page.
# If we have ?hoc=true (for the step after the landing page), then we show any HoC-specific instructions.
# If we haven't tracked this player as an hourOfCode player yet, and it's a new account, we do that now.
@hocLandingPage = Backbone.history.getFragment()?.indexOf('hoc') >= 0
@hocMode = utils.getQueryVariable('hoc', false)
elapsed = new Date() - new Date(me.get('dateCreated'))
if not me.get('hourOfCode') and (@hocLandingPage or @hocMode) and elapsed < 5 * 60 * 1000
me.set('hourOfCode', true)
me.patch()
$('body').append($('<img src="https://code.org/api/hour/begin_codecombat.png" style="visibility: hidden;">'))
application.tracker?.trackEvent 'Hour of Code Begin'
if me.get('hourOfCode') and elapsed < 24 * 60 * 60 * 1000
@hocMode = true # If they really just arrived, make sure we're still in hocMode even if they lost ?hoc=true.
getRenderData: ->
context = super()
context.courses = @courses.models ? []
context.enrolledCourses = @enrolledCourses ? {}
context.hocLandingPage = @hocLandingPage
context.hocMode = @hocMode
context.instances = @courseInstances.models ? []
context.praise = @praise
context.state = @state
context.stateMessage = @stateMessage
context.studentMode = @studentMode
context
afterRender: ->
super()
@setupCoursesFAQPopover()
onCourseInstancesLoaded: ->
@enrolledCourses = {}
@enrolledCourses[courseInstance.get('courseID')] = true for courseInstance in @courseInstances.models
setupCoursesFAQPopover: ->
popoverTitle = "<h3>" + $.i18n.t('courses.faq') + "<button type='button' class='close' onclick='$('.courses-faq').popover('hide');'>×</button></h3>"
popoverContent = "<p><strong>" + $.i18n.t('courses.question') + "</strong> " + $.i18n.t('courses.question1') + "</p>"
popoverContent += "<p><strong>" + $.i18n.t('courses.answer') + "</strong> " + $.i18n.t('courses.answer1') + "</p>"
popoverContent += "<p>" + $.i18n.t('courses.answer2') + "</p>"
@$el.find('.courses-faq').popover(
animation: true
html: true
placement: 'top'
trigger: 'click'
title: popoverTitle
content: popoverContent
container: @$el
).on 'shown.bs.popover', =>
application.tracker?.trackEvent 'Subscription payment methods hover'
onClickBuy: (e) ->
$('.continue-dialog').modal('hide')
courseID = $(e.target).data('course-id')
route = "/courses/enroll/#{courseID}"
viewClass = require 'views/courses/CourseEnrollView'
viewArgs = [{}, courseID]
navigationEvent = route: route, viewClass: viewClass, viewArgs: viewArgs
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickEnroll: (e) ->
return @openModalView new AuthModal() if me.isAnonymous()
courseID = $(e.target).data('course-id')
prepaidCode = ($(".code-input[data-course-id=#{courseID}]").val() ? '').trim()
@courseEnrollByModal(prepaidCode)
onClickEnter: (e) ->
$('.continue-dialog').modal('hide')
courseID = $(e.target).data('course-id')
courseInstanceID = $(".select-session[data-course-id=#{courseID}]").val()
route = "/courses/#{courseID}/#{courseInstanceID}"
viewClass = require 'views/courses/CourseDetailsView'
viewArgs = [{}, courseID, courseInstanceID]
navigationEvent = route: route, viewClass: viewClass, viewArgs: viewArgs
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickHOCStudentContinue: (e) ->
$('.continue-dialog').modal('hide')
if e
courseID = $(e.target).data('course-id')
else
courseID = '560f1a9f22961295f9427742'
@state = 'enrolling'
@stateMessage = undefined
@render?()
# TODO: Copied from CourseEnrollView
data =
name: '<NAME>'
seats: 9999
courseID: courseID
hourOfCode: true
jqxhr = $.post('/db/course_instance/-/create', data)
jqxhr.done (data, textStatus, jqXHR) =>
application.tracker?.trackEvent 'Finished HoC student course creation', {courseID: courseID}
# TODO: handle fetch errors
me.fetch(cache: false).always =>
courseID = courseID
route = "/courses/#{courseID}"
viewArgs = [{}, courseID]
if data?.length > 0
courseInstanceID = data[0]._id
route += "/#{courseInstanceID}"
viewArgs[0].courseInstanceID = courseInstanceID
Backbone.Mediator.publish 'router:navigate',
route: route
viewClass: 'views/courses/CourseDetailsView'
viewArgs: viewArgs
jqxhr.fail (xhr, textStatus, errorThrown) =>
console.error 'Got an error purchasing a course:', textStatus, errorThrown
application.tracker?.trackEvent 'Failed HoC student course creation', status: textStatus
if xhr.status is 402
@state = 'declined'
@stateMessage = arguments[2]
else
@state = 'unknown_error'
@stateMessage = "#{xhr.status}: #{xhr.responseText}"
@render?()
onClickStudent: (e) ->
if @supermodel.finished() and @hocLandingPage
# Automatically enroll in first course
@onClickHOCStudentContinue()
return
route = "/courses/students"
route += "?hoc=true" if @hocLandingPage or @hocMode
viewClass = require 'views/courses/CoursesView'
navigationEvent = route: route, viewClass: viewClass, viewArgs: []
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickTeacher: (e) ->
route = "/courses/teachers"
route += "?hoc=true" if @hocLandingPage or @hocMode
viewClass = require 'views/courses/CoursesView'
navigationEvent = route: route, viewClass: viewClass, viewArgs: []
Backbone.Mediator.publish 'router:navigate', navigationEvent
courseEnrollByURL: (prepaidCode) ->
@state = 'enrolling'
@render?()
$.ajax({
method: 'POST'
url: '/db/course_instance/-/redeem_prepaid'
data: prepaidCode: prepaidCode
context: @
success: @onRedeemPrepaidSuccess
error: (xhr, textStatus, errorThrown) ->
console.error 'Got an error redeeming a course prepaid code:', textStatus, errorThrown
application.tracker?.trackEvent 'Failed to redeem course prepaid code by url', status: textStatus
@state = 'unknown_error'
@stateMessage = "Failed to redeem code: #{xhr.responseText}"
@render?()
})
courseEnrollByModal: (prepaidCode) ->
@state = 'enrolling-by-modal'
@renderSelectors '.student-dialog-state-row'
$.ajax({
method: 'POST'
url: '/db/course_instance/-/redeem_prepaid'
data: prepaidCode: prepaidCode
context: @
success: ->
$('.continue-dialog').modal('hide')
@onRedeemPrepaidSuccess(arguments...)
error: (jqxhr, textStatus, errorThrown) ->
application.tracker?.trackEvent 'Failed to redeem course prepaid code by modal', status: textStatus
@state = 'unknown_error'
if jqxhr.status is 422
@stateMessage = 'Please enter a code.'
else if jqxhr.status is 404
@stateMessage = 'Code not found.'
else
@stateMessage = "#{jqxhr.responseText}"
@renderSelectors '.student-dialog-state-row'
})
onRedeemPrepaidSuccess: (data, textStatus, jqxhr) ->
prepaidID = data[0]?.prepaidID
application.tracker?.trackEvent 'Redeemed course prepaid code', {prepaidCode: prepaidID}
me.fetch(cache: false).always =>
if data?.length > 0 && data[0].courseID && data[0]._id
courseID = data[0].courseID
courseInstanceID = data[0]._id
route = "/courses/#{courseID}/#{courseInstanceID}"
viewArgs = [{}, courseID, courseInstanceID]
Backbone.Mediator.publish 'router:navigate',
route: route
viewClass: 'views/courses/CourseDetailsView'
viewArgs: viewArgs
else
@state = 'unknown_error'
@stateMessage = "Database error."
@render?()
| true | app = require 'core/application'
AuthModal = require 'views/core/AuthModal'
CocoCollection = require 'collections/CocoCollection'
Course = require 'models/Course'
CourseInstance = require 'models/CourseInstance'
RootView = require 'views/core/RootView'
template = require 'templates/courses/courses'
utils = require 'core/utils'
# TODO: Hour of Code (HoC) integration is a mess
module.exports = class CoursesView extends RootView
id: 'courses-view'
template: template
events:
'click .btn-buy': 'onClickBuy'
'click .btn-enroll': 'onClickEnroll'
'click .btn-enter': 'onClickEnter'
'click .btn-hoc-student-continue': 'onClickHOCStudentContinue'
'click .btn-student': 'onClickStudent'
'click .btn-teacher': 'onClickTeacher'
constructor: (options) ->
super(options)
@setUpHourOfCode()
@praise = utils.getCoursePraise()
@studentMode = Backbone.history.getFragment()?.indexOf('courses/students') >= 0
@courses = new CocoCollection([], { url: "/db/course", model: Course})
@supermodel.loadCollection(@courses, 'courses')
@courseInstances = new CocoCollection([], { url: "/db/user/#{me.id}/course_instances", model: CourseInstance})
@listenToOnce @courseInstances, 'sync', @onCourseInstancesLoaded
@supermodel.loadCollection(@courseInstances, 'course_instances')
if prepaidCode = utils.getQueryVariable('_ppc', false)
if me.isAnonymous()
@state = 'ppc_logged_out'
else
@studentMode = true
@courseEnrollByURL(prepaidCode)
setUpHourOfCode: ->
# If we are coming in at /hoc, then we show the landing page.
# If we have ?hoc=true (for the step after the landing page), then we show any HoC-specific instructions.
# If we haven't tracked this player as an hourOfCode player yet, and it's a new account, we do that now.
@hocLandingPage = Backbone.history.getFragment()?.indexOf('hoc') >= 0
@hocMode = utils.getQueryVariable('hoc', false)
elapsed = new Date() - new Date(me.get('dateCreated'))
if not me.get('hourOfCode') and (@hocLandingPage or @hocMode) and elapsed < 5 * 60 * 1000
me.set('hourOfCode', true)
me.patch()
$('body').append($('<img src="https://code.org/api/hour/begin_codecombat.png" style="visibility: hidden;">'))
application.tracker?.trackEvent 'Hour of Code Begin'
if me.get('hourOfCode') and elapsed < 24 * 60 * 60 * 1000
@hocMode = true # If they really just arrived, make sure we're still in hocMode even if they lost ?hoc=true.
getRenderData: ->
context = super()
context.courses = @courses.models ? []
context.enrolledCourses = @enrolledCourses ? {}
context.hocLandingPage = @hocLandingPage
context.hocMode = @hocMode
context.instances = @courseInstances.models ? []
context.praise = @praise
context.state = @state
context.stateMessage = @stateMessage
context.studentMode = @studentMode
context
afterRender: ->
super()
@setupCoursesFAQPopover()
onCourseInstancesLoaded: ->
@enrolledCourses = {}
@enrolledCourses[courseInstance.get('courseID')] = true for courseInstance in @courseInstances.models
setupCoursesFAQPopover: ->
popoverTitle = "<h3>" + $.i18n.t('courses.faq') + "<button type='button' class='close' onclick='$('.courses-faq').popover('hide');'>×</button></h3>"
popoverContent = "<p><strong>" + $.i18n.t('courses.question') + "</strong> " + $.i18n.t('courses.question1') + "</p>"
popoverContent += "<p><strong>" + $.i18n.t('courses.answer') + "</strong> " + $.i18n.t('courses.answer1') + "</p>"
popoverContent += "<p>" + $.i18n.t('courses.answer2') + "</p>"
@$el.find('.courses-faq').popover(
animation: true
html: true
placement: 'top'
trigger: 'click'
title: popoverTitle
content: popoverContent
container: @$el
).on 'shown.bs.popover', =>
application.tracker?.trackEvent 'Subscription payment methods hover'
onClickBuy: (e) ->
$('.continue-dialog').modal('hide')
courseID = $(e.target).data('course-id')
route = "/courses/enroll/#{courseID}"
viewClass = require 'views/courses/CourseEnrollView'
viewArgs = [{}, courseID]
navigationEvent = route: route, viewClass: viewClass, viewArgs: viewArgs
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickEnroll: (e) ->
return @openModalView new AuthModal() if me.isAnonymous()
courseID = $(e.target).data('course-id')
prepaidCode = ($(".code-input[data-course-id=#{courseID}]").val() ? '').trim()
@courseEnrollByModal(prepaidCode)
onClickEnter: (e) ->
$('.continue-dialog').modal('hide')
courseID = $(e.target).data('course-id')
courseInstanceID = $(".select-session[data-course-id=#{courseID}]").val()
route = "/courses/#{courseID}/#{courseInstanceID}"
viewClass = require 'views/courses/CourseDetailsView'
viewArgs = [{}, courseID, courseInstanceID]
navigationEvent = route: route, viewClass: viewClass, viewArgs: viewArgs
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickHOCStudentContinue: (e) ->
$('.continue-dialog').modal('hide')
if e
courseID = $(e.target).data('course-id')
else
courseID = '560f1a9f22961295f9427742'
@state = 'enrolling'
@stateMessage = undefined
@render?()
# TODO: Copied from CourseEnrollView
data =
name: 'PI:NAME:<NAME>END_PI'
seats: 9999
courseID: courseID
hourOfCode: true
jqxhr = $.post('/db/course_instance/-/create', data)
jqxhr.done (data, textStatus, jqXHR) =>
application.tracker?.trackEvent 'Finished HoC student course creation', {courseID: courseID}
# TODO: handle fetch errors
me.fetch(cache: false).always =>
courseID = courseID
route = "/courses/#{courseID}"
viewArgs = [{}, courseID]
if data?.length > 0
courseInstanceID = data[0]._id
route += "/#{courseInstanceID}"
viewArgs[0].courseInstanceID = courseInstanceID
Backbone.Mediator.publish 'router:navigate',
route: route
viewClass: 'views/courses/CourseDetailsView'
viewArgs: viewArgs
jqxhr.fail (xhr, textStatus, errorThrown) =>
console.error 'Got an error purchasing a course:', textStatus, errorThrown
application.tracker?.trackEvent 'Failed HoC student course creation', status: textStatus
if xhr.status is 402
@state = 'declined'
@stateMessage = arguments[2]
else
@state = 'unknown_error'
@stateMessage = "#{xhr.status}: #{xhr.responseText}"
@render?()
onClickStudent: (e) ->
if @supermodel.finished() and @hocLandingPage
# Automatically enroll in first course
@onClickHOCStudentContinue()
return
route = "/courses/students"
route += "?hoc=true" if @hocLandingPage or @hocMode
viewClass = require 'views/courses/CoursesView'
navigationEvent = route: route, viewClass: viewClass, viewArgs: []
Backbone.Mediator.publish 'router:navigate', navigationEvent
onClickTeacher: (e) ->
route = "/courses/teachers"
route += "?hoc=true" if @hocLandingPage or @hocMode
viewClass = require 'views/courses/CoursesView'
navigationEvent = route: route, viewClass: viewClass, viewArgs: []
Backbone.Mediator.publish 'router:navigate', navigationEvent
courseEnrollByURL: (prepaidCode) ->
@state = 'enrolling'
@render?()
$.ajax({
method: 'POST'
url: '/db/course_instance/-/redeem_prepaid'
data: prepaidCode: prepaidCode
context: @
success: @onRedeemPrepaidSuccess
error: (xhr, textStatus, errorThrown) ->
console.error 'Got an error redeeming a course prepaid code:', textStatus, errorThrown
application.tracker?.trackEvent 'Failed to redeem course prepaid code by url', status: textStatus
@state = 'unknown_error'
@stateMessage = "Failed to redeem code: #{xhr.responseText}"
@render?()
})
courseEnrollByModal: (prepaidCode) ->
@state = 'enrolling-by-modal'
@renderSelectors '.student-dialog-state-row'
$.ajax({
method: 'POST'
url: '/db/course_instance/-/redeem_prepaid'
data: prepaidCode: prepaidCode
context: @
success: ->
$('.continue-dialog').modal('hide')
@onRedeemPrepaidSuccess(arguments...)
error: (jqxhr, textStatus, errorThrown) ->
application.tracker?.trackEvent 'Failed to redeem course prepaid code by modal', status: textStatus
@state = 'unknown_error'
if jqxhr.status is 422
@stateMessage = 'Please enter a code.'
else if jqxhr.status is 404
@stateMessage = 'Code not found.'
else
@stateMessage = "#{jqxhr.responseText}"
@renderSelectors '.student-dialog-state-row'
})
onRedeemPrepaidSuccess: (data, textStatus, jqxhr) ->
prepaidID = data[0]?.prepaidID
application.tracker?.trackEvent 'Redeemed course prepaid code', {prepaidCode: prepaidID}
me.fetch(cache: false).always =>
if data?.length > 0 && data[0].courseID && data[0]._id
courseID = data[0].courseID
courseInstanceID = data[0]._id
route = "/courses/#{courseID}/#{courseInstanceID}"
viewArgs = [{}, courseID, courseInstanceID]
Backbone.Mediator.publish 'router:navigate',
route: route
viewClass: 'views/courses/CourseDetailsView'
viewArgs: viewArgs
else
@state = 'unknown_error'
@stateMessage = "Database error."
@render?()
|
[
{
"context": "<Task {...@props} workflow={@state.workflow} key={id} plumbId={id} taskKey={id} taskNumber={idx} initi",
"end": 6807,
"score": 0.5269137620925903,
"start": 6805,
"tag": "KEY",
"value": "id"
}
] | app/pages/lab/workflow-viewer/workflow.cjsx | Crentist/Panoptes-frontend-spanish | 1 | React = require 'react'
ReactDOM = require 'react-dom'
{Task, StartEndNode} = require './task.cjsx'
DETACHABLE = false
module.exports = React.createClass
displayName: 'WorkflowNodes'
getInitialState: ->
{initialTask, keys, workflow, taskStateSet} = @getWorkflow()
position = {}
#if @props.workflow.metadata?.task_positions?
# pos = clone(@props.workflow.metadata.task_positions)
taskStateSet: taskStateSet
initialTask: initialTask
uuid: keys.length
uuids: keys
workflow: workflow
position: position
allSet: false
nextTask: {}
previousTask: {}
appendToDict: (dict, key, value) ->
dict[key] ?= {}
dict[key][value] = value
addConnection: (c, id, nextTask, previousTask) ->
@props.jp.connect({uuids: c, detachable: DETACHABLE})
@appendToDict(nextTask, id, c[1])
@appendToDict(previousTask, c[1], id)
componentDidMount: ->
nextTask = {}
previousTask = {}
# Once all nodes are drawn connect them up correctly
for id in @state.uuids
if id == @state.initialTask
c = ['start', id]
@props.jp.connect({uuids: c, detachable: DETACHABLE})
task = @state.workflow[id]
switch task.type
when 'single'
if task.subtask
if task.next?
c = ["#{id}_next", task.next]
@addConnection(c, id, nextTask, previousTask)
else
for a, adx in task.answers
c = ["#{id}_answer_#{adx}", a.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
when 'multiple'
if task.subtask
if task.next?
c = ["#{id}_next", task.next]
@addConnection(c, id, nextTask, previousTask)
else
c = ["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
when 'drawing'
c = ["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
for a, adx in task.tools
if a.details[0]?
c = ["#{id}_answer_#{adx}", a.details[0]]
@addConnection(c, id, nextTask, previousTask)
else
c=["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
nextTask[id] ?= {'end': 'end'}
@setState({nextTask: nextTask, previousTask: previousTask})
#@sortTasks(connections)
getUuid: (idx, uuid = @state.uuid, uuids = @state.uuids) ->
if uuids[idx]?
return uuids[idx]
else
return "T#{uuid}"
setUuid: (id) ->
current_uuids = @state.uuids.concat([id])
current_uuid = @state.uuid + 1
@setState({uuids: current_uuids, uuid: current_uuid})
getWorkflow: ->
# format workflow so 'init' is not a key and sub-tasks are their own tasks
keys = []
taskStateSet = {}
workflow = {}
initialTask = @props.workflow.first_task
#L = Object.keys(@props.workflow.tasks).length
L = 0
for k, v of @props.workflow.tasks
if k == 'init'
# find a new name for 'init' (lowest task number not in use)
ct = 0
tmp = 'T' + ct
while tmp of @props.workflow.tasks
ct += 1
tmp = 'T' + ct
k = tmp
if initialTask == 'init'
initialTask = k
keys.push(k)
taskStateSet[k] = false
# clone the workflow so it does not overwrite the original (will update the API after re-formatting)
workflow[k] = clone(v)
workflow[k].subTask = false
if v.type == 'drawing'
for tool, tdx in v.tools
subList = []
# make subtasks into their own tasks (use lowest task number not in use)
for st, sdx in tool.details
tmp = 'S' + L
subList.push(tmp)
keys.push(tmp)
workflow[tmp] = clone(st)
workflow[tmp].subtask = true
L += 1
# set 'next' to be the next subtask in the list (if it exists)
if tool.details[sdx + 1]?
workflow[tmp].next = 'S' + L
# replace details with a list of keys
workflow[k].tools[tdx].details = subList
# make sure the keys are in number order
keys.sort()
{initialTask, keys, workflow, taskStateSet}
doSort: (task_map, t, D) ->
for i of task_map[t]
msg = i + ' ' + D[i] + ' ,' + D[t]
if (i not of D) or (D[i] <= D[t])
D[i] = D[t] + 1
@doSort(task_map, i, D)
sortTasks: ->
start = @state.initialTask
D = {}
D[@state.initialTask] = 0
@doSort(@state.nextTask, start, D)
levels = {}
for k,v of D
if levels[v]?
levels[v].push(k)
else
levels[v] = [k]
posX = 150
w = 0
for i in [0...Object.keys(levels).length]
max_width = 0
posY = 20
for t in levels[i]
# move the task div
if @state.previousTask[t]?
N = 0
if @state.workflow[t]?.subtask and not @state.workflow[Object.keys(@state.previousTask[t])[0]].subtask
previousY = 65
else
previousY = 0
for pt of @state.previousTask[t]
previousY += parseFloat(ReactDOM.findDOMNode(@refs[pt]).style.top)
N += 1
previousY /= N
if previousY > posY
posY = previousY
@refs[t].moveMe({left: posX, top: posY})
# calculate new y position
DOMNode = ReactDOM.findDOMNode(@refs[t])
posY += DOMNode.offsetHeight + 40
# calculate next x position
w = DOMNode.offsetWidth
if w > max_width
max_width = w
posX += 70 + max_width
taskMove: (id) ->
if @state.allSet
position = @refs[id].state.style
currentPosition = @state.position
newPosition =
left: position.left
top: position.top
if position.width
newPosition.width = position.width
currentPosition[id] = newPosition
# TODO add code to save positions to workflow metadata (when this is allowed)
#change = {}
#change["metadata.task_positions.#{id}"] = new_pos
#@props.workflow.update(change)
#@props.workflow.save()
@setState({position: currentPosition})
setTaskState: (id) ->
if not @state.allSet
currentTaskState = @state.taskStateSet
currentTaskState[id] = true
@setState({taskStateSet: currentTaskState}, @allTasksSet)
allTasksSet: ->
s = true
for id, taskSet of @state.taskStateSet
s &= taskSet
if s and not @state.allSet
@setState({allSet: true}, @sortTasks)
createTask: (id, idx) ->
if @state.position[id]?
position = @state.position[id]
else
position =
left: 100 + 240*idx + 'px'
top: 0 + 'px'
<Task {...@props} workflow={@state.workflow} key={id} plumbId={id} taskKey={id} taskNumber={idx} initialPosition={position} ref={id} onMove={@taskMove} onDone={@setTaskState.bind(@, id)} />
render: ->
tasks = (@createTask(id, idx) for id,idx in @state.uuids)
<div id='editor' className='editor noselect'>
<StartEndNode jp={@props.jp} type='start' onMove={@taskMove} ref='start' key='start' />
<StartEndNode jp={@props.jp} type='end' onMove={@taskMove} ref='end' key='end' />
{tasks}
</div>
# A function to clone JSON object
clone = (obj) ->
return JSON.parse(JSON.stringify(obj))
| 159338 | React = require 'react'
ReactDOM = require 'react-dom'
{Task, StartEndNode} = require './task.cjsx'
DETACHABLE = false
module.exports = React.createClass
displayName: 'WorkflowNodes'
getInitialState: ->
{initialTask, keys, workflow, taskStateSet} = @getWorkflow()
position = {}
#if @props.workflow.metadata?.task_positions?
# pos = clone(@props.workflow.metadata.task_positions)
taskStateSet: taskStateSet
initialTask: initialTask
uuid: keys.length
uuids: keys
workflow: workflow
position: position
allSet: false
nextTask: {}
previousTask: {}
appendToDict: (dict, key, value) ->
dict[key] ?= {}
dict[key][value] = value
addConnection: (c, id, nextTask, previousTask) ->
@props.jp.connect({uuids: c, detachable: DETACHABLE})
@appendToDict(nextTask, id, c[1])
@appendToDict(previousTask, c[1], id)
componentDidMount: ->
nextTask = {}
previousTask = {}
# Once all nodes are drawn connect them up correctly
for id in @state.uuids
if id == @state.initialTask
c = ['start', id]
@props.jp.connect({uuids: c, detachable: DETACHABLE})
task = @state.workflow[id]
switch task.type
when 'single'
if task.subtask
if task.next?
c = ["#{id}_next", task.next]
@addConnection(c, id, nextTask, previousTask)
else
for a, adx in task.answers
c = ["#{id}_answer_#{adx}", a.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
when 'multiple'
if task.subtask
if task.next?
c = ["#{id}_next", task.next]
@addConnection(c, id, nextTask, previousTask)
else
c = ["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
when 'drawing'
c = ["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
for a, adx in task.tools
if a.details[0]?
c = ["#{id}_answer_#{adx}", a.details[0]]
@addConnection(c, id, nextTask, previousTask)
else
c=["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
nextTask[id] ?= {'end': 'end'}
@setState({nextTask: nextTask, previousTask: previousTask})
#@sortTasks(connections)
getUuid: (idx, uuid = @state.uuid, uuids = @state.uuids) ->
if uuids[idx]?
return uuids[idx]
else
return "T#{uuid}"
setUuid: (id) ->
current_uuids = @state.uuids.concat([id])
current_uuid = @state.uuid + 1
@setState({uuids: current_uuids, uuid: current_uuid})
getWorkflow: ->
# format workflow so 'init' is not a key and sub-tasks are their own tasks
keys = []
taskStateSet = {}
workflow = {}
initialTask = @props.workflow.first_task
#L = Object.keys(@props.workflow.tasks).length
L = 0
for k, v of @props.workflow.tasks
if k == 'init'
# find a new name for 'init' (lowest task number not in use)
ct = 0
tmp = 'T' + ct
while tmp of @props.workflow.tasks
ct += 1
tmp = 'T' + ct
k = tmp
if initialTask == 'init'
initialTask = k
keys.push(k)
taskStateSet[k] = false
# clone the workflow so it does not overwrite the original (will update the API after re-formatting)
workflow[k] = clone(v)
workflow[k].subTask = false
if v.type == 'drawing'
for tool, tdx in v.tools
subList = []
# make subtasks into their own tasks (use lowest task number not in use)
for st, sdx in tool.details
tmp = 'S' + L
subList.push(tmp)
keys.push(tmp)
workflow[tmp] = clone(st)
workflow[tmp].subtask = true
L += 1
# set 'next' to be the next subtask in the list (if it exists)
if tool.details[sdx + 1]?
workflow[tmp].next = 'S' + L
# replace details with a list of keys
workflow[k].tools[tdx].details = subList
# make sure the keys are in number order
keys.sort()
{initialTask, keys, workflow, taskStateSet}
doSort: (task_map, t, D) ->
for i of task_map[t]
msg = i + ' ' + D[i] + ' ,' + D[t]
if (i not of D) or (D[i] <= D[t])
D[i] = D[t] + 1
@doSort(task_map, i, D)
sortTasks: ->
start = @state.initialTask
D = {}
D[@state.initialTask] = 0
@doSort(@state.nextTask, start, D)
levels = {}
for k,v of D
if levels[v]?
levels[v].push(k)
else
levels[v] = [k]
posX = 150
w = 0
for i in [0...Object.keys(levels).length]
max_width = 0
posY = 20
for t in levels[i]
# move the task div
if @state.previousTask[t]?
N = 0
if @state.workflow[t]?.subtask and not @state.workflow[Object.keys(@state.previousTask[t])[0]].subtask
previousY = 65
else
previousY = 0
for pt of @state.previousTask[t]
previousY += parseFloat(ReactDOM.findDOMNode(@refs[pt]).style.top)
N += 1
previousY /= N
if previousY > posY
posY = previousY
@refs[t].moveMe({left: posX, top: posY})
# calculate new y position
DOMNode = ReactDOM.findDOMNode(@refs[t])
posY += DOMNode.offsetHeight + 40
# calculate next x position
w = DOMNode.offsetWidth
if w > max_width
max_width = w
posX += 70 + max_width
taskMove: (id) ->
if @state.allSet
position = @refs[id].state.style
currentPosition = @state.position
newPosition =
left: position.left
top: position.top
if position.width
newPosition.width = position.width
currentPosition[id] = newPosition
# TODO add code to save positions to workflow metadata (when this is allowed)
#change = {}
#change["metadata.task_positions.#{id}"] = new_pos
#@props.workflow.update(change)
#@props.workflow.save()
@setState({position: currentPosition})
setTaskState: (id) ->
if not @state.allSet
currentTaskState = @state.taskStateSet
currentTaskState[id] = true
@setState({taskStateSet: currentTaskState}, @allTasksSet)
allTasksSet: ->
s = true
for id, taskSet of @state.taskStateSet
s &= taskSet
if s and not @state.allSet
@setState({allSet: true}, @sortTasks)
createTask: (id, idx) ->
if @state.position[id]?
position = @state.position[id]
else
position =
left: 100 + 240*idx + 'px'
top: 0 + 'px'
<Task {...@props} workflow={@state.workflow} key={<KEY>} plumbId={id} taskKey={id} taskNumber={idx} initialPosition={position} ref={id} onMove={@taskMove} onDone={@setTaskState.bind(@, id)} />
render: ->
tasks = (@createTask(id, idx) for id,idx in @state.uuids)
<div id='editor' className='editor noselect'>
<StartEndNode jp={@props.jp} type='start' onMove={@taskMove} ref='start' key='start' />
<StartEndNode jp={@props.jp} type='end' onMove={@taskMove} ref='end' key='end' />
{tasks}
</div>
# A function to clone JSON object
clone = (obj) ->
return JSON.parse(JSON.stringify(obj))
| true | React = require 'react'
ReactDOM = require 'react-dom'
{Task, StartEndNode} = require './task.cjsx'
DETACHABLE = false
module.exports = React.createClass
displayName: 'WorkflowNodes'
getInitialState: ->
{initialTask, keys, workflow, taskStateSet} = @getWorkflow()
position = {}
#if @props.workflow.metadata?.task_positions?
# pos = clone(@props.workflow.metadata.task_positions)
taskStateSet: taskStateSet
initialTask: initialTask
uuid: keys.length
uuids: keys
workflow: workflow
position: position
allSet: false
nextTask: {}
previousTask: {}
appendToDict: (dict, key, value) ->
dict[key] ?= {}
dict[key][value] = value
addConnection: (c, id, nextTask, previousTask) ->
@props.jp.connect({uuids: c, detachable: DETACHABLE})
@appendToDict(nextTask, id, c[1])
@appendToDict(previousTask, c[1], id)
componentDidMount: ->
nextTask = {}
previousTask = {}
# Once all nodes are drawn connect them up correctly
for id in @state.uuids
if id == @state.initialTask
c = ['start', id]
@props.jp.connect({uuids: c, detachable: DETACHABLE})
task = @state.workflow[id]
switch task.type
when 'single'
if task.subtask
if task.next?
c = ["#{id}_next", task.next]
@addConnection(c, id, nextTask, previousTask)
else
for a, adx in task.answers
c = ["#{id}_answer_#{adx}", a.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
when 'multiple'
if task.subtask
if task.next?
c = ["#{id}_next", task.next]
@addConnection(c, id, nextTask, previousTask)
else
c = ["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
when 'drawing'
c = ["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
for a, adx in task.tools
if a.details[0]?
c = ["#{id}_answer_#{adx}", a.details[0]]
@addConnection(c, id, nextTask, previousTask)
else
c=["#{id}_next", task.next ? 'end']
@addConnection(c, id, nextTask, previousTask)
nextTask[id] ?= {'end': 'end'}
@setState({nextTask: nextTask, previousTask: previousTask})
#@sortTasks(connections)
getUuid: (idx, uuid = @state.uuid, uuids = @state.uuids) ->
if uuids[idx]?
return uuids[idx]
else
return "T#{uuid}"
setUuid: (id) ->
current_uuids = @state.uuids.concat([id])
current_uuid = @state.uuid + 1
@setState({uuids: current_uuids, uuid: current_uuid})
getWorkflow: ->
# format workflow so 'init' is not a key and sub-tasks are their own tasks
keys = []
taskStateSet = {}
workflow = {}
initialTask = @props.workflow.first_task
#L = Object.keys(@props.workflow.tasks).length
L = 0
for k, v of @props.workflow.tasks
if k == 'init'
# find a new name for 'init' (lowest task number not in use)
ct = 0
tmp = 'T' + ct
while tmp of @props.workflow.tasks
ct += 1
tmp = 'T' + ct
k = tmp
if initialTask == 'init'
initialTask = k
keys.push(k)
taskStateSet[k] = false
# clone the workflow so it does not overwrite the original (will update the API after re-formatting)
workflow[k] = clone(v)
workflow[k].subTask = false
if v.type == 'drawing'
for tool, tdx in v.tools
subList = []
# make subtasks into their own tasks (use lowest task number not in use)
for st, sdx in tool.details
tmp = 'S' + L
subList.push(tmp)
keys.push(tmp)
workflow[tmp] = clone(st)
workflow[tmp].subtask = true
L += 1
# set 'next' to be the next subtask in the list (if it exists)
if tool.details[sdx + 1]?
workflow[tmp].next = 'S' + L
# replace details with a list of keys
workflow[k].tools[tdx].details = subList
# make sure the keys are in number order
keys.sort()
{initialTask, keys, workflow, taskStateSet}
doSort: (task_map, t, D) ->
for i of task_map[t]
msg = i + ' ' + D[i] + ' ,' + D[t]
if (i not of D) or (D[i] <= D[t])
D[i] = D[t] + 1
@doSort(task_map, i, D)
sortTasks: ->
start = @state.initialTask
D = {}
D[@state.initialTask] = 0
@doSort(@state.nextTask, start, D)
levels = {}
for k,v of D
if levels[v]?
levels[v].push(k)
else
levels[v] = [k]
posX = 150
w = 0
for i in [0...Object.keys(levels).length]
max_width = 0
posY = 20
for t in levels[i]
# move the task div
if @state.previousTask[t]?
N = 0
if @state.workflow[t]?.subtask and not @state.workflow[Object.keys(@state.previousTask[t])[0]].subtask
previousY = 65
else
previousY = 0
for pt of @state.previousTask[t]
previousY += parseFloat(ReactDOM.findDOMNode(@refs[pt]).style.top)
N += 1
previousY /= N
if previousY > posY
posY = previousY
@refs[t].moveMe({left: posX, top: posY})
# calculate new y position
DOMNode = ReactDOM.findDOMNode(@refs[t])
posY += DOMNode.offsetHeight + 40
# calculate next x position
w = DOMNode.offsetWidth
if w > max_width
max_width = w
posX += 70 + max_width
taskMove: (id) ->
if @state.allSet
position = @refs[id].state.style
currentPosition = @state.position
newPosition =
left: position.left
top: position.top
if position.width
newPosition.width = position.width
currentPosition[id] = newPosition
# TODO add code to save positions to workflow metadata (when this is allowed)
#change = {}
#change["metadata.task_positions.#{id}"] = new_pos
#@props.workflow.update(change)
#@props.workflow.save()
@setState({position: currentPosition})
setTaskState: (id) ->
if not @state.allSet
currentTaskState = @state.taskStateSet
currentTaskState[id] = true
@setState({taskStateSet: currentTaskState}, @allTasksSet)
allTasksSet: ->
s = true
for id, taskSet of @state.taskStateSet
s &= taskSet
if s and not @state.allSet
@setState({allSet: true}, @sortTasks)
createTask: (id, idx) ->
if @state.position[id]?
position = @state.position[id]
else
position =
left: 100 + 240*idx + 'px'
top: 0 + 'px'
<Task {...@props} workflow={@state.workflow} key={PI:KEY:<KEY>END_PI} plumbId={id} taskKey={id} taskNumber={idx} initialPosition={position} ref={id} onMove={@taskMove} onDone={@setTaskState.bind(@, id)} />
render: ->
tasks = (@createTask(id, idx) for id,idx in @state.uuids)
<div id='editor' className='editor noselect'>
<StartEndNode jp={@props.jp} type='start' onMove={@taskMove} ref='start' key='start' />
<StartEndNode jp={@props.jp} type='end' onMove={@taskMove} ref='end' key='end' />
{tasks}
</div>
# A function to clone JSON object
clone = (obj) ->
return JSON.parse(JSON.stringify(obj))
|
[
{
"context": "e COOLSTRAP.Util\n * @class Platform\n * \n * @author Abraham Barrera <abarrerac@gmail.com> || @abraham_barrera\n * Insp",
"end": 118,
"score": 0.9998939633369446,
"start": 103,
"tag": "NAME",
"value": "Abraham Barrera"
},
{
"context": "* @class Platform\n * \n * @autho... | coolstrap-core/app/assets/javascripts/coolstrap/util/_Coolstrap.Util.Platform.coffee | cristianferrarig/coolstrap | 0 | ###
* Coolstrap Platform functions
*
* @namespace COOLSTRAP.Util
* @class Platform
*
* @author Abraham Barrera <abarrerac@gmail.com> || @abraham_barrera
* Inspired by LungoJS
###
COOLSTRAP.Util.Platform = ((cool) ->
SUPPORTED_OS =
android: /(Android)\s+([\d.]+)/
ipad: /(iPad).*OS\s([\d_]+)/
iphone: /(iPhone\sOS)\s([\d_]+)/
blackberry: /(BlackBerry).*Version\/([\d.]+)/
webos: /(webOS|hpwOS)[\s\/]([\d.]+)/
windows: /(Windows Phone OS)[\s\/]([\d.]+)/
_current_environment = null
_detectBrowser = (user_agent) ->
is_webkit = user_agent.match(/WebKit\/([\d.]+)/)
(if (is_webkit) then is_webkit[0] else user_agent)
_detectOS = (user_agent) ->
detected_os = undefined
for os of SUPPORTED_OS
supported = user_agent.match(SUPPORTED_OS[os])
if supported
detected_os =
name: (if (os is "iphone" or os is "ipad") then "ios" else os)
version: supported[2].replace("_", ".")
break
detected_os
_detectEnvironment = ->
ua = navigator.userAgent
environment = {}
environment.browser = _detectBrowser(ua)
environment.os = _detectOS(ua)
environment.isMobile = (if (environment.os) then true else false)
environment.screen =
width: window.innerWidth
height: window.innerHeight
environment
###
* Determine if the current environment is a mobile environment
*
* @method isMobile
*
* @return {boolean} true if is mobile environment, false if not.
###
isMobile = ->
_current_environment = _current_environment or _detectEnvironment()
_current_environment.isMobile
###
* Get from current environment
*
* Inspired by LungoJS
*
* @method environment
###
environment = (reload_environment) ->
_current_environment = (if reload_environment then _detectEnvironment() else _current_environment or _detectEnvironment())
_current_environment
###
* Reload current environment
*
* @method reloadEnvironment
###
reloadEnvironment = ->
environment true
###
* Detect if browser is online
*
* @method isOnline
###
isOnline = ->
navigator.onLine
isMobile: isMobile
environment: environment
isOnline: isOnline
reloadEnvironment: reloadEnvironment
)(COOLSTRAP) | 164372 | ###
* Coolstrap Platform functions
*
* @namespace COOLSTRAP.Util
* @class Platform
*
* @author <NAME> <<EMAIL>> || @abraham_barrera
* Inspired by LungoJS
###
COOLSTRAP.Util.Platform = ((cool) ->
SUPPORTED_OS =
android: /(Android)\s+([\d.]+)/
ipad: /(iPad).*OS\s([\d_]+)/
iphone: /(iPhone\sOS)\s([\d_]+)/
blackberry: /(BlackBerry).*Version\/([\d.]+)/
webos: /(webOS|hpwOS)[\s\/]([\d.]+)/
windows: /(Windows Phone OS)[\s\/]([\d.]+)/
_current_environment = null
_detectBrowser = (user_agent) ->
is_webkit = user_agent.match(/WebKit\/([\d.]+)/)
(if (is_webkit) then is_webkit[0] else user_agent)
_detectOS = (user_agent) ->
detected_os = undefined
for os of SUPPORTED_OS
supported = user_agent.match(SUPPORTED_OS[os])
if supported
detected_os =
name: (if (os is "iphone" or os is "ipad") then "ios" else os)
version: supported[2].replace("_", ".")
break
detected_os
_detectEnvironment = ->
ua = navigator.userAgent
environment = {}
environment.browser = _detectBrowser(ua)
environment.os = _detectOS(ua)
environment.isMobile = (if (environment.os) then true else false)
environment.screen =
width: window.innerWidth
height: window.innerHeight
environment
###
* Determine if the current environment is a mobile environment
*
* @method isMobile
*
* @return {boolean} true if is mobile environment, false if not.
###
isMobile = ->
_current_environment = _current_environment or _detectEnvironment()
_current_environment.isMobile
###
* Get from current environment
*
* Inspired by LungoJS
*
* @method environment
###
environment = (reload_environment) ->
_current_environment = (if reload_environment then _detectEnvironment() else _current_environment or _detectEnvironment())
_current_environment
###
* Reload current environment
*
* @method reloadEnvironment
###
reloadEnvironment = ->
environment true
###
* Detect if browser is online
*
* @method isOnline
###
isOnline = ->
navigator.onLine
isMobile: isMobile
environment: environment
isOnline: isOnline
reloadEnvironment: reloadEnvironment
)(COOLSTRAP) | true | ###
* Coolstrap Platform functions
*
* @namespace COOLSTRAP.Util
* @class Platform
*
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> || @abraham_barrera
* Inspired by LungoJS
###
COOLSTRAP.Util.Platform = ((cool) ->
SUPPORTED_OS =
android: /(Android)\s+([\d.]+)/
ipad: /(iPad).*OS\s([\d_]+)/
iphone: /(iPhone\sOS)\s([\d_]+)/
blackberry: /(BlackBerry).*Version\/([\d.]+)/
webos: /(webOS|hpwOS)[\s\/]([\d.]+)/
windows: /(Windows Phone OS)[\s\/]([\d.]+)/
_current_environment = null
_detectBrowser = (user_agent) ->
is_webkit = user_agent.match(/WebKit\/([\d.]+)/)
(if (is_webkit) then is_webkit[0] else user_agent)
_detectOS = (user_agent) ->
detected_os = undefined
for os of SUPPORTED_OS
supported = user_agent.match(SUPPORTED_OS[os])
if supported
detected_os =
name: (if (os is "iphone" or os is "ipad") then "ios" else os)
version: supported[2].replace("_", ".")
break
detected_os
_detectEnvironment = ->
ua = navigator.userAgent
environment = {}
environment.browser = _detectBrowser(ua)
environment.os = _detectOS(ua)
environment.isMobile = (if (environment.os) then true else false)
environment.screen =
width: window.innerWidth
height: window.innerHeight
environment
###
* Determine if the current environment is a mobile environment
*
* @method isMobile
*
* @return {boolean} true if is mobile environment, false if not.
###
isMobile = ->
_current_environment = _current_environment or _detectEnvironment()
_current_environment.isMobile
###
* Get from current environment
*
* Inspired by LungoJS
*
* @method environment
###
environment = (reload_environment) ->
_current_environment = (if reload_environment then _detectEnvironment() else _current_environment or _detectEnvironment())
_current_environment
###
* Reload current environment
*
* @method reloadEnvironment
###
reloadEnvironment = ->
environment true
###
* Detect if browser is online
*
* @method isOnline
###
isOnline = ->
navigator.onLine
isMobile: isMobile
environment: environment
isOnline: isOnline
reloadEnvironment: reloadEnvironment
)(COOLSTRAP) |
[
{
"context": " 1.0.0\n@file RestDelete.js\n@author Welington Sampaio (http://welington.zaez.net/)\n@contact http://",
"end": 143,
"score": 0.9998928308486938,
"start": 126,
"tag": "NAME",
"value": "Welington Sampaio"
}
] | vendor/assets/javascripts/joker/RestDelete.coffee | zaeznet/joker-rails | 0 | ###
@summary Joker
@description Framework of RIAs applications
@version 1.0.0
@file RestDelete.js
@author Welington Sampaio (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.RestDelete extends Joker.Core
modal: undefined
overlay: undefined
params: undefined
constructor: ->
super
@setEvents()
###
Faz a requisição para excluir um
registro do Banco de dados
@param jQueryEvent event
@param Joker.Modal modal
###
ajaxDelete: (event, modal)->
el = event.currentTarget
new Joker.Ajax
url: el.href
method: "DELETE"
callbacks:
error: (error)->
modal.destroy()
console.log error
new Joker.Alert
type: Joker.Alert.TYPE_ERROR
message: Joker.I18n.t('joker.restDelete.error.ajax_delete')
success: =>
modal.destroy()
@libSupport(document.getElementById el.dataset.reference ).remove()
new Joker.Alert
type: Joker.Alert.TYPE_SUCCESS
message: Joker.I18n.t('joker.restDelete.successf.ajax_delete')
###
Responsavel por criar os elementos do
modal, sendo que cada um tem sua funcao
###
createModal: (e)->
new Joker.Ajax
url: e.currentTarget.href
callbacks:
error: (error)->
console.log error
new Joker.Alert
type: Joker.Alert.TYPE_ERROR
message: Joker.I18n.t('joker.restDelete.error.create_modal')
success: (data)=>
@debug 'Gerando o Modal de confirmação, com a configuração: ', data
modal = new Joker.Modal
title: e.currentTarget.dataset.title
content: @generateContent(data)
callbacks:
beforeCreate: (modal)=>
modal.modal.find('.btn-danger').on 'click', =>
@ajaxDelete(e, modal)
modal.modal.find('.btn-info').on 'click', @libSupport.proxy(modal.destroy, modal)
false
generateContent: (data)->
itens = []
for key, value of data
if Object.isString(value) or Object.isNumber(value)
itens.add @accessor('patternDestroyItens').assign
name: key.capitalize()
value: value
itensString = ''
itens.forEach (item)-> itensString += item
@debug itensString
@accessor('patternDestroyContainer').assign itens: itensString
###
Sets all events from the elements
###
setEvents: ->
@debug "Setando os eventos"
@libSupport(document).on 'click.modal', @accessor('defaultSelector'), @libSupport.proxy(@createModal, @)
###
Removendo todos os eventos do RestDelete
###
unsetEvents: ->
@debug 'Removendo os eventos'
@libSupport(document).off '.restdelete'
@debugPrefix: "Joker_RestDelete"
@className : "Joker_RestDelete"
@defaultSelector: "[data-destroy]"
@patternDestroyContainer: """
<dl>
{itens}
</dl>
<div class="row-fluid">
<div class="span6">
<button class="btn btn-danger btn-block">excluir</button>
</div>
<div class="span6">
<button class="btn btn-info btn-block">cancelar</button>
</div>
</div>
"""
@patternDestroyItens: """
<dt>{name}:</dt>
<dd>{value}</dd>
"""
###
@type [Joker.RestDelete]
###
@instance : undefined
###
Retorna a variavel unica para a instacia do objeto
@returns [Joker.RestDelete]
###
@getInstance: ->
Joker.RestDelete.instance = new Joker.RestDelete() unless Joker.RestDelete.instance?
Joker.RestDelete.instance | 215003 | ###
@summary Joker
@description Framework of RIAs applications
@version 1.0.0
@file RestDelete.js
@author <NAME> (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.RestDelete extends Joker.Core
modal: undefined
overlay: undefined
params: undefined
constructor: ->
super
@setEvents()
###
Faz a requisição para excluir um
registro do Banco de dados
@param jQueryEvent event
@param Joker.Modal modal
###
ajaxDelete: (event, modal)->
el = event.currentTarget
new Joker.Ajax
url: el.href
method: "DELETE"
callbacks:
error: (error)->
modal.destroy()
console.log error
new Joker.Alert
type: Joker.Alert.TYPE_ERROR
message: Joker.I18n.t('joker.restDelete.error.ajax_delete')
success: =>
modal.destroy()
@libSupport(document.getElementById el.dataset.reference ).remove()
new Joker.Alert
type: Joker.Alert.TYPE_SUCCESS
message: Joker.I18n.t('joker.restDelete.successf.ajax_delete')
###
Responsavel por criar os elementos do
modal, sendo que cada um tem sua funcao
###
createModal: (e)->
new Joker.Ajax
url: e.currentTarget.href
callbacks:
error: (error)->
console.log error
new Joker.Alert
type: Joker.Alert.TYPE_ERROR
message: Joker.I18n.t('joker.restDelete.error.create_modal')
success: (data)=>
@debug 'Gerando o Modal de confirmação, com a configuração: ', data
modal = new Joker.Modal
title: e.currentTarget.dataset.title
content: @generateContent(data)
callbacks:
beforeCreate: (modal)=>
modal.modal.find('.btn-danger').on 'click', =>
@ajaxDelete(e, modal)
modal.modal.find('.btn-info').on 'click', @libSupport.proxy(modal.destroy, modal)
false
generateContent: (data)->
itens = []
for key, value of data
if Object.isString(value) or Object.isNumber(value)
itens.add @accessor('patternDestroyItens').assign
name: key.capitalize()
value: value
itensString = ''
itens.forEach (item)-> itensString += item
@debug itensString
@accessor('patternDestroyContainer').assign itens: itensString
###
Sets all events from the elements
###
setEvents: ->
@debug "Setando os eventos"
@libSupport(document).on 'click.modal', @accessor('defaultSelector'), @libSupport.proxy(@createModal, @)
###
Removendo todos os eventos do RestDelete
###
unsetEvents: ->
@debug 'Removendo os eventos'
@libSupport(document).off '.restdelete'
@debugPrefix: "Joker_RestDelete"
@className : "Joker_RestDelete"
@defaultSelector: "[data-destroy]"
@patternDestroyContainer: """
<dl>
{itens}
</dl>
<div class="row-fluid">
<div class="span6">
<button class="btn btn-danger btn-block">excluir</button>
</div>
<div class="span6">
<button class="btn btn-info btn-block">cancelar</button>
</div>
</div>
"""
@patternDestroyItens: """
<dt>{name}:</dt>
<dd>{value}</dd>
"""
###
@type [Joker.RestDelete]
###
@instance : undefined
###
Retorna a variavel unica para a instacia do objeto
@returns [Joker.RestDelete]
###
@getInstance: ->
Joker.RestDelete.instance = new Joker.RestDelete() unless Joker.RestDelete.instance?
Joker.RestDelete.instance | true | ###
@summary Joker
@description Framework of RIAs applications
@version 1.0.0
@file RestDelete.js
@author PI:NAME:<NAME>END_PI (http://welington.zaez.net/)
@contact http://jokerjs.zaez.net/contato
@copyright Copyright 2013 Zaez Solucoes em Tecnologia, all rights reserved.
This source file is free software, under the license MIT, available at:
http://jokerjs.zaez.net/license
This source file is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE. See the license files for details.
For details please refer to: http://jokerjs.zaez.net
###
###
###
class Joker.RestDelete extends Joker.Core
modal: undefined
overlay: undefined
params: undefined
constructor: ->
super
@setEvents()
###
Faz a requisição para excluir um
registro do Banco de dados
@param jQueryEvent event
@param Joker.Modal modal
###
ajaxDelete: (event, modal)->
el = event.currentTarget
new Joker.Ajax
url: el.href
method: "DELETE"
callbacks:
error: (error)->
modal.destroy()
console.log error
new Joker.Alert
type: Joker.Alert.TYPE_ERROR
message: Joker.I18n.t('joker.restDelete.error.ajax_delete')
success: =>
modal.destroy()
@libSupport(document.getElementById el.dataset.reference ).remove()
new Joker.Alert
type: Joker.Alert.TYPE_SUCCESS
message: Joker.I18n.t('joker.restDelete.successf.ajax_delete')
###
Responsavel por criar os elementos do
modal, sendo que cada um tem sua funcao
###
createModal: (e)->
new Joker.Ajax
url: e.currentTarget.href
callbacks:
error: (error)->
console.log error
new Joker.Alert
type: Joker.Alert.TYPE_ERROR
message: Joker.I18n.t('joker.restDelete.error.create_modal')
success: (data)=>
@debug 'Gerando o Modal de confirmação, com a configuração: ', data
modal = new Joker.Modal
title: e.currentTarget.dataset.title
content: @generateContent(data)
callbacks:
beforeCreate: (modal)=>
modal.modal.find('.btn-danger').on 'click', =>
@ajaxDelete(e, modal)
modal.modal.find('.btn-info').on 'click', @libSupport.proxy(modal.destroy, modal)
false
generateContent: (data)->
itens = []
for key, value of data
if Object.isString(value) or Object.isNumber(value)
itens.add @accessor('patternDestroyItens').assign
name: key.capitalize()
value: value
itensString = ''
itens.forEach (item)-> itensString += item
@debug itensString
@accessor('patternDestroyContainer').assign itens: itensString
###
Sets all events from the elements
###
setEvents: ->
@debug "Setando os eventos"
@libSupport(document).on 'click.modal', @accessor('defaultSelector'), @libSupport.proxy(@createModal, @)
###
Removendo todos os eventos do RestDelete
###
unsetEvents: ->
@debug 'Removendo os eventos'
@libSupport(document).off '.restdelete'
@debugPrefix: "Joker_RestDelete"
@className : "Joker_RestDelete"
@defaultSelector: "[data-destroy]"
@patternDestroyContainer: """
<dl>
{itens}
</dl>
<div class="row-fluid">
<div class="span6">
<button class="btn btn-danger btn-block">excluir</button>
</div>
<div class="span6">
<button class="btn btn-info btn-block">cancelar</button>
</div>
</div>
"""
@patternDestroyItens: """
<dt>{name}:</dt>
<dd>{value}</dd>
"""
###
@type [Joker.RestDelete]
###
@instance : undefined
###
Retorna a variavel unica para a instacia do objeto
@returns [Joker.RestDelete]
###
@getInstance: ->
Joker.RestDelete.instance = new Joker.RestDelete() unless Joker.RestDelete.instance?
Joker.RestDelete.instance |
[
{
"context": "(done)->\n client\n .setValue \"#username\", \"admin\"\n .setValue \"#password\", \"password\"\n .s",
"end": 556,
"score": 0.9038124680519104,
"start": 551,
"tag": "USERNAME",
"value": "admin"
},
{
"context": "#username\", \"admin\"\n .setValue \"... | test/UiSpec.coffee | dni/oophaga | 0 | webdriverio = require('webdriverio')
options = desiredCapabilities: browserName: 'firefox'
client = webdriverio.remote(options)
App = require "../app"
describe "UI - Usertests", ->
server = {}
apiUrl = "http://localhost:1666"
before (done)->
server = App
port: 1666
dbname: "oophaga-test"
adminroute: "/"
, ->
client
.init()
.url(apiUrl)
.call done
after (done)->
client.end()
server.close done
it "login should work", (done)->
client
.setValue "#username", "admin"
.setValue "#password", "password"
.submitForm "#loginform"
.call done
it "specrunner shouldnt fail", (done)->
client
.url("#{apiUrl}/specrunner.html")
.pause 3000
.call done
| 36105 | webdriverio = require('webdriverio')
options = desiredCapabilities: browserName: 'firefox'
client = webdriverio.remote(options)
App = require "../app"
describe "UI - Usertests", ->
server = {}
apiUrl = "http://localhost:1666"
before (done)->
server = App
port: 1666
dbname: "oophaga-test"
adminroute: "/"
, ->
client
.init()
.url(apiUrl)
.call done
after (done)->
client.end()
server.close done
it "login should work", (done)->
client
.setValue "#username", "admin"
.setValue "#password", "<PASSWORD>"
.submitForm "#loginform"
.call done
it "specrunner shouldnt fail", (done)->
client
.url("#{apiUrl}/specrunner.html")
.pause 3000
.call done
| true | webdriverio = require('webdriverio')
options = desiredCapabilities: browserName: 'firefox'
client = webdriverio.remote(options)
App = require "../app"
describe "UI - Usertests", ->
server = {}
apiUrl = "http://localhost:1666"
before (done)->
server = App
port: 1666
dbname: "oophaga-test"
adminroute: "/"
, ->
client
.init()
.url(apiUrl)
.call done
after (done)->
client.end()
server.close done
it "login should work", (done)->
client
.setValue "#username", "admin"
.setValue "#password", "PI:PASSWORD:<PASSWORD>END_PI"
.submitForm "#loginform"
.call done
it "specrunner shouldnt fail", (done)->
client
.url("#{apiUrl}/specrunner.html")
.pause 3000
.call done
|
[
{
"context": " email: @config.email ? \"\"\n password: @config.password ? \"\"\n group: @config.group ? \"pimatic\"\n ",
"end": 775,
"score": 0.9940234422683716,
"start": 759,
"tag": "PASSWORD",
"value": "@config.password"
},
{
"context": ": @plugin.smartnoraCon... | pimatic-smartnora.coffee | bertreb/pimatic-smartnora | 0 | module.exports = (env) ->
Promise = env.require 'bluebird'
assert = env.require 'cassert'
switchAdapter = require('./adapters/switch')(env)
lightAdapter = require('./adapters/light')(env)
#lightColorAdapter = require('./adapters/lightcolor')(env)
#sensorAdapter = require('./adapters/sensor')(env)
buttonAdapter = require('./adapters/button')(env)
#blindAdapter = require('./adapters/blinds')(env)
_ = require('lodash')
M = env.matcher
class SmartNoraPlugin extends env.plugins.Plugin
init: (app, @framework, @config) =>
pluginConfigDef = require './pimatic-smartnora-config-schema'
@configProperties = pluginConfigDef.properties
@smartnoraConfig =
email: @config.email ? ""
password: @config.password ? ""
group: @config.group ? "pimatic"
homename: @config.home ? ""
localexecution: @config.localexecution ? false
twofactor: @config.twofa ? "node"
twofactorpin: @config.twofapin ? "0000"
@adapters = {}
deviceConfigDef = require("./device-config-schema")
@framework.deviceManager.registerDeviceClass('SmartNoraDevice', {
configDef: deviceConfigDef.SmartNoraDevice,
createCallback: (config, lastState) => new SmartNoraDevice(config, lastState, @framework, @)
})
@pimaticReady = false
@framework.on 'server listen', (res)=>
@pimaticReady = true
@emit 'pimaticReady'
###
process.on "exit", () =>
tobeDestroyed = []
env.logger.debug "Close devices remove all adapters"
for i, adapter of @adapters
_i = i
tobeDestroyed.push removeAdapter = (_i)=>
return new Promise (resolve,reject) =>
@adapters[id].destroy()
.then (id)=>
env.logger.debug "Destroy executed"
delete @adapters[id]
env.logger.debug "Adapter #{id} deleted"
resolve()
env.logger.debug "Nr tobeDestroyed adapters: " + JSON.stringify(tobeDestroyed,null,2)
Promise.all(tobeDestroyed)
.then ()=>
env.logger.debug "Adapters deleted, waiting for exit..."
setTimeout(()=>
process.exit()
env.logger.debug "Stopping plugin SmartNora"
, 10000)
###
class SmartNoraDevice extends env.devices.PresenceSensor
constructor: (@config, lastState, @framework, plugin) ->
#@config = config
@id = @config.id
@name = @config.name
@plugin = plugin
@adapters = @plugin.adapters
@smartnoraConfig =
email: @plugin.smartnoraConfig.email
password: @plugin.smartnoraConfig.password
group: @plugin.smartnoraConfig.group
homename: @plugin.smartnoraConfig.homename
localexecution: @plugin.smartnoraConfig.localexecution
twofactorGlobal: @plugin.smartnoraConfig.twofactor ? "node"
twofactorpinGlobal: @plugin.smartnoraConfig.twofactorpin ? "0000"
twofactorLocal: "none"
twofactorpinLocal: "0000"
noraf: null
@_presence = lastState?.presence?.value or off
@devMgr = @framework.deviceManager
@configDevices = []
@nrOfDevices = 0
if @plugin.pimaticReady
env.logger.debug "Starting SmartNora..."
@initSmartNora()
@plugin.on 'pimaticReady', @pimaticReadyHandler = ()=>
env.logger.debug "Starting SmartNora..."
@initSmartNora()
@framework.on "deviceRemoved", (device) =>
if _.find(@config.devices, (d) => d.pimatic_device_id == device.id or d.pimatic_subdevice_id == device.id)
#throw new Error "Please remove device also in Assistant"
env.logger.info "Please remove device also in Smartnora!"
@framework.on "deviceChanged", (device) =>
if device.config.class is "ButtonsDevice"
_device = _.find(@config.devices, (d) => d.pimatic_device_id == device.id)
if _device?
unless _.find(device.config.buttons, (b)=> b.id == _device.pimatic_subdevice_id)
#throw new Error "Please remove device also in Assistant"
env.logger.info "Please remove button also in Smartnora!"
super()
initSmartNora: ()=>
checkMultipleDevices = []
for _device in @config.devices
do(_device) =>
if _.find(checkMultipleDevices, (d) => d.pimatic_device_id is _device.pimatic_device_id and d.pimatic_subdevice_id is _device.pimatic_device_id)?
env.logger.info "Pimatic device '#{_device.pimatic_device_id}' is already used"
else
_fullDevice = @framework.deviceManager.getDeviceById(_device.pimatic_device_id)
if _fullDevice?
if @selectAdapter(_fullDevice, _device.auxiliary, _device.auxiliary2)?
if _fullDevice.config.class is "ButtonsDevice"
_button = _.find(_fullDevice.config.buttons, (b) => _device.pimatic_subdevice_id == b.id)
if _button?
checkMultipleDevices.push _device
@configDevices.push _device
else
#throw new Error "Please remove button in Assistant"
env.logger.info "Please remove button also in Smartnora!"
else
checkMultipleDevices.push _device
@configDevices.push _device
else
env.logger.info "Pimatic device class '#{_fullDevice.config.class}' is not supported"
else
env.logger.info "Pimatic device '#{_device.pimatic_device_id}' does not excist"
@nrOfDevices = _.size(@configDevices)
if @nrOfDevices > 0 then @_setPresence(on) else @_setPresence(off)
@syncDevices(@configDevices)
syncDevices: (configDevices) =>
for i, adapter of @adapters
env.logger.debug "Adapter.id " + adapter.id + ", d.name: " + JSON.stringify(configDevices,null,2)
unless _.find(configDevices, (d)=> d.name is i)
adapter.destroy()
.then (id) =>
env.logger.debug "deleting adapter " + id
delete @adapters[id]
env.logger.debug "Remaining adapters: " + (_.keys(@adapters))
else
env.logger.debug "Adapter #{adapter.id} already exists"
addDevicesList = []
for device in configDevices
addDevicesList.push device
env.logger.debug "Smartnora device added: " + device.name
@addDevices(addDevicesList)
.then (newDevices) =>
@devices = newDevices
.catch (e) =>
env.logger.debug "error addDevices: " + JSON.stringify(e,null,2)
addDevices: (configDevices) =>
return new Promise((resolve,reject) =>
devices = {}
for _device, key in configDevices
pimaticDevice = @devMgr.getDeviceById(_device.pimatic_device_id)
_newDevice = null
if pimaticDevice?
pimaticDeviceId = _device.pimatic_device_id
env.logger.debug "pimaticDeviceId: " + pimaticDeviceId
if @plugin.smartnoraConfig.twofactor is "node"
# set device specific 2FA settings
@smartnoraConfig.twofactorLocal = _device.twofa ? "none"
@smartnoraConfig.twofactorpinLocal = _device.twofapin ? "0000"
#env.logger.debug "Device #{_device.id}, config3: " + JSON.stringify(@smartnoraConfig,null,2)
try
selectedAdapter = @selectAdapter(pimaticDevice, _device.auxiliary, _device.auxiliary2)
switch selectedAdapter
when "switch"
@adapters[pimaticDeviceId] = new switchAdapter(_device, pimaticDevice, @smartnoraConfig)
when "light"
@adapters[pimaticDeviceId] = new lightAdapter(_device, pimaticDevice, @smartnoraConfig)
when "button"
_pimaticDeviceId = pimaticDeviceId + '.' + _device.pimatic_subdevice_id
@adapters[_pimaticDeviceId] = new buttonAdapter(_device, pimaticDevice, @smartnoraConfig)
else
env.logger.debug "Device type #{pimaticDevice.config.class} is not supported!"
catch e
env.logger.debug "Error new adapter: " + JSON.stringify(e,null,2)
resolve(devices)
)
selectAdapter: (pimaticDevice, aux1, aux2) =>
_foundAdapter = null
if (pimaticDevice.config.class).toLowerCase().indexOf("switch") >= 0
_foundAdapter = "switch"
else if (pimaticDevice.config.class).toLowerCase().indexOf("dimmer") >= 0
_foundAdapter = "light"
else if pimaticDevice instanceof env.devices.ButtonsDevice
_foundAdapter = "button"
###
else if pimaticDevice instanceof env.devices.ShutterController
_foundAdapter = "blind"
if pimaticDevice.config.class is "MilightRGBWZone" or pimaticDevice.config.class is "MilightFullColorZone"
_foundAdapter = "lightColorMilight"
if ((pimaticDevice.config.class).toLowerCase()).indexOf("rgb") >= 0
_foundAdapter = "lightColor"
else if ((pimaticDevice.config.class).toLowerCase()).indexOf("ct") >= 0
_foundAdapter = "lightTemperature"
else if (pimaticDevice.config.class).indexOf("Dimmer") >= 0
_foundAdapter = "light"
else if ((pimaticDevice.config.id).toLowerCase()).indexOf("vacuum") >= 0
_foundAdapter = "vacuum"
else if (pimaticDevice.config.class).indexOf("Dimmer") >= 0
_foundAdapter = "light"
else if (pimaticDevice.config.name).indexOf("lock") >= 0
_foundAdapter = "lock"
if ((pimaticDevice.config.class).toLowerCase()).indexOf("led") >= 0
_foundAdapter = "lightColor"
else if (pimaticDevice.config.class).toLowerCase().indexOf("luftdaten") >= 0
_foundAdapter = "sensor"
else if pimaticDevice instanceof env.devices.DummyHeatingThermostat
_foundAdapter = "heatingThermostat"
else if pimaticDevice.config.class is "DummyThermostat"
_foundAdapter = "dummyThermostat"
else if pimaticDevice instanceof env.devices.DummyHeatingThermostat
_foundAdapter = "heatingThermostat"
else if pimaticDevice.config.class is "DummyThermostat"
_foundAdapter = "dummyThermostat"
else if pimaticDevice.hasAttribute(aux1)
_foundAdapter = "temperature"
###
if _foundAdapter?
env.logger.debug _foundAdapter + " device found"
return _foundAdapter
destroy: =>
@plugin.removeListener 'pimaticReady', @pimaticReadyHandler if @pimaticReadyHandler?
for i, adapter of @adapters
_i = i
@plugin.adapters[_i].destroy()
delete @plugin.adapters[_i]
super()
plugin = new SmartNoraPlugin
return plugin
| 159911 | module.exports = (env) ->
Promise = env.require 'bluebird'
assert = env.require 'cassert'
switchAdapter = require('./adapters/switch')(env)
lightAdapter = require('./adapters/light')(env)
#lightColorAdapter = require('./adapters/lightcolor')(env)
#sensorAdapter = require('./adapters/sensor')(env)
buttonAdapter = require('./adapters/button')(env)
#blindAdapter = require('./adapters/blinds')(env)
_ = require('lodash')
M = env.matcher
class SmartNoraPlugin extends env.plugins.Plugin
init: (app, @framework, @config) =>
pluginConfigDef = require './pimatic-smartnora-config-schema'
@configProperties = pluginConfigDef.properties
@smartnoraConfig =
email: @config.email ? ""
password: <PASSWORD> ? ""
group: @config.group ? "pimatic"
homename: @config.home ? ""
localexecution: @config.localexecution ? false
twofactor: @config.twofa ? "node"
twofactorpin: @config.twofapin ? "0000"
@adapters = {}
deviceConfigDef = require("./device-config-schema")
@framework.deviceManager.registerDeviceClass('SmartNoraDevice', {
configDef: deviceConfigDef.SmartNoraDevice,
createCallback: (config, lastState) => new SmartNoraDevice(config, lastState, @framework, @)
})
@pimaticReady = false
@framework.on 'server listen', (res)=>
@pimaticReady = true
@emit 'pimaticReady'
###
process.on "exit", () =>
tobeDestroyed = []
env.logger.debug "Close devices remove all adapters"
for i, adapter of @adapters
_i = i
tobeDestroyed.push removeAdapter = (_i)=>
return new Promise (resolve,reject) =>
@adapters[id].destroy()
.then (id)=>
env.logger.debug "Destroy executed"
delete @adapters[id]
env.logger.debug "Adapter #{id} deleted"
resolve()
env.logger.debug "Nr tobeDestroyed adapters: " + JSON.stringify(tobeDestroyed,null,2)
Promise.all(tobeDestroyed)
.then ()=>
env.logger.debug "Adapters deleted, waiting for exit..."
setTimeout(()=>
process.exit()
env.logger.debug "Stopping plugin SmartNora"
, 10000)
###
class SmartNoraDevice extends env.devices.PresenceSensor
constructor: (@config, lastState, @framework, plugin) ->
#@config = config
@id = @config.id
@name = @config.name
@plugin = plugin
@adapters = @plugin.adapters
@smartnoraConfig =
email: @plugin.smartnoraConfig.email
password: <PASSWORD>
group: @plugin.smartnoraConfig.group
homename: @plugin.smartnoraConfig.homename
localexecution: @plugin.smartnoraConfig.localexecution
twofactorGlobal: @plugin.smartnoraConfig.twofactor ? "node"
twofactorpinGlobal: @plugin.smartnoraConfig.twofactorpin ? "0000"
twofactorLocal: "none"
twofactorpinLocal: "0000"
noraf: null
@_presence = lastState?.presence?.value or off
@devMgr = @framework.deviceManager
@configDevices = []
@nrOfDevices = 0
if @plugin.pimaticReady
env.logger.debug "Starting SmartNora..."
@initSmartNora()
@plugin.on 'pimaticReady', @pimaticReadyHandler = ()=>
env.logger.debug "Starting SmartNora..."
@initSmartNora()
@framework.on "deviceRemoved", (device) =>
if _.find(@config.devices, (d) => d.pimatic_device_id == device.id or d.pimatic_subdevice_id == device.id)
#throw new Error "Please remove device also in Assistant"
env.logger.info "Please remove device also in Smartnora!"
@framework.on "deviceChanged", (device) =>
if device.config.class is "ButtonsDevice"
_device = _.find(@config.devices, (d) => d.pimatic_device_id == device.id)
if _device?
unless _.find(device.config.buttons, (b)=> b.id == _device.pimatic_subdevice_id)
#throw new Error "Please remove device also in Assistant"
env.logger.info "Please remove button also in Smartnora!"
super()
initSmartNora: ()=>
checkMultipleDevices = []
for _device in @config.devices
do(_device) =>
if _.find(checkMultipleDevices, (d) => d.pimatic_device_id is _device.pimatic_device_id and d.pimatic_subdevice_id is _device.pimatic_device_id)?
env.logger.info "Pimatic device '#{_device.pimatic_device_id}' is already used"
else
_fullDevice = @framework.deviceManager.getDeviceById(_device.pimatic_device_id)
if _fullDevice?
if @selectAdapter(_fullDevice, _device.auxiliary, _device.auxiliary2)?
if _fullDevice.config.class is "ButtonsDevice"
_button = _.find(_fullDevice.config.buttons, (b) => _device.pimatic_subdevice_id == b.id)
if _button?
checkMultipleDevices.push _device
@configDevices.push _device
else
#throw new Error "Please remove button in Assistant"
env.logger.info "Please remove button also in Smartnora!"
else
checkMultipleDevices.push _device
@configDevices.push _device
else
env.logger.info "Pimatic device class '#{_fullDevice.config.class}' is not supported"
else
env.logger.info "Pimatic device '#{_device.pimatic_device_id}' does not excist"
@nrOfDevices = _.size(@configDevices)
if @nrOfDevices > 0 then @_setPresence(on) else @_setPresence(off)
@syncDevices(@configDevices)
syncDevices: (configDevices) =>
for i, adapter of @adapters
env.logger.debug "Adapter.id " + adapter.id + ", d.name: " + JSON.stringify(configDevices,null,2)
unless _.find(configDevices, (d)=> d.name is i)
adapter.destroy()
.then (id) =>
env.logger.debug "deleting adapter " + id
delete @adapters[id]
env.logger.debug "Remaining adapters: " + (_.keys(@adapters))
else
env.logger.debug "Adapter #{adapter.id} already exists"
addDevicesList = []
for device in configDevices
addDevicesList.push device
env.logger.debug "Smartnora device added: " + device.name
@addDevices(addDevicesList)
.then (newDevices) =>
@devices = newDevices
.catch (e) =>
env.logger.debug "error addDevices: " + JSON.stringify(e,null,2)
addDevices: (configDevices) =>
return new Promise((resolve,reject) =>
devices = {}
for _device, key in configDevices
pimaticDevice = @devMgr.getDeviceById(_device.pimatic_device_id)
_newDevice = null
if pimaticDevice?
pimaticDeviceId = _device.pimatic_device_id
env.logger.debug "pimaticDeviceId: " + pimaticDeviceId
if @plugin.smartnoraConfig.twofactor is "node"
# set device specific 2FA settings
@smartnoraConfig.twofactorLocal = _device.twofa ? "none"
@smartnoraConfig.twofactorpinLocal = _device.twofapin ? "0000"
#env.logger.debug "Device #{_device.id}, config3: " + JSON.stringify(@smartnoraConfig,null,2)
try
selectedAdapter = @selectAdapter(pimaticDevice, _device.auxiliary, _device.auxiliary2)
switch selectedAdapter
when "switch"
@adapters[pimaticDeviceId] = new switchAdapter(_device, pimaticDevice, @smartnoraConfig)
when "light"
@adapters[pimaticDeviceId] = new lightAdapter(_device, pimaticDevice, @smartnoraConfig)
when "button"
_pimaticDeviceId = pimaticDeviceId + '.' + _device.pimatic_subdevice_id
@adapters[_pimaticDeviceId] = new buttonAdapter(_device, pimaticDevice, @smartnoraConfig)
else
env.logger.debug "Device type #{pimaticDevice.config.class} is not supported!"
catch e
env.logger.debug "Error new adapter: " + JSON.stringify(e,null,2)
resolve(devices)
)
selectAdapter: (pimaticDevice, aux1, aux2) =>
_foundAdapter = null
if (pimaticDevice.config.class).toLowerCase().indexOf("switch") >= 0
_foundAdapter = "switch"
else if (pimaticDevice.config.class).toLowerCase().indexOf("dimmer") >= 0
_foundAdapter = "light"
else if pimaticDevice instanceof env.devices.ButtonsDevice
_foundAdapter = "button"
###
else if pimaticDevice instanceof env.devices.ShutterController
_foundAdapter = "blind"
if pimaticDevice.config.class is "MilightRGBWZone" or pimaticDevice.config.class is "MilightFullColorZone"
_foundAdapter = "lightColorMilight"
if ((pimaticDevice.config.class).toLowerCase()).indexOf("rgb") >= 0
_foundAdapter = "lightColor"
else if ((pimaticDevice.config.class).toLowerCase()).indexOf("ct") >= 0
_foundAdapter = "lightTemperature"
else if (pimaticDevice.config.class).indexOf("Dimmer") >= 0
_foundAdapter = "light"
else if ((pimaticDevice.config.id).toLowerCase()).indexOf("vacuum") >= 0
_foundAdapter = "vacuum"
else if (pimaticDevice.config.class).indexOf("Dimmer") >= 0
_foundAdapter = "light"
else if (pimaticDevice.config.name).indexOf("lock") >= 0
_foundAdapter = "lock"
if ((pimaticDevice.config.class).toLowerCase()).indexOf("led") >= 0
_foundAdapter = "lightColor"
else if (pimaticDevice.config.class).toLowerCase().indexOf("luftdaten") >= 0
_foundAdapter = "sensor"
else if pimaticDevice instanceof env.devices.DummyHeatingThermostat
_foundAdapter = "heatingThermostat"
else if pimaticDevice.config.class is "DummyThermostat"
_foundAdapter = "dummyThermostat"
else if pimaticDevice instanceof env.devices.DummyHeatingThermostat
_foundAdapter = "heatingThermostat"
else if pimaticDevice.config.class is "DummyThermostat"
_foundAdapter = "dummyThermostat"
else if pimaticDevice.hasAttribute(aux1)
_foundAdapter = "temperature"
###
if _foundAdapter?
env.logger.debug _foundAdapter + " device found"
return _foundAdapter
destroy: =>
@plugin.removeListener 'pimaticReady', @pimaticReadyHandler if @pimaticReadyHandler?
for i, adapter of @adapters
_i = i
@plugin.adapters[_i].destroy()
delete @plugin.adapters[_i]
super()
plugin = new SmartNoraPlugin
return plugin
| true | module.exports = (env) ->
Promise = env.require 'bluebird'
assert = env.require 'cassert'
switchAdapter = require('./adapters/switch')(env)
lightAdapter = require('./adapters/light')(env)
#lightColorAdapter = require('./adapters/lightcolor')(env)
#sensorAdapter = require('./adapters/sensor')(env)
buttonAdapter = require('./adapters/button')(env)
#blindAdapter = require('./adapters/blinds')(env)
_ = require('lodash')
M = env.matcher
class SmartNoraPlugin extends env.plugins.Plugin
init: (app, @framework, @config) =>
pluginConfigDef = require './pimatic-smartnora-config-schema'
@configProperties = pluginConfigDef.properties
@smartnoraConfig =
email: @config.email ? ""
password: PI:PASSWORD:<PASSWORD>END_PI ? ""
group: @config.group ? "pimatic"
homename: @config.home ? ""
localexecution: @config.localexecution ? false
twofactor: @config.twofa ? "node"
twofactorpin: @config.twofapin ? "0000"
@adapters = {}
deviceConfigDef = require("./device-config-schema")
@framework.deviceManager.registerDeviceClass('SmartNoraDevice', {
configDef: deviceConfigDef.SmartNoraDevice,
createCallback: (config, lastState) => new SmartNoraDevice(config, lastState, @framework, @)
})
@pimaticReady = false
@framework.on 'server listen', (res)=>
@pimaticReady = true
@emit 'pimaticReady'
###
process.on "exit", () =>
tobeDestroyed = []
env.logger.debug "Close devices remove all adapters"
for i, adapter of @adapters
_i = i
tobeDestroyed.push removeAdapter = (_i)=>
return new Promise (resolve,reject) =>
@adapters[id].destroy()
.then (id)=>
env.logger.debug "Destroy executed"
delete @adapters[id]
env.logger.debug "Adapter #{id} deleted"
resolve()
env.logger.debug "Nr tobeDestroyed adapters: " + JSON.stringify(tobeDestroyed,null,2)
Promise.all(tobeDestroyed)
.then ()=>
env.logger.debug "Adapters deleted, waiting for exit..."
setTimeout(()=>
process.exit()
env.logger.debug "Stopping plugin SmartNora"
, 10000)
###
class SmartNoraDevice extends env.devices.PresenceSensor
  # Presence sensor whose state reflects sync health: "present" while at
  # least one configured pimatic device could be mapped to a SmartNora
  # adapter, "absent" otherwise.

  constructor: (@config, lastState, @framework, plugin) ->
    #@config = config
    @id = @config.id
    @name = @config.name
    @plugin = plugin
    # Shared adapter registry owned by the plugin, keyed by pimatic device
    # id (buttons use a composite "<deviceId>.<buttonId>" key, see addDevices).
    @adapters = @plugin.adapters
    # Per-device working copy of the plugin-level SmartNora settings; the
    # twofactor*Local fields are overwritten per device in addDevices.
    @smartnoraConfig =
      email: @plugin.smartnoraConfig.email
      # NOTE(review): this value was scrubbed in the source; restored to the
      # plugin-level password, mirroring the surrounding fields — confirm.
      password: @plugin.smartnoraConfig.password
      group: @plugin.smartnoraConfig.group
      homename: @plugin.smartnoraConfig.homename
      localexecution: @plugin.smartnoraConfig.localexecution
      twofactorGlobal: @plugin.smartnoraConfig.twofactor ? "node"
      twofactorpinGlobal: @plugin.smartnoraConfig.twofactorpin ? "0000"
      twofactorLocal: "none"
      twofactorpinLocal: "0000"
      noraf: null
    @_presence = lastState?.presence?.value or off
    @devMgr = @framework.deviceManager
    @configDevices = []
    @nrOfDevices = 0
    # Start syncing right away if pimatic is already up, otherwise wait for
    # the plugin's 'pimaticReady' event. The handler reference is kept so
    # destroy() can detach it.
    if @plugin.pimaticReady
      env.logger.debug "Starting SmartNora..."
      @initSmartNora()
    @plugin.on 'pimaticReady', @pimaticReadyHandler = () =>
      env.logger.debug "Starting SmartNora..."
      @initSmartNora()
    # Devices removed/changed in pimatic cannot be removed on the SmartNora
    # side from here, so only remind the user to clean up there as well.
    # FIX: keep references to both framework handlers so destroy() can
    # detach them — previously they leaked across device re-creation.
    @framework.on "deviceRemoved", @_deviceRemovedHandler = (device) =>
      if _.find(@config.devices, (d) => d.pimatic_device_id == device.id or d.pimatic_subdevice_id == device.id)
        #throw new Error "Please remove device also in Assistant"
        env.logger.info "Please remove device also in Smartnora!"
    @framework.on "deviceChanged", @_deviceChangedHandler = (device) =>
      if device.config.class is "ButtonsDevice"
        _device = _.find(@config.devices, (d) => d.pimatic_device_id == device.id)
        if _device?
          unless _.find(device.config.buttons, (b) => b.id == _device.pimatic_subdevice_id)
            #throw new Error "Please remove device also in Assistant"
            env.logger.info "Please remove button also in Smartnora!"
    super()

  # Validate the configured devices, collect the usable ones in
  # @configDevices, update presence accordingly and kick off adapter sync.
  initSmartNora: () =>
    checkMultipleDevices = []
    for _device in @config.devices
      do (_device) =>
        # FIX: the duplicate check used to compare d.pimatic_subdevice_id
        # against _device.pimatic_device_id, so two entries for the same
        # button subdevice were never detected as duplicates.
        if _.find(checkMultipleDevices, (d) => d.pimatic_device_id is _device.pimatic_device_id and d.pimatic_subdevice_id is _device.pimatic_subdevice_id)?
          env.logger.info "Pimatic device '#{_device.pimatic_device_id}' is already used"
        else
          _fullDevice = @framework.deviceManager.getDeviceById(_device.pimatic_device_id)
          if _fullDevice?
            if @selectAdapter(_fullDevice, _device.auxiliary, _device.auxiliary2)?
              if _fullDevice.config.class is "ButtonsDevice"
                # Buttons are exposed per sub-button; the configured button
                # must still exist on the pimatic ButtonsDevice.
                _button = _.find(_fullDevice.config.buttons, (b) => _device.pimatic_subdevice_id == b.id)
                if _button?
                  checkMultipleDevices.push _device
                  @configDevices.push _device
                else
                  #throw new Error "Please remove button in Assistant"
                  env.logger.info "Please remove button also in Smartnora!"
              else
                checkMultipleDevices.push _device
                @configDevices.push _device
            else
              env.logger.info "Pimatic device class '#{_fullDevice.config.class}' is not supported"
          else
            env.logger.info "Pimatic device '#{_device.pimatic_device_id}' does not exist"
    @nrOfDevices = _.size(@configDevices)
    if @nrOfDevices > 0 then @_setPresence(on) else @_setPresence(off)
    @syncDevices(@configDevices)

  # Destroy adapters whose device is no longer configured, then (re)create
  # adapters for the current configuration.
  syncDevices: (configDevices) =>
    for i, adapter of @adapters
      env.logger.debug "Adapter.id " + adapter.id + ", d.name: " + JSON.stringify(configDevices,null,2)
      # NOTE(review): adapter keys are pimatic device ids but are matched
      # against d.name here — verify those are always identical.
      unless _.find(configDevices, (d) => d.name is i)
        adapter.destroy()
        .then (id) =>
          env.logger.debug "deleting adapter " + id
          delete @adapters[id]
          env.logger.debug "Remaining adapters: " + (_.keys(@adapters))
      else
        env.logger.debug "Adapter #{adapter.id} already exists"
    addDevicesList = []
    for device in configDevices
      addDevicesList.push device
      env.logger.debug "Smartnora device added: " + device.name
    @addDevices(addDevicesList)
    .then (newDevices) =>
      @devices = newDevices
    .catch (e) =>
      env.logger.debug "error addDevices: " + JSON.stringify(e,null,2)

  # Create one adapter per configured device, registering each in
  # @adapters. Resolves with the (currently unused) devices map.
  addDevices: (configDevices) =>
    return new Promise((resolve,reject) =>
      devices = {}
      for _device, key in configDevices
        pimaticDevice = @devMgr.getDeviceById(_device.pimatic_device_id)
        _newDevice = null
        if pimaticDevice?
          pimaticDeviceId = _device.pimatic_device_id
          env.logger.debug "pimaticDeviceId: " + pimaticDeviceId
          if @plugin.smartnoraConfig.twofactor is "node"
            # set device specific 2FA settings
            @smartnoraConfig.twofactorLocal = _device.twofa ? "none"
            @smartnoraConfig.twofactorpinLocal = _device.twofapin ? "0000"
          #env.logger.debug "Device #{_device.id}, config3: " + JSON.stringify(@smartnoraConfig,null,2)
          try
            selectedAdapter = @selectAdapter(pimaticDevice, _device.auxiliary, _device.auxiliary2)
            switch selectedAdapter
              when "switch"
                @adapters[pimaticDeviceId] = new switchAdapter(_device, pimaticDevice, @smartnoraConfig)
              when "light"
                @adapters[pimaticDeviceId] = new lightAdapter(_device, pimaticDevice, @smartnoraConfig)
              when "button"
                # Composite key so several buttons of one ButtonsDevice can
                # coexist in the registry.
                _pimaticDeviceId = pimaticDeviceId + '.' + _device.pimatic_subdevice_id
                @adapters[_pimaticDeviceId] = new buttonAdapter(_device, pimaticDevice, @smartnoraConfig)
              else
                env.logger.debug "Device type #{pimaticDevice.config.class} is not supported!"
          catch e
            env.logger.debug "Error new adapter: " + JSON.stringify(e,null,2)
      resolve(devices)
    )

  # Map a pimatic device onto one of the supported adapter kinds
  # ("switch", "light", "button"); returns null/undefined when unsupported.
  # aux1/aux2 are currently unused by the active branches.
  selectAdapter: (pimaticDevice, aux1, aux2) =>
    _foundAdapter = null
    if (pimaticDevice.config.class).toLowerCase().indexOf("switch") >= 0
      _foundAdapter = "switch"
    else if (pimaticDevice.config.class).toLowerCase().indexOf("dimmer") >= 0
      _foundAdapter = "light"
    else if pimaticDevice instanceof env.devices.ButtonsDevice
      _foundAdapter = "button"
    ###
    else if pimaticDevice instanceof env.devices.ShutterController
      _foundAdapter = "blind"
      if pimaticDevice.config.class is "MilightRGBWZone" or pimaticDevice.config.class is "MilightFullColorZone"
        _foundAdapter = "lightColorMilight"
      if ((pimaticDevice.config.class).toLowerCase()).indexOf("rgb") >= 0
        _foundAdapter = "lightColor"
      else if ((pimaticDevice.config.class).toLowerCase()).indexOf("ct") >= 0
        _foundAdapter = "lightTemperature"
      else if (pimaticDevice.config.class).indexOf("Dimmer") >= 0
        _foundAdapter = "light"
      else if ((pimaticDevice.config.id).toLowerCase()).indexOf("vacuum") >= 0
        _foundAdapter = "vacuum"
      else if (pimaticDevice.config.class).indexOf("Dimmer") >= 0
        _foundAdapter = "light"
      else if (pimaticDevice.config.name).indexOf("lock") >= 0
        _foundAdapter = "lock"
      if ((pimaticDevice.config.class).toLowerCase()).indexOf("led") >= 0
        _foundAdapter = "lightColor"
      else if (pimaticDevice.config.class).toLowerCase().indexOf("luftdaten") >= 0
        _foundAdapter = "sensor"
      else if pimaticDevice instanceof env.devices.DummyHeatingThermostat
        _foundAdapter = "heatingThermostat"
      else if pimaticDevice.config.class is "DummyThermostat"
        _foundAdapter = "dummyThermostat"
      else if pimaticDevice instanceof env.devices.DummyHeatingThermostat
        _foundAdapter = "heatingThermostat"
      else if pimaticDevice.config.class is "DummyThermostat"
        _foundAdapter = "dummyThermostat"
      else if pimaticDevice.hasAttribute(aux1)
        _foundAdapter = "temperature"
    ###
    if _foundAdapter?
      env.logger.debug _foundAdapter + " device found"
    return _foundAdapter

  # Tear down: detach all event handlers registered in the constructor and
  # destroy/deregister every adapter owned by this device.
  destroy: =>
    @plugin.removeListener 'pimaticReady', @pimaticReadyHandler if @pimaticReadyHandler?
    @framework.removeListener "deviceRemoved", @_deviceRemovedHandler if @_deviceRemovedHandler?
    @framework.removeListener "deviceChanged", @_deviceChangedHandler if @_deviceChangedHandler?
    for i, adapter of @adapters
      _i = i
      @plugin.adapters[_i].destroy()
      delete @plugin.adapters[_i]
    super()
plugin = new SmartNoraPlugin
return plugin
|
[
{
"context": "'insert text': (test) ->\n doc = new Doc {text:\"Hello there!\"}\n doc.at('text').insert 11, ', ShareJS'\n ",
"end": 1452,
"score": 0.5460481643676758,
"start": 1441,
"tag": "NAME",
"value": "Hello there"
}
] | node_modules/share/test/types/json-api.coffee | LaPingvino/rizzoma | 88 | assert = require 'assert'
json = require '../../src/types/json'
require '../../src/types/json-api'
MicroEvent = require '../../src/client/microevent'
# Minimal stand-in for a ShareJS document: holds a JSON snapshot and
# applies locally submitted ops to it, emitting 'change' so the json API
# mixin can keep its listeners in sync.
Doc = (initialData) ->
  @snapshot = if initialData? then initialData else json.create()
  @type = json
  @submitOp = (op) ->
    @snapshot = json.apply @snapshot, op
    @emit 'change', op
  @_register()

# Expose the json OT type's client API on every Doc instance and give Doc
# event-emitter capabilities (on/emit) via MicroEvent.
Doc.prototype = json.api
MicroEvent.mixin Doc
# Nodeunit-style tests for the client json API (json-api) layered on the
# json OT type. Docs are driven either through the mutation helpers
# (set/insert/del/push/move/add), which submit real ops, or by emitting a
# synthetic 'remoteop' event to exercise listener dispatch and listener
# path re-addressing.
module.exports =

  # get()/getAt() read straight out of the snapshot.
  sanity: (test) ->
    doc = new Doc 'hi'
    assert.equal doc.get(), 'hi'

    doc = new Doc {hello:'world'}
    assert.equal doc.getAt(['hello']), 'world'
    test.done()

  'getAt': (test) ->
    doc = new Doc {hi:[1,2,3]}
    assert.equal doc.getAt(['hi', 2]), 3
    test.done()

  # at(path) returns a sub-document handle addressing a path in the snapshot.
  'sub-doc get': (test) ->
    doc = new Doc {hi:[1,2,3]}
    hi = doc.at 'hi'
    assert.deepEqual hi.get(), [1,2,3]
    assert.equal hi.at(2).get(), 3
    test.done()

  'object set': (test) ->
    doc = new Doc
    doc.at().set {hello:'world'}
    assert.deepEqual doc.get(), {hello:'world'}

    doc.at('hello').set 'blah'
    assert.deepEqual doc.get(), {hello:'blah'}
    test.done()

  'list set': (test) ->
    doc = new Doc [1,2,3]
    doc.at(1).set 5
    assert.deepEqual doc.get(), [1,5,3]
    test.done()

  # remove() deletes a list element or an object key.
  'remove': (test) ->
    doc = new Doc {hi:[1,2,3]}
    hi = doc.at('hi')
    hi.at(0).remove()
    assert.deepEqual doc.get(), {hi:[2,3]}
    hi.remove()
    assert.deepEqual doc.get(), {}
    test.done()

  # String editing inside a document field (si/sd ops).
  'insert text': (test) ->
    doc = new Doc {text:"Hello there!"}
    doc.at('text').insert 11, ', ShareJS'
    assert.deepEqual doc.get(), {text:'Hello there, ShareJS!'}
    test.done()

  'delete text': (test) ->
    doc = new Doc {text:"Sup, share?"}
    doc.at('text').del(3, 7)
    assert.deepEqual doc.get(), {text:'Sup?'}
    test.done()

  'list insert': (test) ->
    doc = new Doc {nums:[1,2]}
    doc.at('nums').insert 0, 4
    assert.deepEqual doc.get(), {nums:[4,1,2]}
    test.done()

  'list push': (test) ->
    doc = new Doc {nums:[1,2]}
    doc.at('nums').push 3
    assert.deepEqual doc.get(), {nums:[1,2,3]}
    test.done()

  'list move': (test) ->
    doc = new Doc {list:[1,2,3,4]}
    list = doc.at('list')
    list.move(0,3)
    assert.deepEqual doc.get(), {list:[2,3,4,1]}
    test.done()

  # add() applies a numeric delta (na op).
  'number add': (test) ->
    doc = new Doc [1]
    doc.at(0).add(4)
    assert.deepEqual doc.get(), [5]
    test.done()

  # A remote op fires typed events ('insert', 'replace', ...) on the
  # sub-document addressing the affected path.
  'basic listeners': (test) ->
    doc = new Doc {list:[1]}
    doc.at('list').on 'insert', (pos, num) ->
      assert.equal num, 4
      assert.equal pos, 0
      test.done()
    doc.emit 'remoteop', [{p:['list',0],li:4}], doc.get()

  'object replace listener': (test) ->
    doc = new Doc {foo:'bar'}
    doc.at().on 'replace', (pos, before, after) ->
      assert.equal before, 'bar'
      assert.equal after, 'baz'
      assert.equal pos, 'foo'
      test.done()
    doc.emit 'remoteop', [{p:['foo'],od:'bar',oi:'baz'}]

  'list replace listener': (test) ->
    doc = new Doc ['bar']
    doc.at().on 'replace', (pos, before, after) ->
      assert.equal before, 'bar'
      assert.equal after, 'baz'
      assert.equal pos, 0
      test.done()
    doc.emit 'remoteop', [{p:[0],ld:'bar',li:'baz'}]

  # When list edits shift indices, listeners keep tracking the same
  # element: their addressed path is updated on li/ld/lm ops.
  'listener moves on li': (test) ->
    doc = new Doc ['bar']
    doc.at(0).on 'insert', (i, s) ->
      assert.equal s, 'foo'
      assert.equal i, 0
      test.done()
    doc.at().insert 0, 'asdf'
    doc.emit 'remoteop', [{p:[1,0], si:'foo'}]

  'listener moves on ld': (test) ->
    doc = new Doc ['asdf','bar']
    doc.at(1).on 'insert', (i, s) ->
      assert.equal s, 'foo'
      assert.equal i, 0
      test.done()
    doc.at(0).remove()
    doc.emit 'remoteop', [{p:[0,0], si:'foo'}]

  'listener moves on lm': (test) ->
    doc = new Doc ['asdf','bar']
    doc.at(1).on 'insert', (i, s) ->
      assert.equal s, 'foo'
      assert.equal i, 0
      test.done()
    doc.at().move(0,1)
    doc.emit 'remoteop', [{p:[0,0], si:'foo'}]

  # Replacing or deleting an element must drop listeners attached to the
  # old value: the handlers below would fail the test if still called.
  'listener drops on ld': (test) ->
    doc = new Doc [1]
    doc.at(0).on 'add', (x) ->
      assert.ok false
    doc.at(0).set 3
    doc.emit 'remoteop', [{p:[0], na:1}]
    test.done()

  'listener drops on od': (test) ->
    doc = new Doc {foo:'bar'}
    doc.at('foo').on 'text-insert', (text, pos) ->
      assert.ok false
    doc.at('foo').set('baz')
    doc.emit 'remoteop', [{p:['foo',0], si:'asdf'}]
    test.done()

  # 'child op' fires for any op at or below the listening node, with the
  # op path relative to that node.
  'child op one level': (test) ->
    doc = new Doc {foo:'bar'}
    doc.at().on 'child op', (p, op) ->
      assert.deepEqual p, ['foo',0]
      assert.equal op.si, 'baz'
      test.done()
    doc.emit 'remoteop', [{p:['foo',0], si:'baz'}]

  'child op two levels': (test) ->
    doc = new Doc {foo:['bar']}
    doc.at().on 'child op', (p, op) ->
      assert.deepEqual p, ['foo',0,3]
      assert.deepEqual op.si, 'baz'
      test.done()
    doc.emit 'remoteop', [{p:['foo',0,3],si:'baz'}]

  'child op path snipping': (test) ->
    doc = new Doc {foo:['bar']}
    doc.at('foo').on 'child op', (p, op) ->
      assert.deepEqual p, [0,3]
      assert.deepEqual op.si, 'baz'
      test.done()
    doc.emit 'remoteop', [{p:['foo',0,3],si:'baz'}]

  'child op not sent when op outside node': (test) ->
    doc = new Doc {foo:['bar']}
    doc.at('foo').on 'child op', ->
      assert.ok false
    doc.at('baz').set('hi')
    test.done()
| 9028 | assert = require 'assert'
json = require '../../src/types/json'
require '../../src/types/json-api'
MicroEvent = require '../../src/client/microevent'
Doc = (data) ->
@snapshot = data ? json.create()
@type = json
@submitOp = (op) ->
@snapshot = json.apply @snapshot, op
@emit 'change', op
@_register()
Doc.prototype = json.api
MicroEvent.mixin Doc
module.exports =
sanity: (test) ->
doc = new Doc 'hi'
assert.equal doc.get(), 'hi'
doc = new Doc {hello:'world'}
assert.equal doc.getAt(['hello']), 'world'
test.done()
'getAt': (test) ->
doc = new Doc {hi:[1,2,3]}
assert.equal doc.getAt(['hi', 2]), 3
test.done()
'sub-doc get': (test) ->
doc = new Doc {hi:[1,2,3]}
hi = doc.at 'hi'
assert.deepEqual hi.get(), [1,2,3]
assert.equal hi.at(2).get(), 3
test.done()
'object set': (test) ->
doc = new Doc
doc.at().set {hello:'world'}
assert.deepEqual doc.get(), {hello:'world'}
doc.at('hello').set 'blah'
assert.deepEqual doc.get(), {hello:'blah'}
test.done()
'list set': (test) ->
doc = new Doc [1,2,3]
doc.at(1).set 5
assert.deepEqual doc.get(), [1,5,3]
test.done()
'remove': (test) ->
doc = new Doc {hi:[1,2,3]}
hi = doc.at('hi')
hi.at(0).remove()
assert.deepEqual doc.get(), {hi:[2,3]}
hi.remove()
assert.deepEqual doc.get(), {}
test.done()
'insert text': (test) ->
doc = new Doc {text:"Hello there!"}
doc.at('text').insert 11, ', ShareJS'
assert.deepEqual doc.get(), {text:'Hello there, ShareJS!'}
test.done()
'delete text': (test) ->
doc = new Doc {text:"Sup, share?"}
doc.at('text').del(3, 7)
assert.deepEqual doc.get(), {text:'Sup?'}
test.done()
'list insert': (test) ->
doc = new Doc {nums:[1,2]}
doc.at('nums').insert 0, 4
assert.deepEqual doc.get(), {nums:[4,1,2]}
test.done()
'list push': (test) ->
doc = new Doc {nums:[1,2]}
doc.at('nums').push 3
assert.deepEqual doc.get(), {nums:[1,2,3]}
test.done()
'list move': (test) ->
doc = new Doc {list:[1,2,3,4]}
list = doc.at('list')
list.move(0,3)
assert.deepEqual doc.get(), {list:[2,3,4,1]}
test.done()
'number add': (test) ->
doc = new Doc [1]
doc.at(0).add(4)
assert.deepEqual doc.get(), [5]
test.done()
'basic listeners': (test) ->
doc = new Doc {list:[1]}
doc.at('list').on 'insert', (pos, num) ->
assert.equal num, 4
assert.equal pos, 0
test.done()
doc.emit 'remoteop', [{p:['list',0],li:4}], doc.get()
'object replace listener': (test) ->
doc = new Doc {foo:'bar'}
doc.at().on 'replace', (pos, before, after) ->
assert.equal before, 'bar'
assert.equal after, 'baz'
assert.equal pos, 'foo'
test.done()
doc.emit 'remoteop', [{p:['foo'],od:'bar',oi:'baz'}]
'list replace listener': (test) ->
doc = new Doc ['bar']
doc.at().on 'replace', (pos, before, after) ->
assert.equal before, 'bar'
assert.equal after, 'baz'
assert.equal pos, 0
test.done()
doc.emit 'remoteop', [{p:[0],ld:'bar',li:'baz'}]
'listener moves on li': (test) ->
doc = new Doc ['bar']
doc.at(0).on 'insert', (i, s) ->
assert.equal s, 'foo'
assert.equal i, 0
test.done()
doc.at().insert 0, 'asdf'
doc.emit 'remoteop', [{p:[1,0], si:'foo'}]
'listener moves on ld': (test) ->
doc = new Doc ['asdf','bar']
doc.at(1).on 'insert', (i, s) ->
assert.equal s, 'foo'
assert.equal i, 0
test.done()
doc.at(0).remove()
doc.emit 'remoteop', [{p:[0,0], si:'foo'}]
'listener moves on lm': (test) ->
doc = new Doc ['asdf','bar']
doc.at(1).on 'insert', (i, s) ->
assert.equal s, 'foo'
assert.equal i, 0
test.done()
doc.at().move(0,1)
doc.emit 'remoteop', [{p:[0,0], si:'foo'}]
'listener drops on ld': (test) ->
doc = new Doc [1]
doc.at(0).on 'add', (x) ->
assert.ok false
doc.at(0).set 3
doc.emit 'remoteop', [{p:[0], na:1}]
test.done()
'listener drops on od': (test) ->
doc = new Doc {foo:'bar'}
doc.at('foo').on 'text-insert', (text, pos) ->
assert.ok false
doc.at('foo').set('baz')
doc.emit 'remoteop', [{p:['foo',0], si:'asdf'}]
test.done()
'child op one level': (test) ->
doc = new Doc {foo:'bar'}
doc.at().on 'child op', (p, op) ->
assert.deepEqual p, ['foo',0]
assert.equal op.si, 'baz'
test.done()
doc.emit 'remoteop', [{p:['foo',0], si:'baz'}]
'child op two levels': (test) ->
doc = new Doc {foo:['bar']}
doc.at().on 'child op', (p, op) ->
assert.deepEqual p, ['foo',0,3]
assert.deepEqual op.si, 'baz'
test.done()
doc.emit 'remoteop', [{p:['foo',0,3],si:'baz'}]
'child op path snipping': (test) ->
doc = new Doc {foo:['bar']}
doc.at('foo').on 'child op', (p, op) ->
assert.deepEqual p, [0,3]
assert.deepEqual op.si, 'baz'
test.done()
doc.emit 'remoteop', [{p:['foo',0,3],si:'baz'}]
'child op not sent when op outside node': (test) ->
doc = new Doc {foo:['bar']}
doc.at('foo').on 'child op', ->
assert.ok false
doc.at('baz').set('hi')
test.done()
| true | assert = require 'assert'
json = require '../../src/types/json'
require '../../src/types/json-api'
MicroEvent = require '../../src/client/microevent'
Doc = (data) ->
@snapshot = data ? json.create()
@type = json
@submitOp = (op) ->
@snapshot = json.apply @snapshot, op
@emit 'change', op
@_register()
Doc.prototype = json.api
MicroEvent.mixin Doc
module.exports =
sanity: (test) ->
doc = new Doc 'hi'
assert.equal doc.get(), 'hi'
doc = new Doc {hello:'world'}
assert.equal doc.getAt(['hello']), 'world'
test.done()
'getAt': (test) ->
doc = new Doc {hi:[1,2,3]}
assert.equal doc.getAt(['hi', 2]), 3
test.done()
'sub-doc get': (test) ->
doc = new Doc {hi:[1,2,3]}
hi = doc.at 'hi'
assert.deepEqual hi.get(), [1,2,3]
assert.equal hi.at(2).get(), 3
test.done()
'object set': (test) ->
doc = new Doc
doc.at().set {hello:'world'}
assert.deepEqual doc.get(), {hello:'world'}
doc.at('hello').set 'blah'
assert.deepEqual doc.get(), {hello:'blah'}
test.done()
'list set': (test) ->
doc = new Doc [1,2,3]
doc.at(1).set 5
assert.deepEqual doc.get(), [1,5,3]
test.done()
'remove': (test) ->
doc = new Doc {hi:[1,2,3]}
hi = doc.at('hi')
hi.at(0).remove()
assert.deepEqual doc.get(), {hi:[2,3]}
hi.remove()
assert.deepEqual doc.get(), {}
test.done()
'insert text': (test) ->
doc = new Doc {text:"Hello there!"}
doc.at('text').insert 11, ', ShareJS'
assert.deepEqual doc.get(), {text:'Hello there, ShareJS!'}
test.done()
'delete text': (test) ->
doc = new Doc {text:"Sup, share?"}
doc.at('text').del(3, 7)
assert.deepEqual doc.get(), {text:'Sup?'}
test.done()
'list insert': (test) ->
doc = new Doc {nums:[1,2]}
doc.at('nums').insert 0, 4
assert.deepEqual doc.get(), {nums:[4,1,2]}
test.done()
'list push': (test) ->
doc = new Doc {nums:[1,2]}
doc.at('nums').push 3
assert.deepEqual doc.get(), {nums:[1,2,3]}
test.done()
'list move': (test) ->
doc = new Doc {list:[1,2,3,4]}
list = doc.at('list')
list.move(0,3)
assert.deepEqual doc.get(), {list:[2,3,4,1]}
test.done()
'number add': (test) ->
doc = new Doc [1]
doc.at(0).add(4)
assert.deepEqual doc.get(), [5]
test.done()
'basic listeners': (test) ->
doc = new Doc {list:[1]}
doc.at('list').on 'insert', (pos, num) ->
assert.equal num, 4
assert.equal pos, 0
test.done()
doc.emit 'remoteop', [{p:['list',0],li:4}], doc.get()
'object replace listener': (test) ->
doc = new Doc {foo:'bar'}
doc.at().on 'replace', (pos, before, after) ->
assert.equal before, 'bar'
assert.equal after, 'baz'
assert.equal pos, 'foo'
test.done()
doc.emit 'remoteop', [{p:['foo'],od:'bar',oi:'baz'}]
'list replace listener': (test) ->
doc = new Doc ['bar']
doc.at().on 'replace', (pos, before, after) ->
assert.equal before, 'bar'
assert.equal after, 'baz'
assert.equal pos, 0
test.done()
doc.emit 'remoteop', [{p:[0],ld:'bar',li:'baz'}]
'listener moves on li': (test) ->
doc = new Doc ['bar']
doc.at(0).on 'insert', (i, s) ->
assert.equal s, 'foo'
assert.equal i, 0
test.done()
doc.at().insert 0, 'asdf'
doc.emit 'remoteop', [{p:[1,0], si:'foo'}]
'listener moves on ld': (test) ->
doc = new Doc ['asdf','bar']
doc.at(1).on 'insert', (i, s) ->
assert.equal s, 'foo'
assert.equal i, 0
test.done()
doc.at(0).remove()
doc.emit 'remoteop', [{p:[0,0], si:'foo'}]
'listener moves on lm': (test) ->
doc = new Doc ['asdf','bar']
doc.at(1).on 'insert', (i, s) ->
assert.equal s, 'foo'
assert.equal i, 0
test.done()
doc.at().move(0,1)
doc.emit 'remoteop', [{p:[0,0], si:'foo'}]
'listener drops on ld': (test) ->
doc = new Doc [1]
doc.at(0).on 'add', (x) ->
assert.ok false
doc.at(0).set 3
doc.emit 'remoteop', [{p:[0], na:1}]
test.done()
'listener drops on od': (test) ->
doc = new Doc {foo:'bar'}
doc.at('foo').on 'text-insert', (text, pos) ->
assert.ok false
doc.at('foo').set('baz')
doc.emit 'remoteop', [{p:['foo',0], si:'asdf'}]
test.done()
'child op one level': (test) ->
doc = new Doc {foo:'bar'}
doc.at().on 'child op', (p, op) ->
assert.deepEqual p, ['foo',0]
assert.equal op.si, 'baz'
test.done()
doc.emit 'remoteop', [{p:['foo',0], si:'baz'}]
'child op two levels': (test) ->
doc = new Doc {foo:['bar']}
doc.at().on 'child op', (p, op) ->
assert.deepEqual p, ['foo',0,3]
assert.deepEqual op.si, 'baz'
test.done()
doc.emit 'remoteop', [{p:['foo',0,3],si:'baz'}]
'child op path snipping': (test) ->
doc = new Doc {foo:['bar']}
doc.at('foo').on 'child op', (p, op) ->
assert.deepEqual p, [0,3]
assert.deepEqual op.si, 'baz'
test.done()
doc.emit 'remoteop', [{p:['foo',0,3],si:'baz'}]
'child op not sent when op outside node': (test) ->
doc = new Doc {foo:['bar']}
doc.at('foo').on 'child op', ->
assert.ok false
doc.at('baz').set('hi')
test.done()
|
[
{
"context": "functions for React components detection\n# @author Yannick Croissant\n###\n'use strict'\n\nutil = require 'util'\ndoctrine ",
"end": 107,
"score": 0.9998360872268677,
"start": 90,
"tag": "NAME",
"value": "Yannick Croissant"
},
{
"context": "odes on initial AST traversa... | src/util/react/Components.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Utility class and functions for React components detection
# @author Yannick Croissant
###
'use strict'
util = require 'util'
doctrine = require 'doctrine'
variableUtil = require './variable'
pragmaUtil = require 'eslint-plugin-react/lib/util/pragma'
astUtil = require './ast'
propTypes = require './propTypes'
# Build a stable cache key for an AST node from its source range,
# e.g. range [12, 40] -> "12:40". Returns undefined when node is absent.
getId = (node) ->
  return undefined unless node?
  node.range.join ':'
# Two usedPropTypes entries are equivalent when their names match and,
# if either carries an allNames path, both carry the same joined path.
usedPropTypesAreEquivalent = (propA, propB) ->
  return no unless propA.name is propB.name
  return yes unless propA.allNames or propB.allNames
  if Array.isArray(propA.allNames) and Array.isArray(propB.allNames)
    return propA.allNames.join('') is propB.allNames.join('')
  no
# Return propsList extended with those entries of newPropsList that have
# no equivalent already present. Neither input array is mutated.
mergeUsedPropTypes = (propsList, newPropsList) ->
  additions = newPropsList.filter (candidate) ->
    alreadyPresent = propsList.some (existing) ->
      usedPropTypesAreEquivalent existing, candidate
    not alreadyPresent
  propsList.concat additions
###*
# Components
#
# Registry of AST nodes suspected to be React components. Each entry is
# keyed by the node's source range and carries a confidence score
# (0 = banned, 1 = maybe, 2 = confirmed).
###
class Components
  constructor: ->
    # Map of "start:end" range ids to {node, confidence, ...} records.
    @_list = {}

  ###*
  # Add a node to the components list, or update it if it's already in the list
  #
  # @param {ASTNode} node The AST node being added.
  # @param {Number} confidence Confidence in the component detection (0=banned, 1=maybe, 2=yes)
  # @returns {Object} Added component object
  ###
  add: (node, confidence) ->
    id = getId node
    if @_list[id]
      # Once banned (confidence 0) a component stays banned; otherwise keep
      # the highest confidence seen so far.
      if confidence is 0 or @_list[id].confidence is 0
        @_list[id].confidence = 0
      else
        @_list[id].confidence = Math.max @_list[id].confidence, confidence
      return @_list[id]
    @_list[id] = {node, confidence}
    @_list[id]

  ###*
  # Find a component in the list using its node
  #
  # @param {ASTNode} node The AST node being searched.
  # @returns {Object} Component object, undefined if the component is not found or has confidence value of 0.
  ###
  get: (node) ->
    id = getId node
    return @_list[id] if @_list[id]?.confidence >= 1
    null

  ###*
  # Update a component in the list
  #
  # @param {ASTNode} node The AST node being updated.
  # @param {Object} props Additional properties to add to the component.
  ###
  set: (node, props) ->
    # Walk up the AST until we reach a node that is a registered component.
    node = node.parent while node and not @_list[getId node]
    return unless node
    id = getId node
    if @_list[id]
      # usedPropTypes is an array. A shallow merge replaces the existing array
      # with a new one, which caused issue #1309 — keep a copy of the original
      # so both can be merged back together below.
      copyUsedPropTypes = @_list[id].usedPropTypes?.slice()
      # FIX: util._extend is deprecated (Node.js DEP0060); Object.assign has
      # the same mutate-target, shallow-merge semantics.
      @_list[id] = Object.assign @_list[id], props
      if @_list[id] and props.usedPropTypes
        @_list[id].usedPropTypes = mergeUsedPropTypes(
          copyUsedPropTypes or []
          props.usedPropTypes
        )

  ###*
  # Return the components list
  # Components for which we are not confident are not returned
  #
  # @returns {Object} Components list
  ###
  list: ->
    list = {}
    usedPropTypes = {}

    # Props recorded on low-confidence entries are re-attributed to the
    # nearest confirmed ancestor component.
    for own _, comp of @_list
      continue if comp.confidence >= 2
      component = null
      node = null
      {node} = comp
      while not component and node.parent
        node = node.parent
        # Stop moving up if we reach a decorator
        break if node.type is 'Decorator'
        component = @get node
      if component
        newUsedProps = (comp.usedPropTypes or []).filter (propType) ->
          not propType.node or propType.node.kind isnt 'init'
        componentId = getId component.node
        usedPropTypes[componentId] = (usedPropTypes[componentId] or []).concat(
          newUsedProps
        )

    # Emit only confirmed components, merging in the re-attributed props.
    for own j, comp of @_list when comp.confidence >= 2
      id = getId comp.node
      list[j] = comp
      if usedPropTypes[id]
        list[j].usedPropTypes = (list[j].usedPropTypes or []).concat(
          usedPropTypes[id]
        )
    list

  ###*
  # Return the length of the components list
  # Components for which we are not confident are not counted
  #
  # @returns {Number} Components list length
  ###
  length: ->
    length = 0
    for own i of @_list when @_list[i].confidence >= 2
      length++
    length
componentRule = (rule, context) ->
createClass = pragmaUtil.getCreateClassFromContext context
pragma = pragmaUtil.getFromContext context
sourceCode = context.getSourceCode()
components = new Components()
# Utilities for component detection
utils =
###*
# Check if the node is a React ES5 component
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node is a React ES5 component, false if not
###
isES5Component: (node) ->
return no unless node.parent
///^(#{pragma}\.)?#{createClass}$///.test(
sourceCode.getText node.parent.callee
)
###*
# Check if the node is a React ES6 component
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node is a React ES6 component, false if not
###
isES6Component: (node) ->
return yes if utils.isExplicitComponent node
return no unless node.superClass
///^(#{pragma}\.)?(Pure)?Component$///.test(
sourceCode.getText node.superClass
)
###*
# Check if the node is explicitly declared as a descendant of a React Component
#
# @param {ASTNode} node The AST node being checked (can be a ReturnStatement or an ArrowFunctionExpression).
# @returns {Boolean} True if the node is explicitly declared as a descendant of a React Component, false if not
###
isExplicitComponent: (node) ->
# Sometimes the passed node may not have been parsed yet by eslint, and this function call crashes.
# Can be removed when eslint sets "parent" property for all nodes on initial AST traversal: https://github.com/eslint/eslint-scope/issues/27
# eslint-disable-next-line no-warning-comments
# FIXME: Remove try/catch when https://github.com/eslint/eslint-scope/issues/27 is implemented.
try
comment = sourceCode.getJSDocComment node
catch e
comment = null
return no if comment is null
commentAst = doctrine.parse comment.value,
unwrap: yes
tags: ['extends', 'augments']
relevantTags = commentAst.tags.filter (tag) ->
tag.name in ['React.Component', 'React.PureComponent']
relevantTags.length > 0
###*
# Checks to see if our component extends React.PureComponent
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if node extends React.PureComponent, false if not
###
isPureComponent: (node) ->
return ///^(#{pragma}\.)?PureComponent$///.test(
sourceCode.getText node.superClass
) if node.superClass
no
###*
# Check if createElement is destructured from React import
#
# @returns {Boolean} True if createElement is destructured from React
###
hasDestructuredReactCreateElement: ->
variables = variableUtil.variablesInScope context
variable = variableUtil.getVariable variables, 'createElement'
if variable
map = variable.scope.set
return yes if map.has 'React'
no
###*
# Checks to see if node is called within React.createElement
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if React.createElement called
###
isReactCreateElement: (node) ->
calledOnReact =
node?.callee?.object?.name is 'React' and
node.callee.property?.name is 'createElement'
calledDirectly = node?.callee?.name is 'createElement'
return (
calledDirectly or calledOnReact
) if @hasDestructuredReactCreateElement()
calledOnReact
getReturnPropertyAndNode: (ASTnode) ->
node = ASTnode
return {node, property: 'expression'} if (
node.type is 'ExpressionStatement' and node.expression.returns
)
switch node.type
when 'ReturnStatement'
property = 'argument'
when 'ArrowFunctionExpression'
property = 'body'
if node[property] and node[property].type is 'BlockStatement'
{node, property} = utils.findReturnStatement node
else
{node, property} = utils.findReturnStatement node
{node, property}
###*
# Check if the node is returning JSX
#
# @param {ASTNode} ASTnode The AST node being checked
# @param {Boolean} strict If true, in a ternary condition the node must return JSX in both cases
# @returns {Boolean} True if the node is returning JSX, false if not
###
isReturningJSX: (ASTnode, strict) ->
nodeAndProperty = utils.getReturnPropertyAndNode ASTnode
{node, property} = nodeAndProperty
return no unless node
returnsConditionalJSXConsequent =
node[property] and
node[property].type is 'ConditionalExpression' and
node[property].consequent.type is 'JSXElement'
returnsConditionalJSXAlternate =
node[property] and
node[property].type is 'ConditionalExpression' and
node[property].alternate?.type is 'JSXElement'
returnsConditionalJSX =
if strict
returnsConditionalJSXConsequent and returnsConditionalJSXAlternate
else
returnsConditionalJSXConsequent or returnsConditionalJSXAlternate
returnsJSX = node[property] and node[property].type is 'JSXElement'
returnsReactCreateElement = @isReactCreateElement node[property]
Boolean returnsConditionalJSX or returnsJSX or returnsReactCreateElement
###*
# Check if the node is returning null
#
# @param {ASTNode} ASTnode The AST node being checked
# @returns {Boolean} True if the node is returning null, false if not
###
isReturningNull: (ASTnode) ->
nodeAndProperty = utils.getReturnPropertyAndNode ASTnode
{property, node} = nodeAndProperty
return no unless node
node[property] and node[property].value is null
###*
# Check if the node is returning JSX or null
#
# @param {ASTNode} ASTnode The AST node being checked
# @param {Boolean} strict If true, in a ternary condition the node must return JSX in both cases
# @returns {Boolean} True if the node is returning JSX or null, false if not
###
isReturningJSXOrNull: (ASTNode, strict) ->
utils.isReturningJSX(ASTNode, strict) or utils.isReturningNull ASTNode
###*
# Find a return statment in the current node
#
# @param {ASTNode} ASTnode The AST node being checked
###
findReturnStatement: astUtil.findReturnStatement
###*
# Get the parent component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentComponent: ->
utils.getParentES6Component() or
utils.getParentES5Component() or
utils.getParentStatelessComponent()
###*
# Get the parent ES5 component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentES5Component: ->
# eslint-disable-next-line coffee/destructuring-assignment
scope = context.getScope()
while scope
node = scope.block?.parent?.parent
return node if node and utils.isES5Component node
scope = scope.upper
null
###*
# Get the parent ES6 component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentES6Component: ->
scope = context.getScope()
while scope and scope.type isnt 'class'
scope = scope.upper
node = scope?.block
return null if not node or not utils.isES6Component node
node
###*
# Get the parent stateless component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentStatelessComponent: ->
# eslint-disable-next-line coffee/destructuring-assignment
scope = context.getScope()
while scope
node = scope.block
isClass = node.type is 'ClassExpression'
isFunction = /Function/.test node.type # Functions
isMethod = node.parent?.type is 'MethodDefinition' # Classes methods
isArgument =
node.parent?.type is 'CallExpression' or
(node.parent?.type is 'UnaryExpression' and
node.parent.operator is 'do') # Arguments (callback, etc.)
# Attribute Expressions inside JSX Elements (<button onClick={() => props.handleClick()}></button>)
isJSXExpressionContainer = node.parent?.type is 'JSXExpressionContainer'
# Stop moving up if we reach a class or an argument (like a callback)
return null if isClass or isArgument
# Return the node if it is a function that is not a class method and is not inside a JSX Element
return node if (
isFunction and
not isMethod and
not isJSXExpressionContainer and
utils.isReturningJSXOrNull node
)
scope = scope.upper
null
###*
# Get the related component from a node
#
# @param {ASTNode} node The AST node being checked (must be a MemberExpression).
# @returns {ASTNode} component node, null if we cannot find the component
###
getRelatedComponent: (node) ->
# Get the component path
componentPath = []
while node
if node.property and node.property.type is 'Identifier'
componentPath.push node.property.name
if node.object and node.object.type is 'Identifier'
componentPath.push node.object.name
node = node.object
componentPath.reverse()
componentName = componentPath.slice(0, componentPath.length - 1).join '.'
# Find the variable in the current scope
variableName = componentPath.shift()
return null unless variableName
variables = variableUtil.variablesInScope context
for variable in variables
if variable.name is variableName
variableInScope = variable
break
return null unless variableInScope
# Try to find the component using variable references
for ref in variableInScope.references
refId = ref.identifier
if refId.parent and refId.parent.type is 'MemberExpression'
refId = refId.parent
continue unless sourceCode.getText(refId) is componentName
if refId.type is 'MemberExpression'
componentNode = refId.parent.right
else if refId.parent and refId.parent.type is 'VariableDeclarator'
componentNode = refId.parent.init
else if (
refId.declaration and refId.parent.type is 'AssignmentExpression'
)
componentNode = refId.parent.right
break
# Return the component
return components.add componentNode, 1 if componentNode
# Try to find the component using variable declarations
for def in variableInScope.defs
if def.type in ['ClassName', 'FunctionName', 'Variable']
defInScope = def
break
return null unless defInScope?.node
componentNode =
defInScope.node.init or
(defInScope.node.declaration and
defInScope.node.parent.type is 'AssignmentExpression' and
defInScope.node.parent.right) or
defInScope.node
# Traverse the node properties to the component declaration
for componentPathSegment in componentPath
continue unless componentNode.properties
for prop in componentNode.properties
if prop.key?.name is componentPathSegment
componentNode = prop
break
return null if not componentNode or not componentNode.value
componentNode = componentNode.value
# Return the component
components.add componentNode, 1
# Component detection instructions
detectionInstructions =
ClassExpression: (node) ->
return unless utils.isES6Component node
components.add node, 2
ClassDeclaration: (node) ->
return unless utils.isES6Component node
components.add node, 2
ClassProperty: (node) ->
node = utils.getParentComponent()
return unless node
components.add node, 2
ObjectExpression: (node) ->
return unless utils.isES5Component node
components.add node, 2
FunctionExpression: (node) ->
if node.async
components.add node, 0
return
component = utils.getParentComponent()
if (
not component or
(component.parent and component.parent.type is 'JSXExpressionContainer')
)
# Ban the node if we cannot find a parent component
components.add node, 0
return
components.add component, 1
FunctionDeclaration: (node) ->
if node.async
components.add node, 0
return
node = utils.getParentComponent()
return unless node
components.add node, 1
ArrowFunctionExpression: (node) ->
if node.async
components.add node, 0
return
component = utils.getParentComponent()
if (
not component or
(component.parent and component.parent.type is 'JSXExpressionContainer')
)
# Ban the node if we cannot find a parent component
components.add node, 0
return
if component.expression and utils.isReturningJSX component
components.add component, 2
else
components.add component, 1
ThisExpression: (node) ->
component = utils.getParentComponent()
return if (
not component or
not /Function/.test(component.type) or
not node.parent.property
)
# Ban functions accessing a property on a ThisExpression
components.add node, 0
ReturnStatement: (node) ->
return unless utils.isReturningJSX node
node = utils.getParentComponent()
unless node
scope = context.getScope()
components.add scope.block, 1
return
components.add node, 2
ExpressionStatement: (node) ->
return unless utils.isReturningJSX node
node = utils.getParentComponent()
unless node
scope = context.getScope()
components.add scope.block, 1
return
components.add node, 2
# Update the provided rule instructions to add the component detection
ruleInstructions = rule context, components, utils
updatedRuleInstructions = util._extend {}, ruleInstructions
propTypesInstructions = propTypes context, components, utils
allKeys = new Set(
Object.keys(detectionInstructions).concat Object.keys propTypesInstructions
)
allKeys.forEach (instruction) ->
updatedRuleInstructions[instruction] = (node) ->
if instruction of detectionInstructions
detectionInstructions[instruction] node
if instruction of propTypesInstructions
propTypesInstructions[instruction] node
if ruleInstructions[instruction]
ruleInstructions[instruction] node
else
undefined
# Return the updated rule instructions
updatedRuleInstructions
module.exports = Object.assign Components,
detect: (rule) -> componentRule.bind @, rule
| 86867 | ###*
# @fileoverview Utility class and functions for React components detection
# @author <NAME>
###
'use strict'
util = require 'util'
doctrine = require 'doctrine'
variableUtil = require './variable'
pragmaUtil = require 'eslint-plugin-react/lib/util/pragma'
astUtil = require './ast'
propTypes = require './propTypes'
getId = (node) -> node?.range.join ':'
usedPropTypesAreEquivalent = (propA, propB) ->
if propA.name is propB.name
if not propA.allNames and not propB.allNames
return yes
return yes if (
Array.isArray(propA.allNames) and
Array.isArray(propB.allNames) and
propA.allNames.join('') is propB.allNames.join ''
)
return no
no
mergeUsedPropTypes = (propsList, newPropsList) ->
propsToAdd = []
newPropsList.forEach (newProp) ->
newPropisAlreadyInTheList = propsList.some (prop) ->
usedPropTypesAreEquivalent prop, newProp
unless newPropisAlreadyInTheList then propsToAdd.push newProp
propsList.concat propsToAdd
###*
# Components
###
class Components
constructor: ->
@_list = {}
###*
# Add a node to the components list, or update it if it's already in the list
#
# @param {ASTNode} node The AST node being added.
# @param {Number} confidence Confidence in the component detection (0=banned, 1=maybe, 2=yes)
# @returns {Object} Added component object
###
add: (node, confidence) ->
id = getId node
if @_list[id]
if confidence is 0 or @_list[id].confidence is 0
@_list[id].confidence = 0
else
@_list[id].confidence = Math.max @_list[id].confidence, confidence
return @_list[id]
@_list[id] = {node, confidence}
@_list[id]
###*
# Find a component in the list using its node
#
# @param {ASTNode} node The AST node being searched.
# @returns {Object} Component object, undefined if the component is not found or has confidence value of 0.
###
get: (node) ->
id = getId node
return @_list[id] if @_list[id]?.confidence >= 1
null
###*
# Update a component in the list
#
# @param {ASTNode} node The AST node being updated.
# @param {Object} props Additional properties to add to the component.
###
set: (node, props) ->
node = node.parent while node and not @_list[getId node]
return unless node
id = getId node
if @_list[id]
# usedPropTypes is an array. _extend replaces existing array with a new one which caused issue #1309.
# preserving original array so it can be merged later on.
copyUsedPropTypes = @_list[id].usedPropTypes?.slice()
@_list[id] = util._extend @_list[id], props
if @_list[id] and props.usedPropTypes
@_list[id].usedPropTypes = mergeUsedPropTypes(
copyUsedPropTypes or []
props.usedPropTypes
)
###*
# Return the components list
# Components for which we are not confident are not returned
#
# @returns {Object} Components list
###
list: ->
list = {}
usedPropTypes = {}
# Find props used in components for which we are not confident
for own _, comp of @_list
continue if comp.confidence >= 2
component = null
node = null
{node} = comp
while not component and node.parent
node = node.parent
# Stop moving up if we reach a decorator
break if node.type is 'Decorator'
component = @get node
if component
newUsedProps = (comp.usedPropTypes or []).filter (propType) ->
not propType.node or propType.node.kind isnt 'init'
componentId = getId component.node
usedPropTypes[componentId] = (usedPropTypes[componentId] or []).concat(
newUsedProps
)
# Assign used props in not confident components to the parent component
for own j, comp of @_list when comp.confidence >= 2
id = getId comp.node
list[j] = comp
if usedPropTypes[id]
list[j].usedPropTypes = (list[j].usedPropTypes or []).concat(
usedPropTypes[id]
)
list
###*
# Return the length of the components list
# Components for which we are not confident are not counted
#
# @returns {Number} Components list length
###
length: ->
length = 0
for own i of @_list when @_list[i].confidence >= 2
length++
length
componentRule = (rule, context) ->
createClass = pragmaUtil.getCreateClassFromContext context
pragma = pragmaUtil.getFromContext context
sourceCode = context.getSourceCode()
components = new Components()
# Utilities for component detection
utils =
###*
# Check if the node is a React ES5 component
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node is a React ES5 component, false if not
###
isES5Component: (node) ->
return no unless node.parent
///^(#{pragma}\.)?#{createClass}$///.test(
sourceCode.getText node.parent.callee
)
###*
# Check if the node is a React ES6 component
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node is a React ES6 component, false if not
###
isES6Component: (node) ->
return yes if utils.isExplicitComponent node
return no unless node.superClass
///^(#{pragma}\.)?(Pure)?Component$///.test(
sourceCode.getText node.superClass
)
###*
# Check if the node is explicitly declared as a descendant of a React Component
#
# @param {ASTNode} node The AST node being checked (can be a ReturnStatement or an ArrowFunctionExpression).
# @returns {Boolean} True if the node is explicitly declared as a descendant of a React Component, false if not
###
isExplicitComponent: (node) ->
# Sometimes the passed node may not have been parsed yet by eslint, and this function call crashes.
# Can be removed when eslint sets "parent" property for all nodes on initial AST traversal: https://github.com/eslint/eslint-scope/issues/27
# eslint-disable-next-line no-warning-comments
# FIXME: Remove try/catch when https://github.com/eslint/eslint-scope/issues/27 is implemented.
try
comment = sourceCode.getJSDocComment node
catch e
comment = null
return no if comment is null
commentAst = doctrine.parse comment.value,
unwrap: yes
tags: ['extends', 'augments']
relevantTags = commentAst.tags.filter (tag) ->
tag.name in ['React.Component', 'React.PureComponent']
relevantTags.length > 0
###*
# Checks to see if our component extends React.PureComponent
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if node extends React.PureComponent, false if not
###
isPureComponent: (node) ->
return ///^(#{pragma}\.)?PureComponent$///.test(
sourceCode.getText node.superClass
) if node.superClass
no
###*
# Check if createElement is destructured from React import
#
# @returns {Boolean} True if createElement is destructured from React
###
hasDestructuredReactCreateElement: ->
variables = variableUtil.variablesInScope context
variable = variableUtil.getVariable variables, 'createElement'
if variable
map = variable.scope.set
return yes if map.has 'React'
no
###*
# Checks to see if node is called within React.createElement
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if React.createElement called
###
isReactCreateElement: (node) ->
calledOnReact =
node?.callee?.object?.name is 'React' and
node.callee.property?.name is 'createElement'
calledDirectly = node?.callee?.name is 'createElement'
return (
calledDirectly or calledOnReact
) if @hasDestructuredReactCreateElement()
calledOnReact
getReturnPropertyAndNode: (ASTnode) ->
node = ASTnode
return {node, property: 'expression'} if (
node.type is 'ExpressionStatement' and node.expression.returns
)
switch node.type
when 'ReturnStatement'
property = 'argument'
when 'ArrowFunctionExpression'
property = 'body'
if node[property] and node[property].type is 'BlockStatement'
{node, property} = utils.findReturnStatement node
else
{node, property} = utils.findReturnStatement node
{node, property}
###*
# Check if the node is returning JSX
#
# @param {ASTNode} ASTnode The AST node being checked
# @param {Boolean} strict If true, in a ternary condition the node must return JSX in both cases
# @returns {Boolean} True if the node is returning JSX, false if not
###
isReturningJSX: (ASTnode, strict) ->
nodeAndProperty = utils.getReturnPropertyAndNode ASTnode
{node, property} = nodeAndProperty
return no unless node
returnsConditionalJSXConsequent =
node[property] and
node[property].type is 'ConditionalExpression' and
node[property].consequent.type is 'JSXElement'
returnsConditionalJSXAlternate =
node[property] and
node[property].type is 'ConditionalExpression' and
node[property].alternate?.type is 'JSXElement'
returnsConditionalJSX =
if strict
returnsConditionalJSXConsequent and returnsConditionalJSXAlternate
else
returnsConditionalJSXConsequent or returnsConditionalJSXAlternate
returnsJSX = node[property] and node[property].type is 'JSXElement'
returnsReactCreateElement = @isReactCreateElement node[property]
Boolean returnsConditionalJSX or returnsJSX or returnsReactCreateElement
###*
# Check if the node is returning null
#
# @param {ASTNode} ASTnode The AST node being checked
# @returns {Boolean} True if the node is returning null, false if not
###
isReturningNull: (ASTnode) ->
nodeAndProperty = utils.getReturnPropertyAndNode ASTnode
{property, node} = nodeAndProperty
return no unless node
node[property] and node[property].value is null
###*
# Check if the node is returning JSX or null
#
# @param {ASTNode} ASTnode The AST node being checked
# @param {Boolean} strict If true, in a ternary condition the node must return JSX in both cases
# @returns {Boolean} True if the node is returning JSX or null, false if not
###
isReturningJSXOrNull: (ASTNode, strict) ->
utils.isReturningJSX(ASTNode, strict) or utils.isReturningNull ASTNode
###*
# Find a return statment in the current node
#
# @param {ASTNode} ASTnode The AST node being checked
###
findReturnStatement: astUtil.findReturnStatement
###*
# Get the parent component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentComponent: ->
utils.getParentES6Component() or
utils.getParentES5Component() or
utils.getParentStatelessComponent()
###*
# Get the parent ES5 component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentES5Component: ->
# eslint-disable-next-line coffee/destructuring-assignment
scope = context.getScope()
while scope
node = scope.block?.parent?.parent
return node if node and utils.isES5Component node
scope = scope.upper
null
###*
# Get the parent ES6 component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentES6Component: ->
scope = context.getScope()
while scope and scope.type isnt 'class'
scope = scope.upper
node = scope?.block
return null if not node or not utils.isES6Component node
node
###*
# Get the parent stateless component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentStatelessComponent: ->
# eslint-disable-next-line coffee/destructuring-assignment
scope = context.getScope()
while scope
node = scope.block
isClass = node.type is 'ClassExpression'
isFunction = /Function/.test node.type # Functions
isMethod = node.parent?.type is 'MethodDefinition' # Classes methods
isArgument =
node.parent?.type is 'CallExpression' or
(node.parent?.type is 'UnaryExpression' and
node.parent.operator is 'do') # Arguments (callback, etc.)
# Attribute Expressions inside JSX Elements (<button onClick={() => props.handleClick()}></button>)
isJSXExpressionContainer = node.parent?.type is 'JSXExpressionContainer'
# Stop moving up if we reach a class or an argument (like a callback)
return null if isClass or isArgument
# Return the node if it is a function that is not a class method and is not inside a JSX Element
return node if (
isFunction and
not isMethod and
not isJSXExpressionContainer and
utils.isReturningJSXOrNull node
)
scope = scope.upper
null
###*
# Get the related component from a node
#
# @param {ASTNode} node The AST node being checked (must be a MemberExpression).
# @returns {ASTNode} component node, null if we cannot find the component
###
getRelatedComponent: (node) ->
# Get the component path
componentPath = []
while node
if node.property and node.property.type is 'Identifier'
componentPath.push node.property.name
if node.object and node.object.type is 'Identifier'
componentPath.push node.object.name
node = node.object
componentPath.reverse()
componentName = componentPath.slice(0, componentPath.length - 1).join '.'
# Find the variable in the current scope
variableName = componentPath.shift()
return null unless variableName
variables = variableUtil.variablesInScope context
for variable in variables
if variable.name is variableName
variableInScope = variable
break
return null unless variableInScope
# Try to find the component using variable references
for ref in variableInScope.references
refId = ref.identifier
if refId.parent and refId.parent.type is 'MemberExpression'
refId = refId.parent
continue unless sourceCode.getText(refId) is componentName
if refId.type is 'MemberExpression'
componentNode = refId.parent.right
else if refId.parent and refId.parent.type is 'VariableDeclarator'
componentNode = refId.parent.init
else if (
refId.declaration and refId.parent.type is 'AssignmentExpression'
)
componentNode = refId.parent.right
break
# Return the component
return components.add componentNode, 1 if componentNode
# Try to find the component using variable declarations
for def in variableInScope.defs
if def.type in ['ClassName', 'FunctionName', 'Variable']
defInScope = def
break
return null unless defInScope?.node
componentNode =
defInScope.node.init or
(defInScope.node.declaration and
defInScope.node.parent.type is 'AssignmentExpression' and
defInScope.node.parent.right) or
defInScope.node
# Traverse the node properties to the component declaration
for componentPathSegment in componentPath
continue unless componentNode.properties
for prop in componentNode.properties
if prop.key?.name is componentPathSegment
componentNode = prop
break
return null if not componentNode or not componentNode.value
componentNode = componentNode.value
# Return the component
components.add componentNode, 1
# Component detection instructions
detectionInstructions =
ClassExpression: (node) ->
return unless utils.isES6Component node
components.add node, 2
ClassDeclaration: (node) ->
return unless utils.isES6Component node
components.add node, 2
ClassProperty: (node) ->
node = utils.getParentComponent()
return unless node
components.add node, 2
ObjectExpression: (node) ->
return unless utils.isES5Component node
components.add node, 2
FunctionExpression: (node) ->
if node.async
components.add node, 0
return
component = utils.getParentComponent()
if (
not component or
(component.parent and component.parent.type is 'JSXExpressionContainer')
)
# Ban the node if we cannot find a parent component
components.add node, 0
return
components.add component, 1
FunctionDeclaration: (node) ->
if node.async
components.add node, 0
return
node = utils.getParentComponent()
return unless node
components.add node, 1
ArrowFunctionExpression: (node) ->
if node.async
components.add node, 0
return
component = utils.getParentComponent()
if (
not component or
(component.parent and component.parent.type is 'JSXExpressionContainer')
)
# Ban the node if we cannot find a parent component
components.add node, 0
return
if component.expression and utils.isReturningJSX component
components.add component, 2
else
components.add component, 1
ThisExpression: (node) ->
component = utils.getParentComponent()
return if (
not component or
not /Function/.test(component.type) or
not node.parent.property
)
# Ban functions accessing a property on a ThisExpression
components.add node, 0
ReturnStatement: (node) ->
return unless utils.isReturningJSX node
node = utils.getParentComponent()
unless node
scope = context.getScope()
components.add scope.block, 1
return
components.add node, 2
ExpressionStatement: (node) ->
return unless utils.isReturningJSX node
node = utils.getParentComponent()
unless node
scope = context.getScope()
components.add scope.block, 1
return
components.add node, 2
# Update the provided rule instructions to add the component detection
ruleInstructions = rule context, components, utils
updatedRuleInstructions = util._extend {}, ruleInstructions
propTypesInstructions = propTypes context, components, utils
allKeys = new Set(
Object.keys(detectionInstructions).concat Object.keys propTypesInstructions
)
allKeys.forEach (instruction) ->
updatedRuleInstructions[instruction] = (node) ->
if instruction of detectionInstructions
detectionInstructions[instruction] node
if instruction of propTypesInstructions
propTypesInstructions[instruction] node
if ruleInstructions[instruction]
ruleInstructions[instruction] node
else
undefined
# Return the updated rule instructions
updatedRuleInstructions
module.exports = Object.assign Components,
detect: (rule) -> componentRule.bind @, rule
| true | ###*
# @fileoverview Utility class and functions for React components detection
# @author PI:NAME:<NAME>END_PI
###
'use strict'
util = require 'util'
doctrine = require 'doctrine'
variableUtil = require './variable'
pragmaUtil = require 'eslint-plugin-react/lib/util/pragma'
astUtil = require './ast'
propTypes = require './propTypes'
getId = (node) -> node?.range.join ':'
usedPropTypesAreEquivalent = (propA, propB) ->
if propA.name is propB.name
if not propA.allNames and not propB.allNames
return yes
return yes if (
Array.isArray(propA.allNames) and
Array.isArray(propB.allNames) and
propA.allNames.join('') is propB.allNames.join ''
)
return no
no
mergeUsedPropTypes = (propsList, newPropsList) ->
propsToAdd = []
newPropsList.forEach (newProp) ->
newPropisAlreadyInTheList = propsList.some (prop) ->
usedPropTypesAreEquivalent prop, newProp
unless newPropisAlreadyInTheList then propsToAdd.push newProp
propsList.concat propsToAdd
###*
# Components
###
class Components
constructor: ->
@_list = {}
###*
# Add a node to the components list, or update it if it's already in the list
#
# @param {ASTNode} node The AST node being added.
# @param {Number} confidence Confidence in the component detection (0=banned, 1=maybe, 2=yes)
# @returns {Object} Added component object
###
add: (node, confidence) ->
id = getId node
if @_list[id]
if confidence is 0 or @_list[id].confidence is 0
@_list[id].confidence = 0
else
@_list[id].confidence = Math.max @_list[id].confidence, confidence
return @_list[id]
@_list[id] = {node, confidence}
@_list[id]
###*
# Find a component in the list using its node
#
# @param {ASTNode} node The AST node being searched.
# @returns {Object} Component object, undefined if the component is not found or has confidence value of 0.
###
get: (node) ->
id = getId node
return @_list[id] if @_list[id]?.confidence >= 1
null
###*
# Update a component in the list
#
# @param {ASTNode} node The AST node being updated.
# @param {Object} props Additional properties to add to the component.
###
set: (node, props) ->
node = node.parent while node and not @_list[getId node]
return unless node
id = getId node
if @_list[id]
# usedPropTypes is an array. _extend replaces existing array with a new one which caused issue #1309.
# preserving original array so it can be merged later on.
copyUsedPropTypes = @_list[id].usedPropTypes?.slice()
@_list[id] = util._extend @_list[id], props
if @_list[id] and props.usedPropTypes
@_list[id].usedPropTypes = mergeUsedPropTypes(
copyUsedPropTypes or []
props.usedPropTypes
)
###*
# Return the components list
# Components for which we are not confident are not returned
#
# @returns {Object} Components list
###
list: ->
list = {}
usedPropTypes = {}
# Find props used in components for which we are not confident
for own _, comp of @_list
continue if comp.confidence >= 2
component = null
node = null
{node} = comp
while not component and node.parent
node = node.parent
# Stop moving up if we reach a decorator
break if node.type is 'Decorator'
component = @get node
if component
newUsedProps = (comp.usedPropTypes or []).filter (propType) ->
not propType.node or propType.node.kind isnt 'init'
componentId = getId component.node
usedPropTypes[componentId] = (usedPropTypes[componentId] or []).concat(
newUsedProps
)
# Assign used props in not confident components to the parent component
for own j, comp of @_list when comp.confidence >= 2
id = getId comp.node
list[j] = comp
if usedPropTypes[id]
list[j].usedPropTypes = (list[j].usedPropTypes or []).concat(
usedPropTypes[id]
)
list
###*
# Return the length of the components list
# Components for which we are not confident are not counted
#
# @returns {Number} Components list length
###
length: ->
length = 0
for own i of @_list when @_list[i].confidence >= 2
length++
length
componentRule = (rule, context) ->
createClass = pragmaUtil.getCreateClassFromContext context
pragma = pragmaUtil.getFromContext context
sourceCode = context.getSourceCode()
components = new Components()
# Utilities for component detection
utils =
###*
# Check if the node is a React ES5 component
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node is a React ES5 component, false if not
###
isES5Component: (node) ->
return no unless node.parent
///^(#{pragma}\.)?#{createClass}$///.test(
sourceCode.getText node.parent.callee
)
###*
# Check if the node is a React ES6 component
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if the node is a React ES6 component, false if not
###
isES6Component: (node) ->
return yes if utils.isExplicitComponent node
return no unless node.superClass
///^(#{pragma}\.)?(Pure)?Component$///.test(
sourceCode.getText node.superClass
)
    ###*
    # Check if the node is explicitly declared as a descendant of a React Component
    # via a JSDoc @extends/@augments tag.
    #
    # @param {ASTNode} node The AST node being checked (can be a ReturnStatement or an ArrowFunctionExpression).
    # @returns {Boolean} True if the node is explicitly declared as a descendant of a React Component, false if not
    ###
    isExplicitComponent: (node) ->
      # Sometimes the passed node may not have been parsed yet by eslint, and this function call crashes.
      # Can be removed when eslint sets "parent" property for all nodes on initial AST traversal: https://github.com/eslint/eslint-scope/issues/27
      # eslint-disable-next-line no-warning-comments
      # FIXME: Remove try/catch when https://github.com/eslint/eslint-scope/issues/27 is implemented.
      try
        comment = sourceCode.getJSDocComment node
      catch e
        comment = null
      return no if comment is null
      # Parse the JSDoc block, keeping only @extends / @augments tags.
      commentAst = doctrine.parse comment.value,
        unwrap: yes
        tags: ['extends', 'augments']
      # The component is explicit if any tag names a React (Pure)Component.
      relevantTags = commentAst.tags.filter (tag) ->
        tag.name in ['React.Component', 'React.PureComponent']
      relevantTags.length > 0
###*
# Checks to see if our component extends React.PureComponent
#
# @param {ASTNode} node The AST node being checked.
# @returns {Boolean} True if node extends React.PureComponent, false if not
###
isPureComponent: (node) ->
return ///^(#{pragma}\.)?PureComponent$///.test(
sourceCode.getText node.superClass
) if node.superClass
no
###*
# Check if createElement is destructured from React import
#
# @returns {Boolean} True if createElement is destructured from React
###
hasDestructuredReactCreateElement: ->
variables = variableUtil.variablesInScope context
variable = variableUtil.getVariable variables, 'createElement'
if variable
map = variable.scope.set
return yes if map.has 'React'
no
    ###*
    # Checks to see if node is called within React.createElement
    #
    # @param {ASTNode} node The AST node being checked.
    # @returns {Boolean} True if React.createElement called
    ###
    isReactCreateElement: (node) ->
      # Qualified form: React.createElement(...)
      calledOnReact =
        node?.callee?.object?.name is 'React' and
        node.callee.property?.name is 'createElement'
      # Bare form: createElement(...) — only counts when destructured from React
      calledDirectly = node?.callee?.name is 'createElement'
      # NOTE(review): uses `@` rather than `utils.`, so this relies on being
      # invoked with `utils` (or the enclosing object) as the receiver.
      return (
        calledDirectly or calledOnReact
      ) if @hasDestructuredReactCreateElement()
      calledOnReact
    ###*
    # Resolve the {node, property} pair such that node[property] is the value
    # a function (or return statement) yields.
    #
    # @param {ASTNode} ASTnode The AST node being checked
    # @returns {Object} {node, property} pair locating the returned expression
    ###
    getReturnPropertyAndNode: (ASTnode) ->
      node = ASTnode
      # NOTE(review): `node.expression.returns` looks like a marker set
      # elsewhere for implicit-return expression statements — confirm.
      return {node, property: 'expression'} if (
        node.type is 'ExpressionStatement' and node.expression.returns
      )
      switch node.type
        when 'ReturnStatement'
          property = 'argument'
        when 'ArrowFunctionExpression'
          property = 'body'
          # A block-bodied arrow has no implicit return; look for an explicit
          # return statement inside the block instead.
          if node[property] and node[property].type is 'BlockStatement'
            {node, property} = utils.findReturnStatement node
        else
          {node, property} = utils.findReturnStatement node
      {node, property}
    ###*
    # Check if the node is returning JSX
    #
    # @param {ASTNode} ASTnode The AST node being checked
    # @param {Boolean} strict If true, in a ternary condition the node must return JSX in both cases
    # @returns {Boolean} True if the node is returning JSX, false if not
    ###
    isReturningJSX: (ASTnode, strict) ->
      nodeAndProperty = utils.getReturnPropertyAndNode ASTnode
      {node, property} = nodeAndProperty
      return no unless node
      # Ternary whose consequent is JSX: `cond ? <El/> : x`
      returnsConditionalJSXConsequent =
        node[property] and
        node[property].type is 'ConditionalExpression' and
        node[property].consequent.type is 'JSXElement'
      # Ternary whose alternate is JSX: `cond ? x : <El/>`
      returnsConditionalJSXAlternate =
        node[property] and
        node[property].type is 'ConditionalExpression' and
        node[property].alternate?.type is 'JSXElement'
      # In strict mode both branches of the ternary must be JSX; otherwise one
      # JSX branch is enough.
      returnsConditionalJSX =
        if strict
          returnsConditionalJSXConsequent and returnsConditionalJSXAlternate
        else
          returnsConditionalJSXConsequent or returnsConditionalJSXAlternate
      returnsJSX = node[property] and node[property].type is 'JSXElement'
      # Also accept React.createElement(...) / destructured createElement(...)
      returnsReactCreateElement = @isReactCreateElement node[property]
      Boolean returnsConditionalJSX or returnsJSX or returnsReactCreateElement
    ###*
    # Check if the node is returning null
    #
    # @param {ASTNode} ASTnode The AST node being checked
    # @returns {Boolean} True if the node is returning null, false if not
    ###
    isReturningNull: (ASTnode) ->
      nodeAndProperty = utils.getReturnPropertyAndNode ASTnode
      {property, node} = nodeAndProperty
      return no unless node
      # A `return null` yields a Literal whose .value is null. May return a
      # falsy non-boolean when node[property] is absent.
      node[property] and node[property].value is null
###*
# Check if the node is returning JSX or null
#
# @param {ASTNode} ASTnode The AST node being checked
# @param {Boolean} strict If true, in a ternary condition the node must return JSX in both cases
# @returns {Boolean} True if the node is returning JSX or null, false if not
###
isReturningJSXOrNull: (ASTNode, strict) ->
utils.isReturningJSX(ASTNode, strict) or utils.isReturningNull ASTNode
    ###*
    # Find a return statement in the current node.
    # Delegates directly to the shared astUtil helper.
    #
    # @param {ASTNode} ASTnode The AST node being checked
    ###
    findReturnStatement: astUtil.findReturnStatement
###*
# Get the parent component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentComponent: ->
utils.getParentES6Component() or
utils.getParentES5Component() or
utils.getParentStatelessComponent()
    ###*
    # Get the parent ES5 component node from the current scope
    #
    # @returns {ASTNode} component node, null if we are not in a component
    ###
    getParentES5Component: ->
      # eslint-disable-next-line coffee/destructuring-assignment
      scope = context.getScope()
      while scope
        # Walk block -> parent -> parent: assumes the method's function sits
        # in a Property inside the createClass ObjectExpression — TODO confirm.
        node = scope.block?.parent?.parent
        return node if node and utils.isES5Component node
        scope = scope.upper
      null
###*
# Get the parent ES6 component node from the current scope
#
# @returns {ASTNode} component node, null if we are not in a component
###
getParentES6Component: ->
scope = context.getScope()
while scope and scope.type isnt 'class'
scope = scope.upper
node = scope?.block
return null if not node or not utils.isES6Component node
node
    ###*
    # Get the parent stateless component node from the current scope.
    # Walks up the scope chain, stopping at classes/arguments and returning
    # the first plain function that returns JSX or null.
    #
    # @returns {ASTNode} component node, null if we are not in a component
    ###
    getParentStatelessComponent: ->
      # eslint-disable-next-line coffee/destructuring-assignment
      scope = context.getScope()
      while scope
        node = scope.block
        isClass = node.type is 'ClassExpression'
        isFunction = /Function/.test node.type # Functions
        isMethod = node.parent?.type is 'MethodDefinition' # Classes methods
        isArgument =
          node.parent?.type is 'CallExpression' or
          (node.parent?.type is 'UnaryExpression' and
            node.parent.operator is 'do') # Arguments (callback, etc.)
        # Attribute Expressions inside JSX Elements (<button onClick={() => props.handleClick()}></button>)
        isJSXExpressionContainer = node.parent?.type is 'JSXExpressionContainer'
        # Stop moving up if we reach a class or an argument (like a callback)
        return null if isClass or isArgument
        # Return the node if it is a function that is not a class method and is not inside a JSX Element
        return node if (
          isFunction and
          not isMethod and
          not isJSXExpressionContainer and
          utils.isReturningJSXOrNull node
        )
        scope = scope.upper
      null
    ###*
    # Get the related component from a node
    #
    # @param {ASTNode} node The AST node being checked (must be a MemberExpression).
    # @returns {ASTNode} component node, null if we cannot find the component
    ###
    getRelatedComponent: (node) ->
      # Get the component path: collect identifier segments while walking
      # down the member-expression chain, then reverse into source order.
      componentPath = []
      while node
        if node.property and node.property.type is 'Identifier'
          componentPath.push node.property.name
        if node.object and node.object.type is 'Identifier'
          componentPath.push node.object.name
        node = node.object
      componentPath.reverse()
      # Full dotted name minus the last segment (e.g. "Foo.Bar" for Foo.Bar.baz).
      componentName = componentPath.slice(0, componentPath.length - 1).join '.'
      # Find the variable in the current scope matching the root segment.
      variableName = componentPath.shift()
      return null unless variableName
      variables = variableUtil.variablesInScope context
      for variable in variables
        if variable.name is variableName
          variableInScope = variable
          break
      return null unless variableInScope
      # Try to find the component using variable references
      for ref in variableInScope.references
        refId = ref.identifier
        if refId.parent and refId.parent.type is 'MemberExpression'
          refId = refId.parent
        continue unless sourceCode.getText(refId) is componentName
        # The component body is on the right-hand side of the assignment /
        # declarator this reference participates in.
        if refId.type is 'MemberExpression'
          componentNode = refId.parent.right
        else if refId.parent and refId.parent.type is 'VariableDeclarator'
          componentNode = refId.parent.init
        else if (
          refId.declaration and refId.parent.type is 'AssignmentExpression'
        )
          componentNode = refId.parent.right
        break
      # Return the component (registered with confidence 1)
      return components.add componentNode, 1 if componentNode
      # Try to find the component using variable declarations
      for def in variableInScope.defs
        if def.type in ['ClassName', 'FunctionName', 'Variable']
          defInScope = def
          break
      return null unless defInScope?.node
      componentNode =
        defInScope.node.init or
        (defInScope.node.declaration and
          defInScope.node.parent.type is 'AssignmentExpression' and
          defInScope.node.parent.right) or
        defInScope.node
      # Traverse the node properties to the component declaration, one path
      # segment at a time (e.g. resolve Foo.Bar inside an object literal).
      for componentPathSegment in componentPath
        continue unless componentNode.properties
        for prop in componentNode.properties
          if prop.key?.name is componentPathSegment
            componentNode = prop
            break
        return null if not componentNode or not componentNode.value
        componentNode = componentNode.value
      # Return the component
      components.add componentNode, 1
  # Component detection instructions. Confidence levels (as used by
  # Components.list/length): 0 bans a node, 2 marks a confirmed component,
  # 1 is an in-between "maybe" — inferred from usage below; confirm.
  detectionInstructions =
    ClassExpression: (node) ->
      return unless utils.isES6Component node
      components.add node, 2
    ClassDeclaration: (node) ->
      return unless utils.isES6Component node
      components.add node, 2
    ClassProperty: (node) ->
      # A class property implies we are inside a component body.
      node = utils.getParentComponent()
      return unless node
      components.add node, 2
    ObjectExpression: (node) ->
      return unless utils.isES5Component node
      components.add node, 2
    FunctionExpression: (node) ->
      # Async functions cannot be components.
      if node.async
        components.add node, 0
        return
      component = utils.getParentComponent()
      if (
        not component or
        (component.parent and component.parent.type is 'JSXExpressionContainer')
      )
        # Ban the node if we cannot find a parent component
        components.add node, 0
        return
      components.add component, 1
    FunctionDeclaration: (node) ->
      # Async functions cannot be components.
      if node.async
        components.add node, 0
        return
      node = utils.getParentComponent()
      return unless node
      components.add node, 1
    ArrowFunctionExpression: (node) ->
      # Async functions cannot be components.
      if node.async
        components.add node, 0
        return
      component = utils.getParentComponent()
      if (
        not component or
        (component.parent and component.parent.type is 'JSXExpressionContainer')
      )
        # Ban the node if we cannot find a parent component
        components.add node, 0
        return
      # An expression-bodied arrow that yields JSX is a confirmed component.
      if component.expression and utils.isReturningJSX component
        components.add component, 2
      else
        components.add component, 1
    ThisExpression: (node) ->
      component = utils.getParentComponent()
      return if (
        not component or
        not /Function/.test(component.type) or
        not node.parent.property
      )
      # Ban functions accessing a property on a ThisExpression
      components.add node, 0
    ReturnStatement: (node) ->
      return unless utils.isReturningJSX node
      node = utils.getParentComponent()
      unless node
        # No identifiable parent component: tentatively register the
        # enclosing scope's block instead.
        scope = context.getScope()
        components.add scope.block, 1
        return
      components.add node, 2
    ExpressionStatement: (node) ->
      return unless utils.isReturningJSX node
      node = utils.getParentComponent()
      unless node
        # Same fallback as ReturnStatement above.
        scope = context.getScope()
        components.add scope.block, 1
        return
      components.add node, 2
# Update the provided rule instructions to add the component detection
ruleInstructions = rule context, components, utils
updatedRuleInstructions = util._extend {}, ruleInstructions
propTypesInstructions = propTypes context, components, utils
allKeys = new Set(
Object.keys(detectionInstructions).concat Object.keys propTypesInstructions
)
allKeys.forEach (instruction) ->
updatedRuleInstructions[instruction] = (node) ->
if instruction of detectionInstructions
detectionInstructions[instruction] node
if instruction of propTypesInstructions
propTypesInstructions[instruction] node
if ruleInstructions[instruction]
ruleInstructions[instruction] node
else
undefined
# Return the updated rule instructions
updatedRuleInstructions
# Expose the Components registry, augmented with a `detect` helper that
# wraps a rule implementation with component detection (see componentRule).
module.exports = Object.assign Components,
  detect: (rule) -> componentRule.bind @, rule
|
[
{
"context": "icle-section-artworks').html().should.containEql 'Govinda Sah'\n @$('.article-section-artworks').html().shoul",
"end": 2195,
"score": 0.9997644424438477,
"start": 2184,
"tag": "NAME",
"value": "Govinda Sah"
},
{
"context": "icle-section-artworks').html().should.conta... | mobile/components/article/test/templates.coffee | dblock/force | 1 | _ = require 'underscore'
fs = require 'fs'
jade = require 'jade'
path = require 'path'
cheerio = require 'cheerio'
Backbone = require 'backbone'
{ resolve } = require 'path'
Article = require '../../../models/article'
Section = require '../../../models/section'
Articles = require '../../../collections/articles'
fixtures = require '../../../test/helpers/fixtures'
render = (templateName) ->
filename = path.resolve __dirname, "../templates/#{templateName}.jade"
jade.compile(
fs.readFileSync(filename),
{ filename: filename }
)
describe 'article page', ->
before ->
@article = new Article require('./fixtures/article')
_.extend @article.attributes, { section_ids: ['03434'] }
_.extend @article.attributes,
hero_section:
type: 'fullscreen'
background_url: 'http://video.mp4'
@featuredArticles = new Articles
@featuredArticles.model = Article
@featuredArticles.add [_.extend(_.clone(fixtures.article), title: 'Featured Section Article Title')]
@sectionArticles = new Articles
@sectionArticles.model = Article
@sectionArticles.add [_.extend(_.clone(fixtures.article), title: 'Section Article Title')]
html = render('index')
sd: {}
resize: ->
article: @article
footerArticles: new Backbone.Collection
featuredSection: new Section _.extend _.clone(fixtures.section), title: 'Moo Bar'
featuredSectionArticles: @sectionArticles
@$ = cheerio.load html
it 'renders the headline', ->
@$('h1').text().should.equal 'SPRING/BREAK Offers a Breath of Fresh Air for Weary Fairgoers'
it 'renders the sections', ->
@article.get('sections').should.have.lengthOf 12
@$('.article-section').should.have.lengthOf 12
it 'renders artworks', ->
@$('.article-section-artworks').should.have.lengthOf 1
@$('.article-section-artworks').html().should.containEql 'govinda-sah-azad-matter-slash-nothing-slash-matter'
@$('.article-section-artworks').html().should.containEql 'https://d32dm0rphc51dk.cloudfront.net/UhkwvicwkJGgGoPIAP5pVA/larger.jpg'
it 'can render artworks with two artists', ->
@$('.article-section-artworks').html().should.containEql 'Govinda Sah'
@$('.article-section-artworks').html().should.containEql 'Andy Warhol'
| 118907 | _ = require 'underscore'
fs = require 'fs'
jade = require 'jade'
path = require 'path'
cheerio = require 'cheerio'
Backbone = require 'backbone'
{ resolve } = require 'path'
Article = require '../../../models/article'
Section = require '../../../models/section'
Articles = require '../../../collections/articles'
fixtures = require '../../../test/helpers/fixtures'
render = (templateName) ->
filename = path.resolve __dirname, "../templates/#{templateName}.jade"
jade.compile(
fs.readFileSync(filename),
{ filename: filename }
)
describe 'article page', ->
before ->
@article = new Article require('./fixtures/article')
_.extend @article.attributes, { section_ids: ['03434'] }
_.extend @article.attributes,
hero_section:
type: 'fullscreen'
background_url: 'http://video.mp4'
@featuredArticles = new Articles
@featuredArticles.model = Article
@featuredArticles.add [_.extend(_.clone(fixtures.article), title: 'Featured Section Article Title')]
@sectionArticles = new Articles
@sectionArticles.model = Article
@sectionArticles.add [_.extend(_.clone(fixtures.article), title: 'Section Article Title')]
html = render('index')
sd: {}
resize: ->
article: @article
footerArticles: new Backbone.Collection
featuredSection: new Section _.extend _.clone(fixtures.section), title: 'Moo Bar'
featuredSectionArticles: @sectionArticles
@$ = cheerio.load html
it 'renders the headline', ->
@$('h1').text().should.equal 'SPRING/BREAK Offers a Breath of Fresh Air for Weary Fairgoers'
it 'renders the sections', ->
@article.get('sections').should.have.lengthOf 12
@$('.article-section').should.have.lengthOf 12
it 'renders artworks', ->
@$('.article-section-artworks').should.have.lengthOf 1
@$('.article-section-artworks').html().should.containEql 'govinda-sah-azad-matter-slash-nothing-slash-matter'
@$('.article-section-artworks').html().should.containEql 'https://d32dm0rphc51dk.cloudfront.net/UhkwvicwkJGgGoPIAP5pVA/larger.jpg'
it 'can render artworks with two artists', ->
@$('.article-section-artworks').html().should.containEql '<NAME>'
@$('.article-section-artworks').html().should.containEql '<NAME>'
| true | _ = require 'underscore'
fs = require 'fs'
jade = require 'jade'
path = require 'path'
cheerio = require 'cheerio'
Backbone = require 'backbone'
{ resolve } = require 'path'
Article = require '../../../models/article'
Section = require '../../../models/section'
Articles = require '../../../collections/articles'
fixtures = require '../../../test/helpers/fixtures'
render = (templateName) ->
filename = path.resolve __dirname, "../templates/#{templateName}.jade"
jade.compile(
fs.readFileSync(filename),
{ filename: filename }
)
describe 'article page', ->
before ->
@article = new Article require('./fixtures/article')
_.extend @article.attributes, { section_ids: ['03434'] }
_.extend @article.attributes,
hero_section:
type: 'fullscreen'
background_url: 'http://video.mp4'
@featuredArticles = new Articles
@featuredArticles.model = Article
@featuredArticles.add [_.extend(_.clone(fixtures.article), title: 'Featured Section Article Title')]
@sectionArticles = new Articles
@sectionArticles.model = Article
@sectionArticles.add [_.extend(_.clone(fixtures.article), title: 'Section Article Title')]
html = render('index')
sd: {}
resize: ->
article: @article
footerArticles: new Backbone.Collection
featuredSection: new Section _.extend _.clone(fixtures.section), title: 'Moo Bar'
featuredSectionArticles: @sectionArticles
@$ = cheerio.load html
it 'renders the headline', ->
@$('h1').text().should.equal 'SPRING/BREAK Offers a Breath of Fresh Air for Weary Fairgoers'
it 'renders the sections', ->
@article.get('sections').should.have.lengthOf 12
@$('.article-section').should.have.lengthOf 12
it 'renders artworks', ->
@$('.article-section-artworks').should.have.lengthOf 1
@$('.article-section-artworks').html().should.containEql 'govinda-sah-azad-matter-slash-nothing-slash-matter'
@$('.article-section-artworks').html().should.containEql 'https://d32dm0rphc51dk.cloudfront.net/UhkwvicwkJGgGoPIAP5pVA/larger.jpg'
it 'can render artworks with two artists', ->
@$('.article-section-artworks').html().should.containEql 'PI:NAME:<NAME>END_PI'
@$('.article-section-artworks').html().should.containEql 'PI:NAME:<NAME>END_PI'
|
[
{
"context": "Mail sender module:\", ->\n\n destinationEmail = 'mail@gertuproject.info'\n\n before (done) ->\n\n done()\n\n describ",
"end": 187,
"score": 0.9999293088912964,
"start": 165,
"tag": "EMAIL",
"value": "mail@gertuproject.info"
}
] | test/tools/mailer.coffee | gertu/gertu | 1 | should = require "should"
Mailer = require "../../server/tools/mailer"
describe "<Unit Test>", ->
describe "Mail sender module:", ->
destinationEmail = 'mail@gertuproject.info'
before (done) ->
done()
describe "The mail module", ->
it "should be send it to address " + destinationEmail, (done) ->
Mailer.send destinationEmail,
'This is test#1',
'Body for test#1',
() =>
done()
(error) =>
done()
it "should send email to address " + destinationEmail + ' from a template without replacing anything', (done) ->
Mailer.sendTemplate destinationEmail,
'This is test#2',
'testMail',
null,
() =>
done()
(error) =>
done()
it "should send email to address " + destinationEmail + ' from a template replacing fields', (done) ->
fields =
emailTest: 'text replaced in test'
result = Mailer.sendTemplate destinationEmail,
'This is test#3',
'testMail',
fields,
() =>
done()
(error) =>
done()
after (done) ->
done()
| 166725 | should = require "should"
Mailer = require "../../server/tools/mailer"
describe "<Unit Test>", ->
describe "Mail sender module:", ->
destinationEmail = '<EMAIL>'
before (done) ->
done()
describe "The mail module", ->
it "should be send it to address " + destinationEmail, (done) ->
Mailer.send destinationEmail,
'This is test#1',
'Body for test#1',
() =>
done()
(error) =>
done()
it "should send email to address " + destinationEmail + ' from a template without replacing anything', (done) ->
Mailer.sendTemplate destinationEmail,
'This is test#2',
'testMail',
null,
() =>
done()
(error) =>
done()
it "should send email to address " + destinationEmail + ' from a template replacing fields', (done) ->
fields =
emailTest: 'text replaced in test'
result = Mailer.sendTemplate destinationEmail,
'This is test#3',
'testMail',
fields,
() =>
done()
(error) =>
done()
after (done) ->
done()
| true | should = require "should"
Mailer = require "../../server/tools/mailer"
describe "<Unit Test>", ->
describe "Mail sender module:", ->
destinationEmail = 'PI:EMAIL:<EMAIL>END_PI'
before (done) ->
done()
describe "The mail module", ->
it "should be send it to address " + destinationEmail, (done) ->
Mailer.send destinationEmail,
'This is test#1',
'Body for test#1',
() =>
done()
(error) =>
done()
it "should send email to address " + destinationEmail + ' from a template without replacing anything', (done) ->
Mailer.sendTemplate destinationEmail,
'This is test#2',
'testMail',
null,
() =>
done()
(error) =>
done()
it "should send email to address " + destinationEmail + ' from a template replacing fields', (done) ->
fields =
emailTest: 'text replaced in test'
result = Mailer.sendTemplate destinationEmail,
'This is test#3',
'testMail',
fields,
() =>
done()
(error) =>
done()
after (done) ->
done()
|
[
{
"context": "04d0000d94f87', 'slug': 'emptiness', 'creator': '5388f9ac9a904d0000d94f87', 'original': '53a0a1e2d9048dbc3a793c81', 'watche",
"end": 457,
"score": 0.7241109013557434,
"start": 434,
"tag": "PASSWORD",
"value": "388f9ac9a904d0000d94f87"
}
] | test/app/views/editor/level/EditorLevelView.spec.coffee | Melondonut/codecombat | 1 | EditorLevelView = require 'views/editor/level/edit'
emptyLevel = {'_id': '53a0a1e2d9048dbc3a793c81', 'name': 'Emptiness', 'description': 'Tis nothing..', 'documentation': {'generalArticles': [], 'specificArticles': []}, 'scripts': [], 'thangs': [], 'systems': [], 'victory': {}, 'version': {'minor': 0, 'major': 0, 'isLatestMajor': true, 'isLatestMinor': true}, 'index': '5388f9ac9a904d0000d94f87', 'slug': 'emptiness', 'creator': '5388f9ac9a904d0000d94f87', 'original': '53a0a1e2d9048dbc3a793c81', 'watchers': ['5388f9ac9a904d0000d94f87'], '__v': 0, 'created': '2014-06-17T20:15:30.207Z', 'permissions': [{'access': 'owner', 'target': '5388f9ac9a904d0000d94f87'}]}
describe 'EditorLevelView', ->
describe 'revert button', ->
it 'opens just one modal when you click it', ->
view = new EditorLevelView({}, 'something')
request = jasmine.Ajax.requests.first()
request.response {status: 200, responseText: JSON.stringify(emptyLevel)}
view.render()
spyOn(view, 'openModalView')
view.$el.find('#revert-button').click()
expect(view.openModalView.calls.count()).toBe(1)
| 129821 | EditorLevelView = require 'views/editor/level/edit'
emptyLevel = {'_id': '53a0a1e2d9048dbc3a793c81', 'name': 'Emptiness', 'description': 'Tis nothing..', 'documentation': {'generalArticles': [], 'specificArticles': []}, 'scripts': [], 'thangs': [], 'systems': [], 'victory': {}, 'version': {'minor': 0, 'major': 0, 'isLatestMajor': true, 'isLatestMinor': true}, 'index': '5388f9ac9a904d0000d94f87', 'slug': 'emptiness', 'creator': '5<PASSWORD>', 'original': '53a0a1e2d9048dbc3a793c81', 'watchers': ['5388f9ac9a904d0000d94f87'], '__v': 0, 'created': '2014-06-17T20:15:30.207Z', 'permissions': [{'access': 'owner', 'target': '5388f9ac9a904d0000d94f87'}]}
describe 'EditorLevelView', ->
describe 'revert button', ->
it 'opens just one modal when you click it', ->
view = new EditorLevelView({}, 'something')
request = jasmine.Ajax.requests.first()
request.response {status: 200, responseText: JSON.stringify(emptyLevel)}
view.render()
spyOn(view, 'openModalView')
view.$el.find('#revert-button').click()
expect(view.openModalView.calls.count()).toBe(1)
| true | EditorLevelView = require 'views/editor/level/edit'
emptyLevel = {'_id': '53a0a1e2d9048dbc3a793c81', 'name': 'Emptiness', 'description': 'Tis nothing..', 'documentation': {'generalArticles': [], 'specificArticles': []}, 'scripts': [], 'thangs': [], 'systems': [], 'victory': {}, 'version': {'minor': 0, 'major': 0, 'isLatestMajor': true, 'isLatestMinor': true}, 'index': '5388f9ac9a904d0000d94f87', 'slug': 'emptiness', 'creator': '5PI:PASSWORD:<PASSWORD>END_PI', 'original': '53a0a1e2d9048dbc3a793c81', 'watchers': ['5388f9ac9a904d0000d94f87'], '__v': 0, 'created': '2014-06-17T20:15:30.207Z', 'permissions': [{'access': 'owner', 'target': '5388f9ac9a904d0000d94f87'}]}
describe 'EditorLevelView', ->
describe 'revert button', ->
it 'opens just one modal when you click it', ->
view = new EditorLevelView({}, 'something')
request = jasmine.Ajax.requests.first()
request.response {status: 200, responseText: JSON.stringify(emptyLevel)}
view.render()
spyOn(view, 'openModalView')
view.$el.find('#revert-button').click()
expect(view.openModalView.calls.count()).toBe(1)
|
[
{
"context": "rdHash = hashedPassword.hash\n @passwordSalt = hashedPassword.salt\n @password = undefined\n next()\n",
"end": 740,
"score": 0.8000674247741699,
"start": 726,
"tag": "PASSWORD",
"value": "hashedPassword"
},
{
"context": "rdSalt = hashedPassword.salt\n... | server/models/user.coffee | paulbjensen/shogun | 1 | bcrypt = require 'bcrypt'
toLower = (value) -> value.toLowerCase()
hashPassword = (password, cb) ->
bcrypt.genSalt 10, (err, salt) ->
bcrypt.hash password, salt, (err, hash) ->
cb hash: hash, salt: salt
global.Users = new Schema
username : type: String, set: toLower
email : type: String, set: toLower
password : String
passwordHash : String
passwordSalt : String
createdAt : type: Date, default: Date.now
updatedAt : type: Date, default: Date.now
# Encrypt the password
Users.pre 'save', (next) ->
if @isNew
hashPassword @password, (hashedPassword) =>
@passwordHash = hashedPassword.hash
@passwordSalt = hashedPassword.salt
@password = undefined
next()
else
next()
# Remove all of the user's projects [tidy]
Users.pre 'remove', (next) ->
Project.find {userId: @_id}, (err, projects) ->
for project in projects
project.remove (err) ->
next new Error(err) if err?
next() if projects.indexOf(project) is projects.length-1
global.User = mongoose.model 'User', Users | 30581 | bcrypt = require 'bcrypt'
toLower = (value) -> value.toLowerCase()
hashPassword = (password, cb) ->
bcrypt.genSalt 10, (err, salt) ->
bcrypt.hash password, salt, (err, hash) ->
cb hash: hash, salt: salt
global.Users = new Schema
username : type: String, set: toLower
email : type: String, set: toLower
password : String
passwordHash : String
passwordSalt : String
createdAt : type: Date, default: Date.now
updatedAt : type: Date, default: Date.now
# Encrypt the password
Users.pre 'save', (next) ->
if @isNew
hashPassword @password, (hashedPassword) =>
@passwordHash = hashedPassword.hash
@passwordSalt = <PASSWORD>.salt
@password = <PASSWORD>
next()
else
next()
# Remove all of the user's projects [tidy]
Users.pre 'remove', (next) ->
Project.find {userId: @_id}, (err, projects) ->
for project in projects
project.remove (err) ->
next new Error(err) if err?
next() if projects.indexOf(project) is projects.length-1
global.User = mongoose.model 'User', Users | true | bcrypt = require 'bcrypt'
toLower = (value) -> value.toLowerCase()
hashPassword = (password, cb) ->
bcrypt.genSalt 10, (err, salt) ->
bcrypt.hash password, salt, (err, hash) ->
cb hash: hash, salt: salt
global.Users = new Schema
username : type: String, set: toLower
email : type: String, set: toLower
password : String
passwordHash : String
passwordSalt : String
createdAt : type: Date, default: Date.now
updatedAt : type: Date, default: Date.now
# Encrypt the password
Users.pre 'save', (next) ->
if @isNew
hashPassword @password, (hashedPassword) =>
@passwordHash = hashedPassword.hash
@passwordSalt = PI:PASSWORD:<PASSWORD>END_PI.salt
@password = PI:PASSWORD:<PASSWORD>END_PI
next()
else
next()
# Remove all of the user's projects [tidy]
Users.pre 'remove', (next) ->
Project.find {userId: @_id}, (err, projects) ->
for project in projects
project.remove (err) ->
next new Error(err) if err?
next() if projects.indexOf(project) is projects.length-1
global.User = mongoose.model 'User', Users |
[
{
"context": "assert = chai.assert\n\nVISIBILITY_KEY ='hypothesis.visibility'\nVISIBILITY_PUBLIC = 'public'\nVISIBILITY_PRIVATE ",
"end": 60,
"score": 0.7227658033370972,
"start": 39,
"tag": "KEY",
"value": "hypothesis.visibility"
},
{
"context": ".create()\n\n fakeAuth = {\n ... | tests/js/directives/privacy-test.coffee | Treora/h | 0 | assert = chai.assert
VISIBILITY_KEY ='hypothesis.visibility'
VISIBILITY_PUBLIC = 'public'
VISIBILITY_PRIVATE = 'private'
describe 'h.directives.privacy', ->
$window = null
$scope = null
$compile = null
$injector = null
$element = null
$isolateScope = null
beforeEach module('h')
beforeEach module('h.templates')
describe 'memory fallback', ->
fakeAuth = null
sandbox = null
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeAuth = {
user: 'acct:angry.joe@texas.com'
}
$provide.value 'auth', fakeAuth
return
afterEach ->
sandbox.restore()
describe 'has memory fallback', ->
$window = null
$scope2 = null
beforeEach inject (_$compile_, _$rootScope_, _$window_) ->
$compile = _$compile_
$scope = _$rootScope_.$new()
$scope2 = _$rootScope_.$new()
$window = _$window_
$window.localStorage = null
it 'stores the default visibility level when it changes', ->
$scope.permissions = {read: ['acct:user@example.com']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
$isolateScope.setLevel(name: VISIBILITY_PUBLIC)
$scope2.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope2)
$scope2.$digest()
# Roundabout way: the storage works because the directive
# could read out the privacy level
readPermissions = $scope2.permissions.read[0]
assert.equal readPermissions, 'group:__world__'
describe 'has localStorage', ->
sandbox = null
fakeAuth = null
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeAuth = {
user: 'acct:angry.joe@texas.com'
}
$provide.value 'auth', fakeAuth
return
afterEach ->
sandbox.restore()
beforeEach inject (_$compile_, _$rootScope_, _$injector_, _$window_) ->
$compile = _$compile_
$scope = _$rootScope_.$new()
$injector = _$injector_
$window = _$window_
describe 'storage', ->
store = null
beforeEach ->
store = $window.localStorage
it 'stores the default visibility level when it changes', ->
$scope.permissions = {read: ['acct:user@example.com']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
$isolateScope.setLevel(name: VISIBILITY_PUBLIC)
expected = VISIBILITY_PUBLIC
stored = store.getItem VISIBILITY_KEY
assert.equal stored, expected
describe 'setting permissions', ->
store = null
modelCtrl = null
beforeEach ->
store = $window.localStorage
describe 'when no setting is stored', ->
beforeEach ->
store.removeItem VISIBILITY_KEY
it 'defaults to public', ->
$scope.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
describe 'when permissions.read is empty', ->
beforeEach ->
store.setItem VISIBILITY_KEY, VISIBILITY_PUBLIC
$scope.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
it 'sets the initial permissions based on the stored privacy level', ->
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
it 'does not alter the level on subsequent renderings', ->
modelCtrl = $element.controller('ngModel')
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$scope.permissions.read = ['acct:user@example.com']
$scope.$digest()
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
describe 'when permissions.read is filled', ->
it 'does not alter the level', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$scope.permissions = {read: ['group:__world__']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
assert.equal($isolateScope.level.name, VISIBILITY_PUBLIC)
describe 'user attribute', ->
beforeEach ->
$scope.permissions = {read: []}
it 'fills the permissions fields with the auth.user name', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
readPermissions = $scope.permissions.read[0]
updatePermissions = $scope.permissions.update[0]
deletePermissions = $scope.permissions.delete[0]
adminPermissions = $scope.permissions.admin[0]
assert.equal readPermissions, fakeAuth.user
assert.equal updatePermissions, fakeAuth.user
assert.equal deletePermissions, fakeAuth.user
assert.equal adminPermissions, fakeAuth.user
it 'puts group_world into the read permissions for public visibility', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PUBLIC
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
readPermissions = $scope.permissions.read[0]
updatePermissions = $scope.permissions.update[0]
deletePermissions = $scope.permissions.delete[0]
adminPermissions = $scope.permissions.admin[0]
assert.equal readPermissions, 'group:__world__'
assert.equal updatePermissions, fakeAuth.user
assert.equal deletePermissions, fakeAuth.user
assert.equal adminPermissions, fakeAuth.user
| 37329 | assert = chai.assert
VISIBILITY_KEY ='<KEY>'
VISIBILITY_PUBLIC = 'public'
VISIBILITY_PRIVATE = 'private'
describe 'h.directives.privacy', ->
$window = null
$scope = null
$compile = null
$injector = null
$element = null
$isolateScope = null
beforeEach module('h')
beforeEach module('h.templates')
describe 'memory fallback', ->
fakeAuth = null
sandbox = null
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeAuth = {
user: 'acct:<EMAIL>'
}
$provide.value 'auth', fakeAuth
return
afterEach ->
sandbox.restore()
describe 'has memory fallback', ->
$window = null
$scope2 = null
beforeEach inject (_$compile_, _$rootScope_, _$window_) ->
$compile = _$compile_
$scope = _$rootScope_.$new()
$scope2 = _$rootScope_.$new()
$window = _$window_
$window.localStorage = null
it 'stores the default visibility level when it changes', ->
$scope.permissions = {read: ['acct:<EMAIL>']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
$isolateScope.setLevel(name: VISIBILITY_PUBLIC)
$scope2.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope2)
$scope2.$digest()
# Roundabout way: the storage works because the directive
# could read out the privacy level
readPermissions = $scope2.permissions.read[0]
assert.equal readPermissions, 'group:__world__'
describe 'has localStorage', ->
sandbox = null
fakeAuth = null
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeAuth = {
user: 'acct:<EMAIL>'
}
$provide.value 'auth', fakeAuth
return
afterEach ->
sandbox.restore()
beforeEach inject (_$compile_, _$rootScope_, _$injector_, _$window_) ->
$compile = _$compile_
$scope = _$rootScope_.$new()
$injector = _$injector_
$window = _$window_
describe 'storage', ->
store = null
beforeEach ->
store = $window.localStorage
it 'stores the default visibility level when it changes', ->
$scope.permissions = {read: ['acct:<EMAIL>']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
$isolateScope.setLevel(name: VISIBILITY_PUBLIC)
expected = VISIBILITY_PUBLIC
stored = store.getItem VISIBILITY_KEY
assert.equal stored, expected
describe 'setting permissions', ->
store = null
modelCtrl = null
beforeEach ->
store = $window.localStorage
describe 'when no setting is stored', ->
beforeEach ->
store.removeItem VISIBILITY_KEY
it 'defaults to public', ->
$scope.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
describe 'when permissions.read is empty', ->
beforeEach ->
store.setItem VISIBILITY_KEY, VISIBILITY_PUBLIC
$scope.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
it 'sets the initial permissions based on the stored privacy level', ->
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
it 'does not alter the level on subsequent renderings', ->
modelCtrl = $element.controller('ngModel')
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$scope.permissions.read = ['acct:<EMAIL>']
$scope.$digest()
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
describe 'when permissions.read is filled', ->
it 'does not alter the level', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$scope.permissions = {read: ['group:__world__']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
assert.equal($isolateScope.level.name, VISIBILITY_PUBLIC)
describe 'user attribute', ->
beforeEach ->
$scope.permissions = {read: []}
it 'fills the permissions fields with the auth.user name', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
readPermissions = $scope.permissions.read[0]
updatePermissions = $scope.permissions.update[0]
deletePermissions = $scope.permissions.delete[0]
adminPermissions = $scope.permissions.admin[0]
assert.equal readPermissions, fakeAuth.user
assert.equal updatePermissions, fakeAuth.user
assert.equal deletePermissions, fakeAuth.user
assert.equal adminPermissions, fakeAuth.user
it 'puts group_world into the read permissions for public visibility', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PUBLIC
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
readPermissions = $scope.permissions.read[0]
updatePermissions = $scope.permissions.update[0]
deletePermissions = $scope.permissions.delete[0]
adminPermissions = $scope.permissions.admin[0]
assert.equal readPermissions, 'group:__world__'
assert.equal updatePermissions, fakeAuth.user
assert.equal deletePermissions, fakeAuth.user
assert.equal adminPermissions, fakeAuth.user
| true | assert = chai.assert
VISIBILITY_KEY ='PI:KEY:<KEY>END_PI'
VISIBILITY_PUBLIC = 'public'
VISIBILITY_PRIVATE = 'private'
describe 'h.directives.privacy', ->
$window = null
$scope = null
$compile = null
$injector = null
$element = null
$isolateScope = null
beforeEach module('h')
beforeEach module('h.templates')
describe 'memory fallback', ->
fakeAuth = null
sandbox = null
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeAuth = {
user: 'acct:PI:EMAIL:<EMAIL>END_PI'
}
$provide.value 'auth', fakeAuth
return
afterEach ->
sandbox.restore()
describe 'has memory fallback', ->
$window = null
$scope2 = null
beforeEach inject (_$compile_, _$rootScope_, _$window_) ->
$compile = _$compile_
$scope = _$rootScope_.$new()
$scope2 = _$rootScope_.$new()
$window = _$window_
$window.localStorage = null
it 'stores the default visibility level when it changes', ->
$scope.permissions = {read: ['acct:PI:EMAIL:<EMAIL>END_PI']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
$isolateScope.setLevel(name: VISIBILITY_PUBLIC)
$scope2.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope2)
$scope2.$digest()
# Roundabout way: the storage works because the directive
# could read out the privacy level
readPermissions = $scope2.permissions.read[0]
assert.equal readPermissions, 'group:__world__'
describe 'has localStorage', ->
sandbox = null
fakeAuth = null
beforeEach module ($provide) ->
sandbox = sinon.sandbox.create()
fakeAuth = {
user: 'acct:PI:EMAIL:<EMAIL>END_PI'
}
$provide.value 'auth', fakeAuth
return
afterEach ->
sandbox.restore()
beforeEach inject (_$compile_, _$rootScope_, _$injector_, _$window_) ->
$compile = _$compile_
$scope = _$rootScope_.$new()
$injector = _$injector_
$window = _$window_
describe 'storage', ->
store = null
beforeEach ->
store = $window.localStorage
it 'stores the default visibility level when it changes', ->
$scope.permissions = {read: ['acct:PI:EMAIL:<EMAIL>END_PI']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
$isolateScope.setLevel(name: VISIBILITY_PUBLIC)
expected = VISIBILITY_PUBLIC
stored = store.getItem VISIBILITY_KEY
assert.equal stored, expected
describe 'setting permissions', ->
store = null
modelCtrl = null
beforeEach ->
store = $window.localStorage
describe 'when no setting is stored', ->
beforeEach ->
store.removeItem VISIBILITY_KEY
it 'defaults to public', ->
$scope.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
describe 'when permissions.read is empty', ->
beforeEach ->
store.setItem VISIBILITY_KEY, VISIBILITY_PUBLIC
$scope.permissions = {read: []}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
it 'sets the initial permissions based on the stored privacy level', ->
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
it 'does not alter the level on subsequent renderings', ->
modelCtrl = $element.controller('ngModel')
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$scope.permissions.read = ['acct:PI:EMAIL:<EMAIL>END_PI']
$scope.$digest()
assert.equal $isolateScope.level.name, VISIBILITY_PUBLIC
describe 'when permissions.read is filled', ->
it 'does not alter the level', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$scope.permissions = {read: ['group:__world__']}
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
$isolateScope = $element.isolateScope()
assert.equal($isolateScope.level.name, VISIBILITY_PUBLIC)
describe 'user attribute', ->
beforeEach ->
$scope.permissions = {read: []}
it 'fills the permissions fields with the auth.user name', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PRIVATE
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
readPermissions = $scope.permissions.read[0]
updatePermissions = $scope.permissions.update[0]
deletePermissions = $scope.permissions.delete[0]
adminPermissions = $scope.permissions.admin[0]
assert.equal readPermissions, fakeAuth.user
assert.equal updatePermissions, fakeAuth.user
assert.equal deletePermissions, fakeAuth.user
assert.equal adminPermissions, fakeAuth.user
it 'puts group_world into the read permissions for public visibility', ->
store.setItem VISIBILITY_KEY, VISIBILITY_PUBLIC
$element = $compile('<privacy ng-model="permissions">')($scope)
$scope.$digest()
readPermissions = $scope.permissions.read[0]
updatePermissions = $scope.permissions.update[0]
deletePermissions = $scope.permissions.delete[0]
adminPermissions = $scope.permissions.admin[0]
assert.equal readPermissions, 'group:__world__'
assert.equal updatePermissions, fakeAuth.user
assert.equal deletePermissions, fakeAuth.user
assert.equal adminPermissions, fakeAuth.user
|
[
{
"context": "alert \"hello, #{= @first_name} #{== @last_name}\"",
"end": 29,
"score": 0.7282513380050659,
"start": 18,
"tag": "USERNAME",
"value": "@first_name"
},
{
"context": "alert \"hello, #{= @first_name} #{== @last_name}\"",
"end": 35,
"score": 0.4120033383369446,
... | spec/dummy/app/views/events/index.js.coffee | lmgtfy/coffee-views | 8 | alert "hello, #{= @first_name} #{== @last_name}" | 52674 | alert "hello, #{= @first_name} #{==<NAME> @last_name}" | true | alert "hello, #{= @first_name} #{==PI:NAME:<NAME>END_PI @last_name}" |
[
{
"context": "---\n---\nfirst_word = \"Hello\"\nboth_words = \"#{first_word} World\"\n",
"end": 27,
"score": 0.8234581351280212,
"start": 22,
"tag": "NAME",
"value": "Hello"
}
] | js/trycoffee.coffee | AYCHERA/AYCHERA.github.io | 0 | ---
---
first_word = "Hello"
both_words = "#{first_word} World"
| 197462 | ---
---
first_word = "<NAME>"
both_words = "#{first_word} World"
| true | ---
---
first_word = "PI:NAME:<NAME>END_PI"
both_words = "#{first_word} World"
|
[
{
"context": "e: Model.String.required\n\t# \t\t# toString: -> \"User [@fullName]\"\n\t# \t\t# hobies:[\n\t# \t\t# \ttitle: String\n\t# \t\t# \ts",
"end": 3346,
"score": 0.9970686435699463,
"start": 3336,
"tag": "USERNAME",
"value": "[@fullName"
},
{
"context": "d: Boolean\n\t# \t\t#... | assets-test/browser/schema.coffee | gridfw/gridfw-model | 1 | try
Model= new ModelClass()
# ...
console.log 'Test schema ========================>'
userSchema= Model.freeze.value
_id: Model.Hex.default('445a54c566b4e')
id: Model.alias '_id'
firstName: Model.String.required
lastName: Model.String
status: Model.virtual.extensible.value
st1: Model.default(5)
st2: Model.value
st21: Boolean
st22: Model.list {
cc:
c2:String
}
bshi: -> 'yasit blu ghrfis'
age: Model.virtual.Int.default(18).gte(18).lt(99)
jobs: [
title: String
subTitle: String
period:
from: Date
to: Date
]
jaja: Model.list(String).proto({
cc: -> 'hello'
khalid: -> 'khalid'
})
map: Model.proto({cc:15}).map String,
a: String
b: Model.Int
modelSignature: -> 'hello user'
# userSchema= Model.value
# _id: Model.ObjectId
# id: Model.alias '_id'
# firstName: Model.required.jsonIgnoreStringify.String
# lastName: Model.virtual.required.jsonIgnoreStringify.String
# age: Model.Number.default(15)
# jobs:[
# title: String
# subTitle: String
# period:
# from: Model.Date.required
# to: Date
# ]
# skills: [String]
# networking:
# Model.required
# .list [Number]
# .proto
# test1: ->
# test2: (a,b,c)->
# method1: ->
# console.log '>> user schema descriptor: ', userSchema
# compile schema
User = Model.define 'User', userSchema
console.log '---- USER: ', User[Model.SCHEMA]
console.log User.toString() # Print User format to console
# console.log '>> user signature:\n', User.modelSignature()
# # override schema
# Model.override 'User',
# id2: String
# cc:
# kk: Number
# bb: Boolean
# _id:
# timestamp: Model.Unsigned
# rand: Math.random
# date: Date.now
# jobs:[
# title: Number
# period:
# to: Model.required
# title: Model.String.max(500).min(0)
# yoga: Model.of('Yoga').freeze.value
# jj: Number
# nk: Date.now
# ]
# # addedAttr: Model.String.required
# # firstName: Model.optional
# # skills: [
# # skillTitle: String
# # skillLevel: Number
# # ]
# networking:[[String]]
# net: [[[[cc:Number, f:String]]]]
# console.log '---- USER overrided: '
# console.log User.toString() # Print User format to console
# console.log Model.all.Yoga.toString() # Print User format to console
# # override 2
# Model.override
# name: 'user'
# schema:
# skills: Model.clear.value
# skillCC: String
# skill2: Boolean
# console.log '>> Overrided user signature:\n', User.modelSignature()
# console.log '>> user schema: ', User[Model.SCHEMA]
# console.log '-----', Object.getOwnPropertyDescriptors Model.__proto__
Skills = Model.define 'Skills', [String]
console.log '------ Skills'
console.log Skills.toString()
# # Extends
# Member= Model.extends 'Member', 'User',
# since: Date
# godfather: Model.of('User')
# _id:
# herro: String
# console.log Member.toString()
# console.log User.toString()
# # print user
# console.log "------- user signature: \n", User.modelSignature()
# # test: Model.freeze.value
# # kk:
# # cc: Date
# # lastName: Model.String
# # email: Model.Email
# # fullName: Model.getter -> "#{@firstName} #{@lastName}"
# # class: Model.Int.required.jsonIgnore
# # website: Model.String.required
# # toString: -> "User [@fullName]"
# # hobies:[
# # title: String
# # score:
# # max: Number
# # avg: Model.Unsigned.required
# # doBa: ->
# # console.log 'baba'
# # def: Model.Hex.default -> 'a45f'
# # validated: Boolean
# # ]
# Model.override
# name: 'user'
# schema:
# jobs: Model.required.list
# job1: Model.String.required
# work:
# work1: Number
# Model.override
# name: 'user'
# schema:
# jobs: [
# job2: Model.String
# ]
# phone: [Number]
# work:
# work2: Number
# call: (name)-> console.log '---- hello: ', name
# # # console.log "User schema>>", User[Model.SCHEMA]
# # # console.log "User1>>", user1.__proto__
# # # create user instance
# # # Model
# console.log '-------------- user 1'
# user1=
# name: 'khalid'
# firstName: 'khalid'
# lastName: 'rafik'
# age: 854
# company: 'coredigix'
# jobs:[
# {
# job1: true
# job2: 'test2'
# job3: 'test3'
# },
# {
# job1: 'Stest1'
# job2: 'Stest2'
# job3: 'Stest3'
# }
# ]
# phone: ['10215', '0231525', '421541252', '41524251']
# emails:[{
# type:'office'
# value: 'khalid@coredigix.com'
# }]
# result = User.fromJSON user1
# console.log '--user: ', user1
# console.log '--result: ', result
# console.log '---------- test 2 =====================================================>'
# test2 = Model.from
# name: 'test2'
# schema: [
# yy: String
# name: String
# ]
# console.log '---- test2: ', test2
# obj = [{name: 'abc'}, {name: 'def'}]
# test2.fromJSON obj
# console.log '----obj: ', obj
# console.log '---- end test'
# console.log 'check "toJSON" =====================================================>'
# UserModel2= Model.from
# name: 'user2'
# schema:
# name: String
# id: Model.ObjectId.toJSON (data)->
# console.log '--- call toJSON of id'
# data
# list: [
# attr: Model.String.toJSON (data)->
# console.log '------ call nested toJSON'
# '****'
# ]
# list2: Model
# .toJSON (data)->
# console.log '--* call listed toJSON'
# data.map (l)-> l.id
# .list
# id: Number
# name: String
# info: Model
# .toJSON (data)->
# console.log '--[ call info toJSON'
# {'obj': 'pppp'}
# .value
# name: String
# age: Number
# console.log '---- model UserModel2 created'
# userA=
# name: 'khalid'
# id: '555555555'
# list:[
# {id: 'mmmm'}
# ]
# list2:[
# {id:1, name:'wij'}, {id:2, name: 'hak'}, {id:3, name: 'raf'}
# ]
# info:
# name: 'medo'
# age: 36
# err= UserModel2.fromJSON userA
# console.log '----err: ', err
# # console.log '----user JSON: ',
# console.log 'DB Test ========================>'
# UserModel3= Model.from
# name: 'user3'
# schema:
# name: Model.String.fromJSON (data)-> 'received: '+ data
# lastName: Model.String
# docs: Model.list Model.String.fromJSON (data)-> 'lst>> '+ data #.toJSON -> 'cccc'.jsonIgnore
# user3= {name: 'khalid', lastName: 'rafik', age: 15, docs: ['u', 'k', 'iii']}
# UserModel3.fromJSON user3
# console.log 'user3: ', JSON.stringify user3
console.log 'DATA TEST: ══════════════════════════════════════════════════════════════════'
user=
name: 'Khalid'
firstName: 'khalid'
fullName: 'RAFIK khalid'
# age: 31
hobbies: ['sleep', 'sellp2']
status:
st1:'st shit'
cc:'ops'
# hello: -> '7mar'
skills: [{
name: 'skill1'
},{
name: 'skill2'
}]
jobs: title: 'hello'
user2= User.fromJSON user
console.log '------- User2: ', JSON.stringify(user2, null, "\t")
console.log '------- DB: ', user2.exportDB()
console.log '>>>>>', User.fromJsonToDb user
catch err
console.error "uncaught error:", err
| 185975 | try
Model= new ModelClass()
# ...
console.log 'Test schema ========================>'
userSchema= Model.freeze.value
_id: Model.Hex.default('445a54c566b4e')
id: Model.alias '_id'
firstName: Model.String.required
lastName: Model.String
status: Model.virtual.extensible.value
st1: Model.default(5)
st2: Model.value
st21: Boolean
st22: Model.list {
cc:
c2:String
}
bshi: -> 'yasit blu ghrfis'
age: Model.virtual.Int.default(18).gte(18).lt(99)
jobs: [
title: String
subTitle: String
period:
from: Date
to: Date
]
jaja: Model.list(String).proto({
cc: -> 'hello'
khalid: -> 'khalid'
})
map: Model.proto({cc:15}).map String,
a: String
b: Model.Int
modelSignature: -> 'hello user'
# userSchema= Model.value
# _id: Model.ObjectId
# id: Model.alias '_id'
# firstName: Model.required.jsonIgnoreStringify.String
# lastName: Model.virtual.required.jsonIgnoreStringify.String
# age: Model.Number.default(15)
# jobs:[
# title: String
# subTitle: String
# period:
# from: Model.Date.required
# to: Date
# ]
# skills: [String]
# networking:
# Model.required
# .list [Number]
# .proto
# test1: ->
# test2: (a,b,c)->
# method1: ->
# console.log '>> user schema descriptor: ', userSchema
# compile schema
User = Model.define 'User', userSchema
console.log '---- USER: ', User[Model.SCHEMA]
console.log User.toString() # Print User format to console
# console.log '>> user signature:\n', User.modelSignature()
# # override schema
# Model.override 'User',
# id2: String
# cc:
# kk: Number
# bb: Boolean
# _id:
# timestamp: Model.Unsigned
# rand: Math.random
# date: Date.now
# jobs:[
# title: Number
# period:
# to: Model.required
# title: Model.String.max(500).min(0)
# yoga: Model.of('Yoga').freeze.value
# jj: Number
# nk: Date.now
# ]
# # addedAttr: Model.String.required
# # firstName: Model.optional
# # skills: [
# # skillTitle: String
# # skillLevel: Number
# # ]
# networking:[[String]]
# net: [[[[cc:Number, f:String]]]]
# console.log '---- USER overrided: '
# console.log User.toString() # Print User format to console
# console.log Model.all.Yoga.toString() # Print User format to console
# # override 2
# Model.override
# name: 'user'
# schema:
# skills: Model.clear.value
# skillCC: String
# skill2: Boolean
# console.log '>> Overrided user signature:\n', User.modelSignature()
# console.log '>> user schema: ', User[Model.SCHEMA]
# console.log '-----', Object.getOwnPropertyDescriptors Model.__proto__
Skills = Model.define 'Skills', [String]
console.log '------ Skills'
console.log Skills.toString()
# # Extends
# Member= Model.extends 'Member', 'User',
# since: Date
# godfather: Model.of('User')
# _id:
# herro: String
# console.log Member.toString()
# console.log User.toString()
# # print user
# console.log "------- user signature: \n", User.modelSignature()
# # test: Model.freeze.value
# # kk:
# # cc: Date
# # lastName: Model.String
# # email: Model.Email
# # fullName: Model.getter -> "#{@firstName} #{@lastName}"
# # class: Model.Int.required.jsonIgnore
# # website: Model.String.required
# # toString: -> "User [@fullName]"
# # hobies:[
# # title: String
# # score:
# # max: Number
# # avg: Model.Unsigned.required
# # doBa: ->
# # console.log 'baba'
# # def: Model.Hex.default -> 'a45f'
# # validated: Boolean
# # ]
# Model.override
# name: 'user'
# schema:
# jobs: Model.required.list
# job1: Model.String.required
# work:
# work1: Number
# Model.override
# name: 'user'
# schema:
# jobs: [
# job2: Model.String
# ]
# phone: [Number]
# work:
# work2: Number
# call: (name)-> console.log '---- hello: ', name
# # # console.log "User schema>>", User[Model.SCHEMA]
# # # console.log "User1>>", user1.__proto__
# # # create user instance
# # # Model
# console.log '-------------- user 1'
# user1=
# name: '<NAME>'
# firstName: '<NAME>'
# lastName: '<NAME>'
# age: 854
# company: 'coredigix'
# jobs:[
# {
# job1: true
# job2: 'test2'
# job3: 'test3'
# },
# {
# job1: 'Stest1'
# job2: 'Stest2'
# job3: 'Stest3'
# }
# ]
# phone: ['10215', '0231525', '421541252', '41524251']
# emails:[{
# type:'office'
# value: '<EMAIL>'
# }]
# result = User.fromJSON user1
# console.log '--user: ', user1
# console.log '--result: ', result
# console.log '---------- test 2 =====================================================>'
# test2 = Model.from
# name: '<NAME>'
# schema: [
# yy: String
# name: String
# ]
# console.log '---- test2: ', test2
# obj = [{name: 'abc'}, {name: 'def'}]
# test2.fromJSON obj
# console.log '----obj: ', obj
# console.log '---- end test'
# console.log 'check "toJSON" =====================================================>'
# UserModel2= Model.from
# name: 'user2'
# schema:
# name: String
# id: Model.ObjectId.toJSON (data)->
# console.log '--- call toJSON of id'
# data
# list: [
# attr: Model.String.toJSON (data)->
# console.log '------ call nested toJSON'
# '****'
# ]
# list2: Model
# .toJSON (data)->
# console.log '--* call listed toJSON'
# data.map (l)-> l.id
# .list
# id: Number
# name: String
# info: Model
# .toJSON (data)->
# console.log '--[ call info toJSON'
# {'obj': 'pppp'}
# .value
# name: String
# age: Number
# console.log '---- model UserModel2 created'
# userA=
# name: 'khalid'
# id: '555555555'
# list:[
# {id: 'mmmm'}
# ]
# list2:[
# {id:1, name:'<NAME>'}, {id:2, name: '<NAME>'}, {id:3, name: '<NAME>'}
# ]
# info:
# name: '<NAME>do'
# age: 36
# err= UserModel2.fromJSON userA
# console.log '----err: ', err
# # console.log '----user JSON: ',
# console.log 'DB Test ========================>'
# UserModel3= Model.from
# name: 'user3'
# schema:
# name: Model.String.fromJSON (data)-> 'received: '+ data
# lastName: Model.String
# docs: Model.list Model.String.fromJSON (data)-> 'lst>> '+ data #.toJSON -> 'cccc'.jsonIgnore
# user3= {name: '<NAME>', lastName: '<NAME>', age: 15, docs: ['u', 'k', 'iii']}
# UserModel3.fromJSON user3
# console.log 'user3: ', JSON.stringify user3
console.log 'DATA TEST: ══════════════════════════════════════════════════════════════════'
user=
name: '<NAME>'
firstName: '<NAME>'
fullName: '<NAME>'
# age: 31
hobbies: ['sleep', 'sellp2']
status:
st1:'st shit'
cc:'ops'
# hello: -> '7mar'
skills: [{
name: 'skill1'
},{
name: 'skill2'
}]
jobs: title: 'hello'
user2= User.fromJSON user
console.log '------- User2: ', JSON.stringify(user2, null, "\t")
console.log '------- DB: ', user2.exportDB()
console.log '>>>>>', User.fromJsonToDb user
catch err
console.error "uncaught error:", err
| true | try
Model= new ModelClass()
# ...
console.log 'Test schema ========================>'
userSchema= Model.freeze.value
_id: Model.Hex.default('445a54c566b4e')
id: Model.alias '_id'
firstName: Model.String.required
lastName: Model.String
status: Model.virtual.extensible.value
st1: Model.default(5)
st2: Model.value
st21: Boolean
st22: Model.list {
cc:
c2:String
}
bshi: -> 'yasit blu ghrfis'
age: Model.virtual.Int.default(18).gte(18).lt(99)
jobs: [
title: String
subTitle: String
period:
from: Date
to: Date
]
jaja: Model.list(String).proto({
cc: -> 'hello'
khalid: -> 'khalid'
})
map: Model.proto({cc:15}).map String,
a: String
b: Model.Int
modelSignature: -> 'hello user'
# userSchema= Model.value
# _id: Model.ObjectId
# id: Model.alias '_id'
# firstName: Model.required.jsonIgnoreStringify.String
# lastName: Model.virtual.required.jsonIgnoreStringify.String
# age: Model.Number.default(15)
# jobs:[
# title: String
# subTitle: String
# period:
# from: Model.Date.required
# to: Date
# ]
# skills: [String]
# networking:
# Model.required
# .list [Number]
# .proto
# test1: ->
# test2: (a,b,c)->
# method1: ->
# console.log '>> user schema descriptor: ', userSchema
# compile schema
User = Model.define 'User', userSchema
console.log '---- USER: ', User[Model.SCHEMA]
console.log User.toString() # Print User format to console
# console.log '>> user signature:\n', User.modelSignature()
# # override schema
# Model.override 'User',
# id2: String
# cc:
# kk: Number
# bb: Boolean
# _id:
# timestamp: Model.Unsigned
# rand: Math.random
# date: Date.now
# jobs:[
# title: Number
# period:
# to: Model.required
# title: Model.String.max(500).min(0)
# yoga: Model.of('Yoga').freeze.value
# jj: Number
# nk: Date.now
# ]
# # addedAttr: Model.String.required
# # firstName: Model.optional
# # skills: [
# # skillTitle: String
# # skillLevel: Number
# # ]
# networking:[[String]]
# net: [[[[cc:Number, f:String]]]]
# console.log '---- USER overrided: '
# console.log User.toString() # Print User format to console
# console.log Model.all.Yoga.toString() # Print User format to console
# # override 2
# Model.override
# name: 'user'
# schema:
# skills: Model.clear.value
# skillCC: String
# skill2: Boolean
# console.log '>> Overrided user signature:\n', User.modelSignature()
# console.log '>> user schema: ', User[Model.SCHEMA]
# console.log '-----', Object.getOwnPropertyDescriptors Model.__proto__
Skills = Model.define 'Skills', [String]
console.log '------ Skills'
console.log Skills.toString()
# # Extends
# Member= Model.extends 'Member', 'User',
# since: Date
# godfather: Model.of('User')
# _id:
# herro: String
# console.log Member.toString()
# console.log User.toString()
# # print user
# console.log "------- user signature: \n", User.modelSignature()
# # test: Model.freeze.value
# # kk:
# # cc: Date
# # lastName: Model.String
# # email: Model.Email
# # fullName: Model.getter -> "#{@firstName} #{@lastName}"
# # class: Model.Int.required.jsonIgnore
# # website: Model.String.required
# # toString: -> "User [@fullName]"
# # hobies:[
# # title: String
# # score:
# # max: Number
# # avg: Model.Unsigned.required
# # doBa: ->
# # console.log 'baba'
# # def: Model.Hex.default -> 'a45f'
# # validated: Boolean
# # ]
# Model.override
# name: 'user'
# schema:
# jobs: Model.required.list
# job1: Model.String.required
# work:
# work1: Number
# Model.override
# name: 'user'
# schema:
# jobs: [
# job2: Model.String
# ]
# phone: [Number]
# work:
# work2: Number
# call: (name)-> console.log '---- hello: ', name
# # # console.log "User schema>>", User[Model.SCHEMA]
# # # console.log "User1>>", user1.__proto__
# # # create user instance
# # # Model
# console.log '-------------- user 1'
# user1=
# name: 'PI:NAME:<NAME>END_PI'
# firstName: 'PI:NAME:<NAME>END_PI'
# lastName: 'PI:NAME:<NAME>END_PI'
# age: 854
# company: 'coredigix'
# jobs:[
# {
# job1: true
# job2: 'test2'
# job3: 'test3'
# },
# {
# job1: 'Stest1'
# job2: 'Stest2'
# job3: 'Stest3'
# }
# ]
# phone: ['10215', '0231525', '421541252', '41524251']
# emails:[{
# type:'office'
# value: 'PI:EMAIL:<EMAIL>END_PI'
# }]
# result = User.fromJSON user1
# console.log '--user: ', user1
# console.log '--result: ', result
# console.log '---------- test 2 =====================================================>'
# test2 = Model.from
# name: 'PI:NAME:<NAME>END_PI'
# schema: [
# yy: String
# name: String
# ]
# console.log '---- test2: ', test2
# obj = [{name: 'abc'}, {name: 'def'}]
# test2.fromJSON obj
# console.log '----obj: ', obj
# console.log '---- end test'
# console.log 'check "toJSON" =====================================================>'
# UserModel2= Model.from
# name: 'user2'
# schema:
# name: String
# id: Model.ObjectId.toJSON (data)->
# console.log '--- call toJSON of id'
# data
# list: [
# attr: Model.String.toJSON (data)->
# console.log '------ call nested toJSON'
# '****'
# ]
# list2: Model
# .toJSON (data)->
# console.log '--* call listed toJSON'
# data.map (l)-> l.id
# .list
# id: Number
# name: String
# info: Model
# .toJSON (data)->
# console.log '--[ call info toJSON'
# {'obj': 'pppp'}
# .value
# name: String
# age: Number
# console.log '---- model UserModel2 created'
# userA=
# name: 'khalid'
# id: '555555555'
# list:[
# {id: 'mmmm'}
# ]
# list2:[
# {id:1, name:'PI:NAME:<NAME>END_PI'}, {id:2, name: 'PI:NAME:<NAME>END_PI'}, {id:3, name: 'PI:NAME:<NAME>END_PI'}
# ]
# info:
# name: 'PI:NAME:<NAME>END_PIdo'
# age: 36
# err= UserModel2.fromJSON userA
# console.log '----err: ', err
# # console.log '----user JSON: ',
# console.log 'DB Test ========================>'
# UserModel3= Model.from
# name: 'user3'
# schema:
# name: Model.String.fromJSON (data)-> 'received: '+ data
# lastName: Model.String
# docs: Model.list Model.String.fromJSON (data)-> 'lst>> '+ data #.toJSON -> 'cccc'.jsonIgnore
# user3= {name: 'PI:NAME:<NAME>END_PI', lastName: 'PI:NAME:<NAME>END_PI', age: 15, docs: ['u', 'k', 'iii']}
# UserModel3.fromJSON user3
# console.log 'user3: ', JSON.stringify user3
console.log 'DATA TEST: ══════════════════════════════════════════════════════════════════'
user=
name: 'PI:NAME:<NAME>END_PI'
firstName: 'PI:NAME:<NAME>END_PI'
fullName: 'PI:NAME:<NAME>END_PI'
# age: 31
hobbies: ['sleep', 'sellp2']
status:
st1:'st shit'
cc:'ops'
# hello: -> '7mar'
skills: [{
name: 'skill1'
},{
name: 'skill2'
}]
jobs: title: 'hello'
user2= User.fromJSON user
console.log '------- User2: ', JSON.stringify(user2, null, "\t")
console.log '------- DB: ', user2.exportDB()
console.log '>>>>>', User.fromJsonToDb user
catch err
console.error "uncaught error:", err
|
[
{
"context": "s/\"))\n view_engine: 'jade'\n\n state_key: 'spotify_auth_state'\n access_key: 'spotify_access_token'\n refres",
"end": 838,
"score": 0.975411057472229,
"start": 820,
"tag": "KEY",
"value": "spotify_auth_state"
},
{
"context": "e_key: 'spotify_auth_sta... | back/src/config.coffee | dropofwill/upm-ui | 0 | parse = require('url-parse')
url = require('url')
path = require('path')
###
# If these environment variable are not set, we cannot continue
# Throw a warning and kill the process
###
if (not process.env.SPOTIFY_CLIENT_ID and
not process.env.SPOTIFY_CLIENT_SECRET and
not process.env.ECHO_API_KEY and
not process.env.UPM_REDIRECT_URI)
utils.envError()
redirect_uri = process.env.UPM_REDIRECT_URI
port = process.env.PORT || parse(redirect_uri).port
module.exports =
client_id: process.env.SPOTIFY_CLIENT_ID
client_secret: process.env.SPOTIFY_CLIENT_SECRET
echo_api_key: process.env.ECHO_API_KEY
redirect_uri: redirect_uri
port: port
views_dir: path.resolve(path.join(__dirname, "../../views/"))
view_engine: 'jade'
state_key: 'spotify_auth_state'
access_key: 'spotify_access_token'
refresh_key: 'spotify_refresh_token'
accounts_host: 'accounts.spotify.com'
api_host: 'api.spotify.com'
auth_path: '/authorize'
token_path: '/api/token'
api_path: '/v1'
scopes: ['user-read-private', 'user-read-email',
'playlist-modify-public', 'playlist-modify-private',
'playlist-read-private', 'playlist-modify']
| 29431 | parse = require('url-parse')
url = require('url')
path = require('path')
###
# If these environment variable are not set, we cannot continue
# Throw a warning and kill the process
###
if (not process.env.SPOTIFY_CLIENT_ID and
not process.env.SPOTIFY_CLIENT_SECRET and
not process.env.ECHO_API_KEY and
not process.env.UPM_REDIRECT_URI)
utils.envError()
redirect_uri = process.env.UPM_REDIRECT_URI
port = process.env.PORT || parse(redirect_uri).port
module.exports =
client_id: process.env.SPOTIFY_CLIENT_ID
client_secret: process.env.SPOTIFY_CLIENT_SECRET
echo_api_key: process.env.ECHO_API_KEY
redirect_uri: redirect_uri
port: port
views_dir: path.resolve(path.join(__dirname, "../../views/"))
view_engine: 'jade'
state_key: '<KEY>'
access_key: '<KEY>_token'
refresh_key: '<KEY>_<KEY>'
accounts_host: 'accounts.spotify.com'
api_host: 'api.spotify.com'
auth_path: '/authorize'
token_path: '/api/token'
api_path: '/v1'
scopes: ['user-read-private', 'user-read-email',
'playlist-modify-public', 'playlist-modify-private',
'playlist-read-private', 'playlist-modify']
| true | parse = require('url-parse')
url = require('url')
path = require('path')
###
# If these environment variable are not set, we cannot continue
# Throw a warning and kill the process
###
if (not process.env.SPOTIFY_CLIENT_ID and
not process.env.SPOTIFY_CLIENT_SECRET and
not process.env.ECHO_API_KEY and
not process.env.UPM_REDIRECT_URI)
utils.envError()
redirect_uri = process.env.UPM_REDIRECT_URI
port = process.env.PORT || parse(redirect_uri).port
module.exports =
client_id: process.env.SPOTIFY_CLIENT_ID
client_secret: process.env.SPOTIFY_CLIENT_SECRET
echo_api_key: process.env.ECHO_API_KEY
redirect_uri: redirect_uri
port: port
views_dir: path.resolve(path.join(__dirname, "../../views/"))
view_engine: 'jade'
state_key: 'PI:KEY:<KEY>END_PI'
access_key: 'PI:KEY:<KEY>END_PI_token'
refresh_key: 'PI:KEY:<KEY>END_PI_PI:KEY:<KEY>END_PI'
accounts_host: 'accounts.spotify.com'
api_host: 'api.spotify.com'
auth_path: '/authorize'
token_path: '/api/token'
api_path: '/v1'
scopes: ['user-read-private', 'user-read-email',
'playlist-modify-public', 'playlist-modify-private',
'playlist-read-private', 'playlist-modify']
|
[
{
"context": "# Description:\n# Motivation from Ron Swanson\n#\n# Dependencies:\n# None\n#\n# Configuration:\n# ",
"end": 46,
"score": 0.9442520141601562,
"start": 35,
"tag": "NAME",
"value": "Ron Swanson"
},
{
"context": "- Motivates you to be more awesome\n#\n# Author:\n# da... | src/scripts/swanson.coffee | Reelhouse/hubot-scripts | 1,450 | # Description:
# Motivation from Ron Swanson
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot swanson me - Motivates you to be more awesome
#
# Author:
# danielmurphy
module.exports = (robot) ->
robot.respond /swanson me$/i, (msg) ->
images = [
"http://i.imgur.com/kW0f7.jpg",
"http://i.imgur.com/vw9gZ.jpg",
"http://i.imgur.com/aV6ju.jpg",
"http://i.imgur.com/AQBJW.jpg",
"http://i.imgur.com/tKkRO.png",
"http://i.imgur.com/lkbGP.png",
"http://i.imgur.com/mx54e.jpg",
"http://i.imgur.com/LASrK.jpg",
"http://i.imgur.com/zvUBG.jpg",
"http://i.imgur.com/tjqca.jpg",
"http://i.imgur.com/q5CYv.jpg",
"http://i.imgur.com/HsoXm.jpg",
"http://i.imgur.com/6EGQm.jpg",
"http://i.imgur.com/DxpKu.jpg",
"http://i.imgur.com/h2c7L.jpg",
"http://i.imgur.com/jNyXL.jpg",
"http://i.imgur.com/K09cJ.jpg",
"http://i.imgur.com/mO0UE.jpg",
"http://i.imgur.com/9hhkx.jpg"]
msg.send msg.random images
| 195694 | # Description:
# Motivation from <NAME>
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot swanson me - Motivates you to be more awesome
#
# Author:
# danielmurphy
module.exports = (robot) ->
robot.respond /swanson me$/i, (msg) ->
images = [
"http://i.imgur.com/kW0f7.jpg",
"http://i.imgur.com/vw9gZ.jpg",
"http://i.imgur.com/aV6ju.jpg",
"http://i.imgur.com/AQBJW.jpg",
"http://i.imgur.com/tKkRO.png",
"http://i.imgur.com/lkbGP.png",
"http://i.imgur.com/mx54e.jpg",
"http://i.imgur.com/LASrK.jpg",
"http://i.imgur.com/zvUBG.jpg",
"http://i.imgur.com/tjqca.jpg",
"http://i.imgur.com/q5CYv.jpg",
"http://i.imgur.com/HsoXm.jpg",
"http://i.imgur.com/6EGQm.jpg",
"http://i.imgur.com/DxpKu.jpg",
"http://i.imgur.com/h2c7L.jpg",
"http://i.imgur.com/jNyXL.jpg",
"http://i.imgur.com/K09cJ.jpg",
"http://i.imgur.com/mO0UE.jpg",
"http://i.imgur.com/9hhkx.jpg"]
msg.send msg.random images
| true | # Description:
# Motivation from PI:NAME:<NAME>END_PI
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot swanson me - Motivates you to be more awesome
#
# Author:
# danielmurphy
module.exports = (robot) ->
robot.respond /swanson me$/i, (msg) ->
images = [
"http://i.imgur.com/kW0f7.jpg",
"http://i.imgur.com/vw9gZ.jpg",
"http://i.imgur.com/aV6ju.jpg",
"http://i.imgur.com/AQBJW.jpg",
"http://i.imgur.com/tKkRO.png",
"http://i.imgur.com/lkbGP.png",
"http://i.imgur.com/mx54e.jpg",
"http://i.imgur.com/LASrK.jpg",
"http://i.imgur.com/zvUBG.jpg",
"http://i.imgur.com/tjqca.jpg",
"http://i.imgur.com/q5CYv.jpg",
"http://i.imgur.com/HsoXm.jpg",
"http://i.imgur.com/6EGQm.jpg",
"http://i.imgur.com/DxpKu.jpg",
"http://i.imgur.com/h2c7L.jpg",
"http://i.imgur.com/jNyXL.jpg",
"http://i.imgur.com/K09cJ.jpg",
"http://i.imgur.com/mO0UE.jpg",
"http://i.imgur.com/9hhkx.jpg"]
msg.send msg.random images
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission ",
"end": 18,
"score": 0.9085404872894287,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-timers-uncaught-exception.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# the first timer throws...
# ...but the second one should still run
uncaughtException = (err) ->
console.error "uncaught handler"
assert.equal err.message, "BAM!"
exceptions++
return
common = require("../common")
assert = require("assert")
exceptions = 0
timer1 = 0
timer2 = 0
console.error "set first timer"
setTimeout (->
console.error "first timer"
timer1++
throw new Error("BAM!")return
), 100
console.error "set second timer"
setTimeout (->
console.error "second timer"
assert.equal timer1, 1
timer2++
return
), 100
process.on "uncaughtException", uncaughtException
exited = false
process.on "exit", ->
assert not exited
exited = true
process.removeListener "uncaughtException", uncaughtException
assert.equal exceptions, 1
assert.equal timer1, 1
assert.equal timer2, 1
return
| 135003 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# the first timer throws...
# ...but the second one should still run
uncaughtException = (err) ->
console.error "uncaught handler"
assert.equal err.message, "BAM!"
exceptions++
return
common = require("../common")
assert = require("assert")
exceptions = 0
timer1 = 0
timer2 = 0
console.error "set first timer"
setTimeout (->
console.error "first timer"
timer1++
throw new Error("BAM!")return
), 100
console.error "set second timer"
setTimeout (->
console.error "second timer"
assert.equal timer1, 1
timer2++
return
), 100
process.on "uncaughtException", uncaughtException
exited = false
process.on "exit", ->
assert not exited
exited = true
process.removeListener "uncaughtException", uncaughtException
assert.equal exceptions, 1
assert.equal timer1, 1
assert.equal timer2, 1
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# the first timer throws...
# ...but the second one should still run
uncaughtException = (err) ->
console.error "uncaught handler"
assert.equal err.message, "BAM!"
exceptions++
return
common = require("../common")
assert = require("assert")
exceptions = 0
timer1 = 0
timer2 = 0
console.error "set first timer"
setTimeout (->
console.error "first timer"
timer1++
throw new Error("BAM!")return
), 100
console.error "set second timer"
setTimeout (->
console.error "second timer"
assert.equal timer1, 1
timer2++
return
), 100
process.on "uncaughtException", uncaughtException
exited = false
process.on "exit", ->
assert not exited
exited = true
process.removeListener "uncaughtException", uncaughtException
assert.equal exceptions, 1
assert.equal timer1, 1
assert.equal timer2, 1
return
|
[
{
"context": " super(5) + ' by slithering'\n\nsam = new Snake 'Sam'\nsam.move()\n\n# Trace:\n# 1: before Animal=<fun",
"end": 189,
"score": 0.9965795874595642,
"start": 186,
"tag": "NAME",
"value": "Sam"
},
{
"context": "ter\n# 11: before sam=/\n# 2: enter @name='Sam'... | test/traces/coffee/inheritance.coffee | paigeruten/pencil-tracer | 1 | class Animal
constructor: (@name) ->
move: (meters) ->
@name + ' moved ' + meters + 'm'
class Snake extends Animal
move: ->
super(5) + ' by slithering'
sam = new Snake 'Sam'
sam.move()
# Trace:
# 1: before Animal=<function>
# 1: after Animal=<function>
# 2: before
# 2: after
# 4: before
# 4: after
# 7: before Snake=<function> Animal=<function>
# 7: after Snake=<function> Animal=<function>
# 8: before
# 8: after
# 11: before sam=/
# 2: enter @name='Sam'
# 2: leave return=/
# 11: after sam=<object> Snake()=<object>
# 12: before sam=<object>
# 8: enter
# 9: before
# 4: enter meters=5
# 5: before @name='Sam' meters=5
# 5: after @name='Sam' meters=5
# 4: leave return='Sam moved 5m'
# 9: after super()='Sam moved 5m'
# 8: leave return='Sam moved 5m by slithering'
# 12: after sam=<object> move()='Sam moved 5m by slithering'
| 13645 | class Animal
constructor: (@name) ->
move: (meters) ->
@name + ' moved ' + meters + 'm'
class Snake extends Animal
move: ->
super(5) + ' by slithering'
sam = new Snake '<NAME>'
sam.move()
# Trace:
# 1: before Animal=<function>
# 1: after Animal=<function>
# 2: before
# 2: after
# 4: before
# 4: after
# 7: before Snake=<function> Animal=<function>
# 7: after Snake=<function> Animal=<function>
# 8: before
# 8: after
# 11: before sam=/
# 2: enter @name='<NAME>'
# 2: leave return=/
# 11: after sam=<object> Snake()=<object>
# 12: before sam=<object>
# 8: enter
# 9: before
# 4: enter meters=5
# 5: before @name='<NAME>' meters=5
# 5: after @name='<NAME>' meters=5
# 4: leave return='Sam moved 5m'
# 9: after super()='Sam moved 5m'
# 8: leave return='Sam moved 5m by slithering'
# 12: after sam=<object> move()='Sam moved 5m by slithering'
| true | class Animal
constructor: (@name) ->
move: (meters) ->
@name + ' moved ' + meters + 'm'
class Snake extends Animal
move: ->
super(5) + ' by slithering'
sam = new Snake 'PI:NAME:<NAME>END_PI'
sam.move()
# Trace:
# 1: before Animal=<function>
# 1: after Animal=<function>
# 2: before
# 2: after
# 4: before
# 4: after
# 7: before Snake=<function> Animal=<function>
# 7: after Snake=<function> Animal=<function>
# 8: before
# 8: after
# 11: before sam=/
# 2: enter @name='PI:NAME:<NAME>END_PI'
# 2: leave return=/
# 11: after sam=<object> Snake()=<object>
# 12: before sam=<object>
# 8: enter
# 9: before
# 4: enter meters=5
# 5: before @name='PI:NAME:<NAME>END_PI' meters=5
# 5: after @name='PI:NAME:<NAME>END_PI' meters=5
# 4: leave return='Sam moved 5m'
# 9: after super()='Sam moved 5m'
# 8: leave return='Sam moved 5m by slithering'
# 12: after sam=<object> move()='Sam moved 5m by slithering'
|
[
{
"context": "uire '../../config'\nconfig.MONGO_URL = 'mongodb://127.0.0.1:27017/nfd-test'\n\ndescribe 'auth', ->\n\n beforeEac",
"end": 185,
"score": 0.9997198581695557,
"start": 176,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "ogs you in', (done) ->\n Users.inser... | test/integration/auth.coffee | craigspaeth/nfd-api | 0 | app = require '../../'
request = require 'superagent'
dal = require '../../dal'
Users = require '../../dal/users'
config = require '../../config'
config.MONGO_URL = 'mongodb://127.0.0.1:27017/nfd-test'
describe 'auth', ->
beforeEach (done) ->
dal.connect =>
@server = app.listen 5000, ->
console.log 'listening'
done()
afterEach ->
@server.close()
xit 'logs you in', (done) ->
Users.insert { email: 'craig@foo.com', password: 'foobarbaz' }, (err, user) ->
Users.findOne { email: 'craig@foo.com' }, (err, user) ->
request.post('http://localhost:5000/login').send({
email: 'craig@foo.com'
password: 'foobarbaz'
}).end (res) ->
console.log res.text
res.body.email.should.equal 'craig@foo.com'
done() | 111058 | app = require '../../'
request = require 'superagent'
dal = require '../../dal'
Users = require '../../dal/users'
config = require '../../config'
config.MONGO_URL = 'mongodb://127.0.0.1:27017/nfd-test'
describe 'auth', ->
beforeEach (done) ->
dal.connect =>
@server = app.listen 5000, ->
console.log 'listening'
done()
afterEach ->
@server.close()
xit 'logs you in', (done) ->
Users.insert { email: '<EMAIL>', password: '<PASSWORD>' }, (err, user) ->
Users.findOne { email: '<EMAIL>' }, (err, user) ->
request.post('http://localhost:5000/login').send({
email: '<EMAIL>'
password: '<PASSWORD>'
}).end (res) ->
console.log res.text
res.body.email.should.equal '<EMAIL>'
done() | true | app = require '../../'
request = require 'superagent'
dal = require '../../dal'
Users = require '../../dal/users'
config = require '../../config'
config.MONGO_URL = 'mongodb://127.0.0.1:27017/nfd-test'
describe 'auth', ->
beforeEach (done) ->
dal.connect =>
@server = app.listen 5000, ->
console.log 'listening'
done()
afterEach ->
@server.close()
xit 'logs you in', (done) ->
Users.insert { email: 'PI:EMAIL:<EMAIL>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI' }, (err, user) ->
Users.findOne { email: 'PI:EMAIL:<EMAIL>END_PI' }, (err, user) ->
request.post('http://localhost:5000/login').send({
email: 'PI:EMAIL:<EMAIL>END_PI'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
}).end (res) ->
console.log res.text
res.body.email.should.equal 'PI:EMAIL:<EMAIL>END_PI'
done() |
[
{
"context": " (ƒ) -> storage.index \"test/models/1\", {\"name\": \"Duke\"}, ƒ\n (ƒ) -> storage.refresh \"test\", ƒ\n ]",
"end": 402,
"score": 0.9958944916725159,
"start": 398,
"tag": "NAME",
"value": "Duke"
},
{
"context": "SON(),\n \"id\": \"1\"\n \"n... | test/sync_test.coffee | craigdavey/elastic-storage | 0 | {ElasticStorage} = require "./test_helper"
{series, nextTick} = require "async"
Backbone = require "backbone"
module.exports =
setUp: (done) -> (new ElasticStorage).init "test", done
tearDown: (done) -> (new ElasticStorage).drop "test", done
"fetch a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "Duke"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync(storage)
model.url = "test/models/1"
model.fetch
success: ->
console.info model.toJSON()
test.symmetry model.toJSON(),
"id": "1"
"name": "Duke"
test.done()
"fetch a collection of models without criteria": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "Duke"}, ƒ
(ƒ) -> storage.index "test/models/2", {"name": "Luke"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
collection = new Backbone.Collection
collection.sync = ElasticStorage.setupBackboneSync(storage)
collection.url = "test/models"
collection.fetch
success: ->
test.same collection.toJSON(), [
{"id": "2", "name": "Luke"}
{"id": "1", "name": "Duke"}
]
test.done()
"fetch a collection of models with criteria": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "Duke"}, ƒ
(ƒ) -> storage.index "test/models/2", {"name": "Luke"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
collection = new Backbone.Collection
collection.sync = ElasticStorage.setupBackboneSync(storage)
collection.url = "test/models"
collection.criteria =
"sort": "name"
"size": 1
collection.fetch
success: ->
test.same collection.toJSON(), [
{"id": "1", "name": "Duke"}
]
test.done()
"save model and listen for sync event": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save()
model.on "sync", ->
test.assert model.has("id")
test.done()
"save model and listen for error event": (test) ->
test.expect 1
storage = new ElasticStorage
storage.create = (address, attributes, callback) ->
nextTick -> callback("Error occurred")
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync(storage)
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save()
model.on "error", ->
test.assert true
test.done()
"create from a model with a `storageAddress` method": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save null,
success: ->
test.assert model.has("id")
test.done()
"create from a model with a `url` method": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save null,
success: ->
test.assert model.has("id")
test.done()
"update a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "Duke"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = "test/models/1"
model.fetch
success: ->
model.set "name", "Duke Lukas"
model.save null,
success: ->
test.equality model.get("name"), "Duke Lukas"
test.done()
"delete a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "Duke"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = "test/models/1"
model.fetch
success: ->
model.destroy
success: ->
storage.has "test/models/1", (error, exists) ->
test.equality exists, no
test.done()
| 42241 | {ElasticStorage} = require "./test_helper"
{series, nextTick} = require "async"
Backbone = require "backbone"
module.exports =
setUp: (done) -> (new ElasticStorage).init "test", done
tearDown: (done) -> (new ElasticStorage).drop "test", done
"fetch a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "<NAME>"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync(storage)
model.url = "test/models/1"
model.fetch
success: ->
console.info model.toJSON()
test.symmetry model.toJSON(),
"id": "1"
"name": "<NAME>"
test.done()
"fetch a collection of models without criteria": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "<NAME>"}, ƒ
(ƒ) -> storage.index "test/models/2", {"name": "<NAME>"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
collection = new Backbone.Collection
collection.sync = ElasticStorage.setupBackboneSync(storage)
collection.url = "test/models"
collection.fetch
success: ->
test.same collection.toJSON(), [
{"id": "2", "name": "<NAME>"}
{"id": "1", "name": "<NAME>"}
]
test.done()
"fetch a collection of models with criteria": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "<NAME>"}, ƒ
(ƒ) -> storage.index "test/models/2", {"name": "<NAME>"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
collection = new Backbone.Collection
collection.sync = ElasticStorage.setupBackboneSync(storage)
collection.url = "test/models"
collection.criteria =
"sort": "name"
"size": 1
collection.fetch
success: ->
test.same collection.toJSON(), [
{"id": "1", "name": "<NAME>"}
]
test.done()
"save model and listen for sync event": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save()
model.on "sync", ->
test.assert model.has("id")
test.done()
"save model and listen for error event": (test) ->
test.expect 1
storage = new ElasticStorage
storage.create = (address, attributes, callback) ->
nextTick -> callback("Error occurred")
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync(storage)
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save()
model.on "error", ->
test.assert true
test.done()
"create from a model with a `storageAddress` method": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save null,
success: ->
test.assert model.has("id")
test.done()
"create from a model with a `url` method": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save null,
success: ->
test.assert model.has("id")
test.done()
"update a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "<NAME>"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = "test/models/1"
model.fetch
success: ->
model.set "name", "<NAME>"
model.save null,
success: ->
test.equality model.get("name"), "<NAME>"
test.done()
"delete a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "<NAME>"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = "test/models/1"
model.fetch
success: ->
model.destroy
success: ->
storage.has "test/models/1", (error, exists) ->
test.equality exists, no
test.done()
| true | {ElasticStorage} = require "./test_helper"
{series, nextTick} = require "async"
Backbone = require "backbone"
module.exports =
setUp: (done) -> (new ElasticStorage).init "test", done
tearDown: (done) -> (new ElasticStorage).drop "test", done
"fetch a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "PI:NAME:<NAME>END_PI"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync(storage)
model.url = "test/models/1"
model.fetch
success: ->
console.info model.toJSON()
test.symmetry model.toJSON(),
"id": "1"
"name": "PI:NAME:<NAME>END_PI"
test.done()
"fetch a collection of models without criteria": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "PI:NAME:<NAME>END_PI"}, ƒ
(ƒ) -> storage.index "test/models/2", {"name": "PI:NAME:<NAME>END_PI"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
collection = new Backbone.Collection
collection.sync = ElasticStorage.setupBackboneSync(storage)
collection.url = "test/models"
collection.fetch
success: ->
test.same collection.toJSON(), [
{"id": "2", "name": "PI:NAME:<NAME>END_PI"}
{"id": "1", "name": "PI:NAME:<NAME>END_PI"}
]
test.done()
"fetch a collection of models with criteria": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "PI:NAME:<NAME>END_PI"}, ƒ
(ƒ) -> storage.index "test/models/2", {"name": "PI:NAME:<NAME>END_PI"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
collection = new Backbone.Collection
collection.sync = ElasticStorage.setupBackboneSync(storage)
collection.url = "test/models"
collection.criteria =
"sort": "name"
"size": 1
collection.fetch
success: ->
test.same collection.toJSON(), [
{"id": "1", "name": "PI:NAME:<NAME>END_PI"}
]
test.done()
"save model and listen for sync event": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save()
model.on "sync", ->
test.assert model.has("id")
test.done()
"save model and listen for error event": (test) ->
test.expect 1
storage = new ElasticStorage
storage.create = (address, attributes, callback) ->
nextTick -> callback("Error occurred")
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync(storage)
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save()
model.on "error", ->
test.assert true
test.done()
"create from a model with a `storageAddress` method": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.storageAddress = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save null,
success: ->
test.assert model.has("id")
test.done()
"create from a model with a `url` method": (test) ->
test.expect 1
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = ->
if @isNew() then "test/models" else "test/models/#{@id}"
model.save null,
success: ->
test.assert model.has("id")
test.done()
"update a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "PI:NAME:<NAME>END_PI"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = "test/models/1"
model.fetch
success: ->
model.set "name", "PI:NAME:<NAME>END_PI"
model.save null,
success: ->
test.equality model.get("name"), "PI:NAME:<NAME>END_PI"
test.done()
"delete a model": (test) ->
test.expect 1
storage = new ElasticStorage
series [
(ƒ) -> storage.index "test/models/1", {"name": "PI:NAME:<NAME>END_PI"}, ƒ
(ƒ) -> storage.refresh "test", ƒ
], ->
model = new Backbone.Model
model.sync = ElasticStorage.setupBackboneSync()
model.url = "test/models/1"
model.fetch
success: ->
model.destroy
success: ->
storage.has "test/models/1", (error, exists) ->
test.equality exists, no
test.done()
|
[
{
"context": "(galaxy_id)}/galaxy/create\", {\n username: username\n password: password\n rememberMe",
"end": 3030,
"score": 0.9989238977432251,
"start": 3022,
"tag": "USERNAME",
"value": "username"
},
{
"context": "{\n username: username\n ... | plugins/starpeace-client/api/api-client.coffee | starpeace-project/starpeace-client-website | 1 | import _ from 'lodash'
import moment from 'moment'
import axios from 'axios'
import SandboxConfiguration from '~/plugins/starpeace-client/api/sandbox/sandbox-configuration.coffee'
export default class APIClient
constructor: (@client_state) ->
new SandboxConfiguration(axios)
axios.defaults.withCredentials = true
@client = axios.create()
galaxy_url: (galaxy_id=null) ->
galaxy_id = @client_state.identity.galaxy_id unless galaxy_id?
galaxy_config = if galaxy_id?.length then @client_state.core.galaxy_cache.galaxy_configuration(galaxy_id) else null
throw "no configuration for galaxy #{galaxy_id}" unless galaxy_config?.api_protocol? && galaxy_config?.api_url? && galaxy_config?.api_port?
"#{galaxy_config.api_protocol}://#{galaxy_config.api_url}:#{galaxy_config.api_port}"
galaxy_auth: (options, galaxy_id=null) ->
galaxy_id = @client_state.identity.galaxy_id unless galaxy_id?
headers = { }
headers.Authorization = "JWT #{@client_state.options.galaxy_jwt}" if @client_state.options.galaxy_id == galaxy_id && @client_state.options.galaxy_jwt?.length
headers.VisaId = @client_state.player.planet_visa_id if @client_state.player.planet_visa_id?.length
_.assign(options, { headers: headers })
handle_request: (request_promise, handle_result) ->
new Promise (done, error) =>
request_promise
.then (result) -> done(handle_result(result.data))
.catch (err) =>
@client_state.handle_connection_error() unless err.status?
@client_state.handle_authorization_error() if err.response?.status == 401 || err.response?.status == 403
error(err)
delete: (path, parameters, handle_result) ->
@handle_request(@client.delete("#{@galaxy_url()}/#{path}", @galaxy_auth(parameters)), handle_result)
get: (path, query, handle_result) ->
@handle_request(@client.get("#{@galaxy_url()}/#{path}", @galaxy_auth({ params: (query || {}) })), handle_result)
get_binary: (path, query, handle_result) ->
@handle_request(@client.get("#{@galaxy_url()}/#{path}", @galaxy_auth({ responseType: 'arraybuffer', params: (query || {}) })), handle_result)
post: (path, parameters, handle_result) ->
@handle_request(@client.post("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
put: (path, parameters, handle_result) ->
@handle_request(@client.put("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
patch: (path, parameters, handle_result) ->
@handle_request(@client.patch("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
galaxy_metadata: (galaxy_id) ->
new Promise (done, error) =>
@client.get("#{@galaxy_url(galaxy_id)}/galaxy/metadata", @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch error
galaxy_create: (galaxy_id, username, password, remember_me) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/create", {
username: username
password: password
rememberMe: remember_me
})
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
galaxy_login: (galaxy_id, username, password, remember_me) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/login", {
username: username
password: password
rememberMe: remember_me
})
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
galaxy_logout: (galaxy_id) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/logout", {}, @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
register_visa: (galaxy_id, planet_id, visa_type) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/planets/#{planet_id}/visa", {
identityType: visa_type
}, @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch(error)
buildings_for_planet: (planet_id, chunk_x, chunk_y) ->
@get("planets/#{planet_id}/buildings", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> result || [])
building_for_id: (building_id) ->
@get("buildings/#{building_id}", {}, (result) -> result)
building_details_for_id: (building_id) ->
@get("buildings/#{building_id}/details", {}, (result) -> result)
construct_building: (planet_id, company_id, definition_id, name, map_x, map_y) ->
@post("planets/#{planet_id}/buildings", {
companyId: company_id
definitionId: definition_id
name: name
mapX: map_x
mapY: map_y
}, (result) -> result)
events_for_planet: (planet_id, last_update) ->
@get("planets/#{planet_id}/events", {
lastUpdate: last_update.format()
}, (result) -> result || {})
building_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/buildings", {}, (result) -> result || {})
core_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/core", {}, (result) -> result || {})
invention_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/inventions", {}, (result) -> result || {})
details_for_planet: (planet_id) ->
@get("planets/#{planet_id}/details", {}, (result) -> result || [])
online_tycoons_for_planet: (planet_id) ->
@get("planets/#{planet_id}/online", {}, (result) -> result || [])
rankings_for_planet: (planet_id, ranking_type_id) ->
@get("planets/#{planet_id}/rankings/#{ranking_type_id}", {}, (result) -> result || [])
search_corporations_for_planet: (planet_id, query, startsWithQuery) ->
@get("planets/#{planet_id}/search/corporations", { query, startsWithQuery }, (result) -> result || [])
search_tycoons_for_planet: (planet_id, query, startsWithQuery) ->
@get("planets/#{planet_id}/search/tycoons", { query, startsWithQuery }, (result) -> result || [])
towns_for_planet: (planet_id) ->
@get("planets/#{planet_id}/towns", {}, (result) -> result)
buildings_for_town: (planet_id, town_id, industryCategoryId, industryTypeId) ->
@get("planets/#{planet_id}/towns/#{town_id}/buildings", { industryCategoryId, industryTypeId }, (result) -> result || [])
companies_for_town: (planet_id, town_id) ->
@get("planets/#{planet_id}/towns/#{town_id}/companies", {}, (result) -> result || [])
details_for_town: (planet_id, town_id) ->
@get("planets/#{planet_id}/towns/#{town_id}/details", {}, (result) -> result || [])
overlay_data_for_planet: (planet_id, type, chunk_x, chunk_y) ->
@get_binary("planets/#{planet_id}/overlay/#{type}", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> if result? then new Uint8Array(result) else null)
road_data_for_planet: (planet_id, chunk_x, chunk_y) ->
@get_binary("planets/#{planet_id}/roads", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> if result? then new Uint8Array(result) else null)
tycoon_for_id: (tycoon_id) ->
@get("tycoons/#{tycoon_id}", {}, (result) -> result)
create_corporation: (planet_id, corporation_name) ->
@post("planets/#{planet_id}/corporations", { name: corporation_name }, (result) -> result)
corporation_for_id: (corporation_id) ->
@get("corporations/#{corporation_id}", {}, (result) -> result)
bookmarks_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/bookmarks", {}, (result) -> result || [])
create_corporation_bookmark: (corporation_id, type, parent_id, order, name, extra_params={}) ->
@post("corporations/#{corporation_id}/bookmarks", _.merge({
type: type
parentId: parent_id
order: order
name: name
}, extra_params), (result) -> result)
update_corporation_bookmarks: (corporation_id, bookmark_deltas) ->
@patch("corporations/#{corporation_id}/bookmarks", {
deltas: bookmark_deltas
}, (result) -> result)
cashflow_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/cashflow", {}, (result) -> result)
mail_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/mail", {}, (result) -> result || [])
send_mail: (corporation_id, to, subject, body) ->
@post("corporations/#{corporation_id}/mail", {
to: to
subject: subject
body: body
}, (result) -> result)
mark_mail_read: (corporation_id, mail_id) ->
@put("corporations/#{corporation_id}/mail/#{mail_id}/mark-read", {}, (result) -> result)
delete_mail: (corporation_id, mail_id) ->
@delete("corporations/#{corporation_id}/mail/#{mail_id}", {}, (result) -> result)
create_company: (planet_id, company_name, seal_id) ->
@post("planets/#{planet_id}/companies", { name: company_name, sealId: seal_id }, (result) -> result)
buildings_for_company: (company_id) ->
@get("companies/#{company_id}/buildings", {}, (result) -> result || [])
inventions_for_company: (company_id) ->
@get("companies/#{company_id}/inventions", {}, (result) -> result || [])
queue_company_invention: (company_id, invention_id) ->
@put("companies/#{company_id}/inventions/#{invention_id}", {}, (result) -> result)
sell_company_invention: (company_id, invention_id) ->
@delete("companies/#{company_id}/inventions/#{invention_id}", {}, (result) -> result)
| 194874 | import _ from 'lodash'
import moment from 'moment'
import axios from 'axios'
import SandboxConfiguration from '~/plugins/starpeace-client/api/sandbox/sandbox-configuration.coffee'
export default class APIClient
constructor: (@client_state) ->
new SandboxConfiguration(axios)
axios.defaults.withCredentials = true
@client = axios.create()
galaxy_url: (galaxy_id=null) ->
galaxy_id = @client_state.identity.galaxy_id unless galaxy_id?
galaxy_config = if galaxy_id?.length then @client_state.core.galaxy_cache.galaxy_configuration(galaxy_id) else null
throw "no configuration for galaxy #{galaxy_id}" unless galaxy_config?.api_protocol? && galaxy_config?.api_url? && galaxy_config?.api_port?
"#{galaxy_config.api_protocol}://#{galaxy_config.api_url}:#{galaxy_config.api_port}"
galaxy_auth: (options, galaxy_id=null) ->
galaxy_id = @client_state.identity.galaxy_id unless galaxy_id?
headers = { }
headers.Authorization = "JWT #{@client_state.options.galaxy_jwt}" if @client_state.options.galaxy_id == galaxy_id && @client_state.options.galaxy_jwt?.length
headers.VisaId = @client_state.player.planet_visa_id if @client_state.player.planet_visa_id?.length
_.assign(options, { headers: headers })
handle_request: (request_promise, handle_result) ->
new Promise (done, error) =>
request_promise
.then (result) -> done(handle_result(result.data))
.catch (err) =>
@client_state.handle_connection_error() unless err.status?
@client_state.handle_authorization_error() if err.response?.status == 401 || err.response?.status == 403
error(err)
delete: (path, parameters, handle_result) ->
@handle_request(@client.delete("#{@galaxy_url()}/#{path}", @galaxy_auth(parameters)), handle_result)
get: (path, query, handle_result) ->
@handle_request(@client.get("#{@galaxy_url()}/#{path}", @galaxy_auth({ params: (query || {}) })), handle_result)
get_binary: (path, query, handle_result) ->
@handle_request(@client.get("#{@galaxy_url()}/#{path}", @galaxy_auth({ responseType: 'arraybuffer', params: (query || {}) })), handle_result)
post: (path, parameters, handle_result) ->
@handle_request(@client.post("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
put: (path, parameters, handle_result) ->
@handle_request(@client.put("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
patch: (path, parameters, handle_result) ->
@handle_request(@client.patch("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
galaxy_metadata: (galaxy_id) ->
new Promise (done, error) =>
@client.get("#{@galaxy_url(galaxy_id)}/galaxy/metadata", @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch error
galaxy_create: (galaxy_id, username, password, remember_me) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/create", {
username: username
password: <PASSWORD>
rememberMe: remember_me
})
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
galaxy_login: (galaxy_id, username, password, remember_me) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/login", {
username: username
password: <PASSWORD>
rememberMe: remember_me
})
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
galaxy_logout: (galaxy_id) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/logout", {}, @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
register_visa: (galaxy_id, planet_id, visa_type) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/planets/#{planet_id}/visa", {
identityType: visa_type
}, @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch(error)
buildings_for_planet: (planet_id, chunk_x, chunk_y) ->
@get("planets/#{planet_id}/buildings", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> result || [])
building_for_id: (building_id) ->
@get("buildings/#{building_id}", {}, (result) -> result)
building_details_for_id: (building_id) ->
@get("buildings/#{building_id}/details", {}, (result) -> result)
construct_building: (planet_id, company_id, definition_id, name, map_x, map_y) ->
@post("planets/#{planet_id}/buildings", {
companyId: company_id
definitionId: definition_id
name: name
mapX: map_x
mapY: map_y
}, (result) -> result)
events_for_planet: (planet_id, last_update) ->
@get("planets/#{planet_id}/events", {
lastUpdate: last_update.format()
}, (result) -> result || {})
building_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/buildings", {}, (result) -> result || {})
core_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/core", {}, (result) -> result || {})
invention_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/inventions", {}, (result) -> result || {})
details_for_planet: (planet_id) ->
@get("planets/#{planet_id}/details", {}, (result) -> result || [])
online_tycoons_for_planet: (planet_id) ->
@get("planets/#{planet_id}/online", {}, (result) -> result || [])
rankings_for_planet: (planet_id, ranking_type_id) ->
@get("planets/#{planet_id}/rankings/#{ranking_type_id}", {}, (result) -> result || [])
search_corporations_for_planet: (planet_id, query, startsWithQuery) ->
@get("planets/#{planet_id}/search/corporations", { query, startsWithQuery }, (result) -> result || [])
search_tycoons_for_planet: (planet_id, query, startsWithQuery) ->
@get("planets/#{planet_id}/search/tycoons", { query, startsWithQuery }, (result) -> result || [])
towns_for_planet: (planet_id) ->
@get("planets/#{planet_id}/towns", {}, (result) -> result)
buildings_for_town: (planet_id, town_id, industryCategoryId, industryTypeId) ->
@get("planets/#{planet_id}/towns/#{town_id}/buildings", { industryCategoryId, industryTypeId }, (result) -> result || [])
companies_for_town: (planet_id, town_id) ->
@get("planets/#{planet_id}/towns/#{town_id}/companies", {}, (result) -> result || [])
details_for_town: (planet_id, town_id) ->
@get("planets/#{planet_id}/towns/#{town_id}/details", {}, (result) -> result || [])
overlay_data_for_planet: (planet_id, type, chunk_x, chunk_y) ->
@get_binary("planets/#{planet_id}/overlay/#{type}", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> if result? then new Uint8Array(result) else null)
road_data_for_planet: (planet_id, chunk_x, chunk_y) ->
@get_binary("planets/#{planet_id}/roads", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> if result? then new Uint8Array(result) else null)
tycoon_for_id: (tycoon_id) ->
@get("tycoons/#{tycoon_id}", {}, (result) -> result)
create_corporation: (planet_id, corporation_name) ->
@post("planets/#{planet_id}/corporations", { name: corporation_name }, (result) -> result)
corporation_for_id: (corporation_id) ->
@get("corporations/#{corporation_id}", {}, (result) -> result)
bookmarks_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/bookmarks", {}, (result) -> result || [])
create_corporation_bookmark: (corporation_id, type, parent_id, order, name, extra_params={}) ->
@post("corporations/#{corporation_id}/bookmarks", _.merge({
type: type
parentId: parent_id
order: order
name: name
}, extra_params), (result) -> result)
update_corporation_bookmarks: (corporation_id, bookmark_deltas) ->
@patch("corporations/#{corporation_id}/bookmarks", {
deltas: bookmark_deltas
}, (result) -> result)
cashflow_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/cashflow", {}, (result) -> result)
mail_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/mail", {}, (result) -> result || [])
send_mail: (corporation_id, to, subject, body) ->
@post("corporations/#{corporation_id}/mail", {
to: to
subject: subject
body: body
}, (result) -> result)
mark_mail_read: (corporation_id, mail_id) ->
@put("corporations/#{corporation_id}/mail/#{mail_id}/mark-read", {}, (result) -> result)
delete_mail: (corporation_id, mail_id) ->
@delete("corporations/#{corporation_id}/mail/#{mail_id}", {}, (result) -> result)
create_company: (planet_id, company_name, seal_id) ->
@post("planets/#{planet_id}/companies", { name: company_name, sealId: seal_id }, (result) -> result)
buildings_for_company: (company_id) ->
@get("companies/#{company_id}/buildings", {}, (result) -> result || [])
inventions_for_company: (company_id) ->
@get("companies/#{company_id}/inventions", {}, (result) -> result || [])
queue_company_invention: (company_id, invention_id) ->
@put("companies/#{company_id}/inventions/#{invention_id}", {}, (result) -> result)
sell_company_invention: (company_id, invention_id) ->
@delete("companies/#{company_id}/inventions/#{invention_id}", {}, (result) -> result)
| true | import _ from 'lodash'
import moment from 'moment'
import axios from 'axios'
import SandboxConfiguration from '~/plugins/starpeace-client/api/sandbox/sandbox-configuration.coffee'
export default class APIClient
constructor: (@client_state) ->
new SandboxConfiguration(axios)
axios.defaults.withCredentials = true
@client = axios.create()
galaxy_url: (galaxy_id=null) ->
galaxy_id = @client_state.identity.galaxy_id unless galaxy_id?
galaxy_config = if galaxy_id?.length then @client_state.core.galaxy_cache.galaxy_configuration(galaxy_id) else null
throw "no configuration for galaxy #{galaxy_id}" unless galaxy_config?.api_protocol? && galaxy_config?.api_url? && galaxy_config?.api_port?
"#{galaxy_config.api_protocol}://#{galaxy_config.api_url}:#{galaxy_config.api_port}"
galaxy_auth: (options, galaxy_id=null) ->
galaxy_id = @client_state.identity.galaxy_id unless galaxy_id?
headers = { }
headers.Authorization = "JWT #{@client_state.options.galaxy_jwt}" if @client_state.options.galaxy_id == galaxy_id && @client_state.options.galaxy_jwt?.length
headers.VisaId = @client_state.player.planet_visa_id if @client_state.player.planet_visa_id?.length
_.assign(options, { headers: headers })
handle_request: (request_promise, handle_result) ->
new Promise (done, error) =>
request_promise
.then (result) -> done(handle_result(result.data))
.catch (err) =>
@client_state.handle_connection_error() unless err.status?
@client_state.handle_authorization_error() if err.response?.status == 401 || err.response?.status == 403
error(err)
delete: (path, parameters, handle_result) ->
@handle_request(@client.delete("#{@galaxy_url()}/#{path}", @galaxy_auth(parameters)), handle_result)
get: (path, query, handle_result) ->
@handle_request(@client.get("#{@galaxy_url()}/#{path}", @galaxy_auth({ params: (query || {}) })), handle_result)
get_binary: (path, query, handle_result) ->
@handle_request(@client.get("#{@galaxy_url()}/#{path}", @galaxy_auth({ responseType: 'arraybuffer', params: (query || {}) })), handle_result)
post: (path, parameters, handle_result) ->
@handle_request(@client.post("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
put: (path, parameters, handle_result) ->
@handle_request(@client.put("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
patch: (path, parameters, handle_result) ->
@handle_request(@client.patch("#{@galaxy_url()}/#{path}", parameters, @galaxy_auth({})), handle_result)
galaxy_metadata: (galaxy_id) ->
new Promise (done, error) =>
@client.get("#{@galaxy_url(galaxy_id)}/galaxy/metadata", @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch error
galaxy_create: (galaxy_id, username, password, remember_me) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/create", {
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
rememberMe: remember_me
})
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
galaxy_login: (galaxy_id, username, password, remember_me) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/login", {
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
rememberMe: remember_me
})
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
galaxy_logout: (galaxy_id) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/galaxy/logout", {}, @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch (err) -> error(err.response)
register_visa: (galaxy_id, planet_id, visa_type) ->
new Promise (done, error) =>
@client.post("#{@galaxy_url(galaxy_id)}/planets/#{planet_id}/visa", {
identityType: visa_type
}, @galaxy_auth({}, galaxy_id))
.then (result) -> done(result.data)
.catch(error)
buildings_for_planet: (planet_id, chunk_x, chunk_y) ->
@get("planets/#{planet_id}/buildings", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> result || [])
building_for_id: (building_id) ->
@get("buildings/#{building_id}", {}, (result) -> result)
building_details_for_id: (building_id) ->
@get("buildings/#{building_id}/details", {}, (result) -> result)
construct_building: (planet_id, company_id, definition_id, name, map_x, map_y) ->
@post("planets/#{planet_id}/buildings", {
companyId: company_id
definitionId: definition_id
name: name
mapX: map_x
mapY: map_y
}, (result) -> result)
events_for_planet: (planet_id, last_update) ->
@get("planets/#{planet_id}/events", {
lastUpdate: last_update.format()
}, (result) -> result || {})
building_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/buildings", {}, (result) -> result || {})
core_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/core", {}, (result) -> result || {})
invention_metadata_for_planet: (planet_id) ->
@get("planets/#{planet_id}/metadata/inventions", {}, (result) -> result || {})
details_for_planet: (planet_id) ->
@get("planets/#{planet_id}/details", {}, (result) -> result || [])
online_tycoons_for_planet: (planet_id) ->
@get("planets/#{planet_id}/online", {}, (result) -> result || [])
rankings_for_planet: (planet_id, ranking_type_id) ->
@get("planets/#{planet_id}/rankings/#{ranking_type_id}", {}, (result) -> result || [])
search_corporations_for_planet: (planet_id, query, startsWithQuery) ->
@get("planets/#{planet_id}/search/corporations", { query, startsWithQuery }, (result) -> result || [])
search_tycoons_for_planet: (planet_id, query, startsWithQuery) ->
@get("planets/#{planet_id}/search/tycoons", { query, startsWithQuery }, (result) -> result || [])
towns_for_planet: (planet_id) ->
@get("planets/#{planet_id}/towns", {}, (result) -> result)
buildings_for_town: (planet_id, town_id, industryCategoryId, industryTypeId) ->
@get("planets/#{planet_id}/towns/#{town_id}/buildings", { industryCategoryId, industryTypeId }, (result) -> result || [])
companies_for_town: (planet_id, town_id) ->
@get("planets/#{planet_id}/towns/#{town_id}/companies", {}, (result) -> result || [])
details_for_town: (planet_id, town_id) ->
@get("planets/#{planet_id}/towns/#{town_id}/details", {}, (result) -> result || [])
overlay_data_for_planet: (planet_id, type, chunk_x, chunk_y) ->
@get_binary("planets/#{planet_id}/overlay/#{type}", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> if result? then new Uint8Array(result) else null)
road_data_for_planet: (planet_id, chunk_x, chunk_y) ->
@get_binary("planets/#{planet_id}/roads", {
chunkX: chunk_x
chunkY: chunk_y
}, (result) -> if result? then new Uint8Array(result) else null)
tycoon_for_id: (tycoon_id) ->
@get("tycoons/#{tycoon_id}", {}, (result) -> result)
create_corporation: (planet_id, corporation_name) ->
@post("planets/#{planet_id}/corporations", { name: corporation_name }, (result) -> result)
corporation_for_id: (corporation_id) ->
@get("corporations/#{corporation_id}", {}, (result) -> result)
bookmarks_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/bookmarks", {}, (result) -> result || [])
create_corporation_bookmark: (corporation_id, type, parent_id, order, name, extra_params={}) ->
@post("corporations/#{corporation_id}/bookmarks", _.merge({
type: type
parentId: parent_id
order: order
name: name
}, extra_params), (result) -> result)
update_corporation_bookmarks: (corporation_id, bookmark_deltas) ->
@patch("corporations/#{corporation_id}/bookmarks", {
deltas: bookmark_deltas
}, (result) -> result)
cashflow_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/cashflow", {}, (result) -> result)
mail_for_corporation: (corporation_id) ->
@get("corporations/#{corporation_id}/mail", {}, (result) -> result || [])
send_mail: (corporation_id, to, subject, body) ->
@post("corporations/#{corporation_id}/mail", {
to: to
subject: subject
body: body
}, (result) -> result)
mark_mail_read: (corporation_id, mail_id) ->
@put("corporations/#{corporation_id}/mail/#{mail_id}/mark-read", {}, (result) -> result)
delete_mail: (corporation_id, mail_id) ->
@delete("corporations/#{corporation_id}/mail/#{mail_id}", {}, (result) -> result)
create_company: (planet_id, company_name, seal_id) ->
@post("planets/#{planet_id}/companies", { name: company_name, sealId: seal_id }, (result) -> result)
buildings_for_company: (company_id) ->
@get("companies/#{company_id}/buildings", {}, (result) -> result || [])
inventions_for_company: (company_id) ->
@get("companies/#{company_id}/inventions", {}, (result) -> result || [])
queue_company_invention: (company_id, invention_id) ->
@put("companies/#{company_id}/inventions/#{invention_id}", {}, (result) -> result)
sell_company_invention: (company_id, invention_id) ->
@delete("companies/#{company_id}/inventions/#{invention_id}", {}, (result) -> result)
|
[
{
"context": "ps controller\n#\n# Nodize CMS\n# https://github.com/nodize/nodizecms\n#\n# Copyright 2012, Hypee\n# http://hype",
"end": 62,
"score": 0.9994558095932007,
"start": 56,
"tag": "USERNAME",
"value": "nodize"
},
{
"context": "://github.com/nodize/nodizecms\n#\n# Copyright 2... | modules/backend/controllers/ctrl_group.coffee | nodize/nodizecms | 32 | # Groups controller
#
# Nodize CMS
# https://github.com/nodize/nodizecms
#
# Copyright 2012, Hypee
# http://hypee.com
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
@include = ->
#
#
# Creating a new group
#
@post '/:lang/admin/groups/save' : (req, res) ->
values = req.body
#
# Building new record
#
user_group = User_group.build()
user_group.group_name = values.group_name
user_group.level = values.level
#
# Saving it
#
user_group.save()
.on "success", (user_group)->
#
# Updating id
#
user_group.id_group = user_group.id
user_group.save()
.on "success", ->
message =
message_type:"success"
message:"Group saved"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
]
callback:null
res.send message
.on "failure", (err) ->
console.log "Database error on group save :", err
.on "failure", (err) ->
console.log "Database error on group save :", err
#
# Deleting a group
#
@post '/:lang/admin/groups/delete/:id_group' : (req, res) ->
#
# Find the group
#
User_group.find({where:{id:req.params.id_group}})
.on 'success', (user_group) ->
#
# Then delete it
#
user_group.destroy()
.on "success", (user_group) ->
message =
message_type:"success"
message:"Group deleted"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
],
callback:null
id:user_group.id
res.send message
.on 'failure', (err) ->
console.log 'database error ', err
.on 'failure', (err) ->
console.log 'database error ', err
#
# Editing a new group
#
@post '/:lang/admin/groups/edit/:id_group' : (req) ->
loadGroup = ->
User_group.find({where:{id:req.params.id_group}})
.on 'success', (user_group) ->
renderPage( user_group )
.on 'failure', (err) ->
console.log 'database error ', err
renderPage = (user_group) =>
@render "backend_group",
layout : no
hardcode : @helpers
lang : @params.lang
ion_lang : ion_lang[ @params.lang ]
user_group : user_group
loadGroup()
#
# Updating group record
#
@post '/:lang/admin/groups/update' : (req, res) ->
values = req.body
loadGroup = ->
User_group.find({where:{id_group:values.group_PK}})
.on 'success', (user_group) ->
updateGroup( user_group )
.on 'failure', (err) ->
console.log 'database error ', err
updateGroup = (user_group) ->
user_group.group_name = values.group_name
user_group.level = values.level
user_group.save()
.on 'success', (user_group) ->
message =
message_type:"success"
message:"Group updated"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
],
callback:null
id:user_group.id_group
res.send message
.on 'failure', (err) ->
console.log 'database error ', err
loadGroup()
| 16720 | # Groups controller
#
# Nodize CMS
# https://github.com/nodize/nodizecms
#
# Copyright 2012, <NAME>
# http://hypee.com
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
@include = ->
#
#
# Creating a new group
#
@post '/:lang/admin/groups/save' : (req, res) ->
values = req.body
#
# Building new record
#
user_group = User_group.build()
user_group.group_name = values.group_name
user_group.level = values.level
#
# Saving it
#
user_group.save()
.on "success", (user_group)->
#
# Updating id
#
user_group.id_group = user_group.id
user_group.save()
.on "success", ->
message =
message_type:"success"
message:"Group saved"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
]
callback:null
res.send message
.on "failure", (err) ->
console.log "Database error on group save :", err
.on "failure", (err) ->
console.log "Database error on group save :", err
#
# Deleting a group
#
@post '/:lang/admin/groups/delete/:id_group' : (req, res) ->
#
# Find the group
#
User_group.find({where:{id:req.params.id_group}})
.on 'success', (user_group) ->
#
# Then delete it
#
user_group.destroy()
.on "success", (user_group) ->
message =
message_type:"success"
message:"Group deleted"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
],
callback:null
id:user_group.id
res.send message
.on 'failure', (err) ->
console.log 'database error ', err
.on 'failure', (err) ->
console.log 'database error ', err
#
# Editing a new group
#
@post '/:lang/admin/groups/edit/:id_group' : (req) ->
loadGroup = ->
User_group.find({where:{id:req.params.id_group}})
.on 'success', (user_group) ->
renderPage( user_group )
.on 'failure', (err) ->
console.log 'database error ', err
renderPage = (user_group) =>
@render "backend_group",
layout : no
hardcode : @helpers
lang : @params.lang
ion_lang : ion_lang[ @params.lang ]
user_group : user_group
loadGroup()
#
# Updating group record
#
@post '/:lang/admin/groups/update' : (req, res) ->
values = req.body
loadGroup = ->
User_group.find({where:{id_group:values.group_PK}})
.on 'success', (user_group) ->
updateGroup( user_group )
.on 'failure', (err) ->
console.log 'database error ', err
updateGroup = (user_group) ->
user_group.group_name = values.group_name
user_group.level = values.level
user_group.save()
.on 'success', (user_group) ->
message =
message_type:"success"
message:"Group updated"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
],
callback:null
id:user_group.id_group
res.send message
.on 'failure', (err) ->
console.log 'database error ', err
loadGroup()
| true | # Groups controller
#
# Nodize CMS
# https://github.com/nodize/nodizecms
#
# Copyright 2012, PI:NAME:<NAME>END_PI
# http://hypee.com
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
@include = ->
#
#
# Creating a new group
#
@post '/:lang/admin/groups/save' : (req, res) ->
values = req.body
#
# Building new record
#
user_group = User_group.build()
user_group.group_name = values.group_name
user_group.level = values.level
#
# Saving it
#
user_group.save()
.on "success", (user_group)->
#
# Updating id
#
user_group.id_group = user_group.id
user_group.save()
.on "success", ->
message =
message_type:"success"
message:"Group saved"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
]
callback:null
res.send message
.on "failure", (err) ->
console.log "Database error on group save :", err
.on "failure", (err) ->
console.log "Database error on group save :", err
#
# Deleting a group
#
@post '/:lang/admin/groups/delete/:id_group' : (req, res) ->
#
# Find the group
#
User_group.find({where:{id:req.params.id_group}})
.on 'success', (user_group) ->
#
# Then delete it
#
user_group.destroy()
.on "success", (user_group) ->
message =
message_type:"success"
message:"Group deleted"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
],
callback:null
id:user_group.id
res.send message
.on 'failure', (err) ->
console.log 'database error ', err
.on 'failure', (err) ->
console.log 'database error ', err
#
# Editing a new group
#
@post '/:lang/admin/groups/edit/:id_group' : (req) ->
loadGroup = ->
User_group.find({where:{id:req.params.id_group}})
.on 'success', (user_group) ->
renderPage( user_group )
.on 'failure', (err) ->
console.log 'database error ', err
renderPage = (user_group) =>
@render "backend_group",
layout : no
hardcode : @helpers
lang : @params.lang
ion_lang : ion_lang[ @params.lang ]
user_group : user_group
loadGroup()
#
# Updating group record
#
@post '/:lang/admin/groups/update' : (req, res) ->
values = req.body
loadGroup = ->
User_group.find({where:{id_group:values.group_PK}})
.on 'success', (user_group) ->
updateGroup( user_group )
.on 'failure', (err) ->
console.log 'database error ', err
updateGroup = (user_group) ->
user_group.group_name = values.group_name
user_group.level = values.level
user_group.save()
.on 'success', (user_group) ->
message =
message_type:"success"
message:"Group updated"
update:[
element:"mainPanel"
url:"\/"+req.params.lang+"\/admin\/users"
],
callback:null
id:user_group.id_group
res.send message
.on 'failure', (err) ->
console.log 'database error ', err
loadGroup()
|
[
{
"context": "lass IsyInsteonDimmerNode extends IsyNode\n key: 'insteonDimmer'\n types: [[1, 14], [1, 32]]\n\n aspects:\n powe",
"end": 114,
"score": 0.9388879537582397,
"start": 101,
"tag": "KEY",
"value": "insteonDimmer"
}
] | lib/adapters/isy/IsyInsteonDimmerNode.coffee | monitron/jarvis-ha | 1 |
IsyNode = require('./IsyNode')
module.exports = class IsyInsteonDimmerNode extends IsyNode
key: 'insteonDimmer'
types: [[1, 14], [1, 32]]
aspects:
powerOnOff:
commands:
set: (node, value) ->
if value
command = 'DON'
args = [255]
operative = node.getAspect('brightness').getDatum('state') != 255
else
command = 'DOF'
args = []
operative = node.getAspect('powerOnOff').getDatum('state') == true
node.adapter.executeCommand node.id, command, args, operative
brightness:
commands:
set: (node, value) ->
operative = node.getAspect('brightness').getDatum('state') != value
node.adapter.executeCommand node.id, 'DON',
[node.percentageToByte(value)], operative
processData: (data) ->
if data.ST?
brightness = parseInt(data.ST)
@getAspect('powerOnOff').setData state: (brightness != 0)
@getAspect('brightness').setData state: @byteToPercentage(brightness)
| 136420 |
IsyNode = require('./IsyNode')
module.exports = class IsyInsteonDimmerNode extends IsyNode
key: '<KEY>'
types: [[1, 14], [1, 32]]
aspects:
powerOnOff:
commands:
set: (node, value) ->
if value
command = 'DON'
args = [255]
operative = node.getAspect('brightness').getDatum('state') != 255
else
command = 'DOF'
args = []
operative = node.getAspect('powerOnOff').getDatum('state') == true
node.adapter.executeCommand node.id, command, args, operative
brightness:
commands:
set: (node, value) ->
operative = node.getAspect('brightness').getDatum('state') != value
node.adapter.executeCommand node.id, 'DON',
[node.percentageToByte(value)], operative
processData: (data) ->
if data.ST?
brightness = parseInt(data.ST)
@getAspect('powerOnOff').setData state: (brightness != 0)
@getAspect('brightness').setData state: @byteToPercentage(brightness)
| true |
IsyNode = require('./IsyNode')
module.exports = class IsyInsteonDimmerNode extends IsyNode
key: 'PI:KEY:<KEY>END_PI'
types: [[1, 14], [1, 32]]
aspects:
powerOnOff:
commands:
set: (node, value) ->
if value
command = 'DON'
args = [255]
operative = node.getAspect('brightness').getDatum('state') != 255
else
command = 'DOF'
args = []
operative = node.getAspect('powerOnOff').getDatum('state') == true
node.adapter.executeCommand node.id, command, args, operative
brightness:
commands:
set: (node, value) ->
operative = node.getAspect('brightness').getDatum('state') != value
node.adapter.executeCommand node.id, 'DON',
[node.percentageToByte(value)], operative
processData: (data) ->
if data.ST?
brightness = parseInt(data.ST)
@getAspect('powerOnOff').setData state: (brightness != 0)
@getAspect('brightness').setData state: @byteToPercentage(brightness)
|
[
{
"context": "hisCollectible\n achieved.push\n name: \"Impossible I\"\n desc: \"Find a certain collectible in Nor",
"end": 576,
"score": 0.9715597629547119,
"start": 564,
"tag": "NAME",
"value": "Impossible I"
}
] | src/character/achievements/Impossible.coffee | jawsome/IdleLands | 3 |
Achievement = require "../base/Achievement"
_ = require "lodash"
`/**
* This achievement, in a perfect world, will never be earned. But, feel free to try!
*
* @name Impossible
* @prerequisite Varies
* @reward Bragging rights
* @category Achievements
* @package Player
*/`
class Impossible extends Achievement
getAllAchievedFor: (player) ->
achieved = []
howDidYouEvenGetThisCollectible = _.findWhere player.collectibles, {name: "How Did You Even Get Out Here"}
if howDidYouEvenGetThisCollectible
achieved.push
name: "Impossible I"
desc: "Find a certain collectible in Norkos -9"
reward: "Bragging rights"
title: "Leet H4x0r"
type: "special"
achieved
module.exports = exports = Impossible | 94904 |
Achievement = require "../base/Achievement"
_ = require "lodash"
`/**
* This achievement, in a perfect world, will never be earned. But, feel free to try!
*
* @name Impossible
* @prerequisite Varies
* @reward Bragging rights
* @category Achievements
* @package Player
*/`
class Impossible extends Achievement
getAllAchievedFor: (player) ->
achieved = []
howDidYouEvenGetThisCollectible = _.findWhere player.collectibles, {name: "How Did You Even Get Out Here"}
if howDidYouEvenGetThisCollectible
achieved.push
name: "<NAME>"
desc: "Find a certain collectible in Norkos -9"
reward: "Bragging rights"
title: "Leet H4x0r"
type: "special"
achieved
module.exports = exports = Impossible | true |
Achievement = require "../base/Achievement"
_ = require "lodash"
`/**
* This achievement, in a perfect world, will never be earned. But, feel free to try!
*
* @name Impossible
* @prerequisite Varies
* @reward Bragging rights
* @category Achievements
* @package Player
*/`
class Impossible extends Achievement
getAllAchievedFor: (player) ->
achieved = []
howDidYouEvenGetThisCollectible = _.findWhere player.collectibles, {name: "How Did You Even Get Out Here"}
if howDidYouEvenGetThisCollectible
achieved.push
name: "PI:NAME:<NAME>END_PI"
desc: "Find a certain collectible in Norkos -9"
reward: "Bragging rights"
title: "Leet H4x0r"
type: "special"
achieved
module.exports = exports = Impossible |
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999092817306519,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/account-edit-avatar.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @AccountEditAvatar
constructor: ->
$(document).on 'turbolinks:load', @initialize
$(document).on 'turbolinks:before-cache', @rollback
$.subscribe 'dragenterGlobal', @overlayStart
$.subscribe 'dragendGlobal', @overlayEnd
$(document).on 'dragenter', '.js-account-edit-avatar', @overlayEnter
$(document).on 'dragover', '.js-account-edit-avatar', @overlayHover
@main = document.getElementsByClassName('js-account-edit-avatar')
$button: ->
$('.js-account-edit-avatar__button')
initialize: =>
return if !@main[0]?
@isAvailable = true
@$main = $(@main)
@$button().fileupload
url: laroute.route('account.avatar')
dataType: 'json'
dropZone: @$main
submit: =>
@main[0].classList.add 'js-account-edit-avatar--saving'
$.publish 'dragendGlobal'
done: (_e, data) =>
$.publish 'user:update', data.result
fail: osu.fileuploadFailCallback(@$button)
complete: =>
@main[0].classList.remove 'js-account-edit-avatar--saving'
overlayEnd: =>
return if !@isAvailable
@main[0].classList.remove 'js-account-edit-avatar--start'
overlayEnter: =>
@dragging ?= true
overlayHover: =>
return if !@dragging
@main[0].classList.add 'js-account-edit-avatar--hover'
# see GlobalDrag
Timeout.clear @overlayLeaveTimeout
@overlayLeaveTimeout = Timeout.set 100, @overlayLeave
overlayLeave: =>
@dragging = null
@main[0].classList.remove 'js-account-edit-avatar--hover'
overlayStart: =>
return if !@isAvailable
@main[0].classList.add 'js-account-edit-avatar--start'
rollback: =>
return if !@isAvailable
@isAvailable = false
@$button().fileupload 'destroy'
| 225770 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @AccountEditAvatar
constructor: ->
$(document).on 'turbolinks:load', @initialize
$(document).on 'turbolinks:before-cache', @rollback
$.subscribe 'dragenterGlobal', @overlayStart
$.subscribe 'dragendGlobal', @overlayEnd
$(document).on 'dragenter', '.js-account-edit-avatar', @overlayEnter
$(document).on 'dragover', '.js-account-edit-avatar', @overlayHover
@main = document.getElementsByClassName('js-account-edit-avatar')
$button: ->
$('.js-account-edit-avatar__button')
initialize: =>
return if !@main[0]?
@isAvailable = true
@$main = $(@main)
@$button().fileupload
url: laroute.route('account.avatar')
dataType: 'json'
dropZone: @$main
submit: =>
@main[0].classList.add 'js-account-edit-avatar--saving'
$.publish 'dragendGlobal'
done: (_e, data) =>
$.publish 'user:update', data.result
fail: osu.fileuploadFailCallback(@$button)
complete: =>
@main[0].classList.remove 'js-account-edit-avatar--saving'
overlayEnd: =>
return if !@isAvailable
@main[0].classList.remove 'js-account-edit-avatar--start'
overlayEnter: =>
@dragging ?= true
overlayHover: =>
return if !@dragging
@main[0].classList.add 'js-account-edit-avatar--hover'
# see GlobalDrag
Timeout.clear @overlayLeaveTimeout
@overlayLeaveTimeout = Timeout.set 100, @overlayLeave
overlayLeave: =>
@dragging = null
@main[0].classList.remove 'js-account-edit-avatar--hover'
overlayStart: =>
return if !@isAvailable
@main[0].classList.add 'js-account-edit-avatar--start'
rollback: =>
return if !@isAvailable
@isAvailable = false
@$button().fileupload 'destroy'
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @AccountEditAvatar
constructor: ->
$(document).on 'turbolinks:load', @initialize
$(document).on 'turbolinks:before-cache', @rollback
$.subscribe 'dragenterGlobal', @overlayStart
$.subscribe 'dragendGlobal', @overlayEnd
$(document).on 'dragenter', '.js-account-edit-avatar', @overlayEnter
$(document).on 'dragover', '.js-account-edit-avatar', @overlayHover
@main = document.getElementsByClassName('js-account-edit-avatar')
$button: ->
$('.js-account-edit-avatar__button')
initialize: =>
return if !@main[0]?
@isAvailable = true
@$main = $(@main)
@$button().fileupload
url: laroute.route('account.avatar')
dataType: 'json'
dropZone: @$main
submit: =>
@main[0].classList.add 'js-account-edit-avatar--saving'
$.publish 'dragendGlobal'
done: (_e, data) =>
$.publish 'user:update', data.result
fail: osu.fileuploadFailCallback(@$button)
complete: =>
@main[0].classList.remove 'js-account-edit-avatar--saving'
overlayEnd: =>
return if !@isAvailable
@main[0].classList.remove 'js-account-edit-avatar--start'
overlayEnter: =>
@dragging ?= true
overlayHover: =>
return if !@dragging
@main[0].classList.add 'js-account-edit-avatar--hover'
# see GlobalDrag
Timeout.clear @overlayLeaveTimeout
@overlayLeaveTimeout = Timeout.set 100, @overlayLeave
overlayLeave: =>
@dragging = null
@main[0].classList.remove 'js-account-edit-avatar--hover'
overlayStart: =>
return if !@isAvailable
@main[0].classList.add 'js-account-edit-avatar--start'
rollback: =>
return if !@isAvailable
@isAvailable = false
@$button().fileupload 'destroy'
|
[
{
"context": "app.use session(\n secret: \"keyboard cat\"\n key: \"sid\"\n cookie:\n secure: true\n)\n\n# Set View Engine.",
"end": 1739,
"score": 0.6203336119651794,
"start": 1736,
"tag": "KEY",
"value": "sid"
}
] | src/index.coffee | timmyg/pedalwagon-api | 0 | express = require 'express'
stylus = require 'stylus'
assets = require 'connect-assets'
mongoose = require 'mongoose'
session = require('express-session')
cookieParser = require('cookie-parser')
bodyParser = require("body-parser")
#### Basic application initialization
# Create app instance.
app = express()
# Define Port & Environment
app.port = process.env.PORT or process.env.VMC_APP_PORT or 3000
env = process.env.NODE_ENV or "development"
# Config module exports has `setEnvironment` function that sets app settings depending on environment.
config = require "./config"
config.setEnvironment env
# db_config = "mongodb://#{config.DB_USER}:#{config.DB_PASS}@#{config.DB_HOST}:#{config.DB_PORT}/#{config.DB_NAME}"
# mongoose.connect db_config
if env != 'production'
mongoose.connect 'mongodb://localhost/example'
else
console.log('If you are running in production, you may want to modify the mongoose connect path')
#### View initialization
# Add Connect Assets.
app.use assets()
# Set the public folder as static assets.
app.use express.static(process.cwd() + '/public')
# app.use express.static(path.join(process.env.PWD, 'public'))
# process.env.PWD = process.cwd()
# app.use(express.static(process.env.PWD + '/public'));
# app.use( '/public/forms', express.static(__dirname + '/public/forms') );
app.use (req, res, next) ->
res.header 'Access-Control-Allow-Origin', '*'
res.header 'Access-Control-Allow-Headers', 'Origin, X-Requested-With, X-File-Name, Content-Type, Accept, Cache-Control, lifecycle-api-key'
res.header 'Access-Control-Allow-Methods', 'PUT, POST, GET, OPTIONS'
next()
# Express Session
console.log "setting session/cookie"
app.use cookieParser()
app.use session(
secret: "keyboard cat"
key: "sid"
cookie:
secure: true
)
# Set View Engine.
app.set 'view engine', 'jade'
# [Body parser middleware](http://www.senchalabs.org/connect/middleware-bodyParser.html) parses JSON or XML bodies into `req.body` object
app.use bodyParser()
#### Finalization
# Initialize routes
routes = require './routes'
routes(app)
if !process.env.STRIPE_API_KEY or !process.env.MANDRILL_API_KEY or !process.env.LOB_API_KEY or !process.env.XOLA_API_KEY
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "All environment variables are not set!!"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
else
console.log "all environment variables are good to go!"
# Export application object
module.exports = app
| 79988 | express = require 'express'
stylus = require 'stylus'
assets = require 'connect-assets'
mongoose = require 'mongoose'
session = require('express-session')
cookieParser = require('cookie-parser')
bodyParser = require("body-parser")
#### Basic application initialization
# Create app instance.
app = express()
# Define Port & Environment
app.port = process.env.PORT or process.env.VMC_APP_PORT or 3000
env = process.env.NODE_ENV or "development"
# Config module exports has `setEnvironment` function that sets app settings depending on environment.
config = require "./config"
config.setEnvironment env
# db_config = "mongodb://#{config.DB_USER}:#{config.DB_PASS}@#{config.DB_HOST}:#{config.DB_PORT}/#{config.DB_NAME}"
# mongoose.connect db_config
if env != 'production'
mongoose.connect 'mongodb://localhost/example'
else
console.log('If you are running in production, you may want to modify the mongoose connect path')
#### View initialization
# Add Connect Assets.
app.use assets()
# Set the public folder as static assets.
app.use express.static(process.cwd() + '/public')
# app.use express.static(path.join(process.env.PWD, 'public'))
# process.env.PWD = process.cwd()
# app.use(express.static(process.env.PWD + '/public'));
# app.use( '/public/forms', express.static(__dirname + '/public/forms') );
app.use (req, res, next) ->
res.header 'Access-Control-Allow-Origin', '*'
res.header 'Access-Control-Allow-Headers', 'Origin, X-Requested-With, X-File-Name, Content-Type, Accept, Cache-Control, lifecycle-api-key'
res.header 'Access-Control-Allow-Methods', 'PUT, POST, GET, OPTIONS'
next()
# Express Session
console.log "setting session/cookie"
app.use cookieParser()
app.use session(
secret: "keyboard cat"
key: "<KEY>"
cookie:
secure: true
)
# Set View Engine.
app.set 'view engine', 'jade'
# [Body parser middleware](http://www.senchalabs.org/connect/middleware-bodyParser.html) parses JSON or XML bodies into `req.body` object
app.use bodyParser()
#### Finalization
# Initialize routes
routes = require './routes'
routes(app)
if !process.env.STRIPE_API_KEY or !process.env.MANDRILL_API_KEY or !process.env.LOB_API_KEY or !process.env.XOLA_API_KEY
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "All environment variables are not set!!"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
else
console.log "all environment variables are good to go!"
# Export application object
module.exports = app
| true | express = require 'express'
stylus = require 'stylus'
assets = require 'connect-assets'
mongoose = require 'mongoose'
session = require('express-session')
cookieParser = require('cookie-parser')
bodyParser = require("body-parser")
#### Basic application initialization
# Create app instance.
app = express()
# Define Port & Environment
app.port = process.env.PORT or process.env.VMC_APP_PORT or 3000
env = process.env.NODE_ENV or "development"
# Config module exports has `setEnvironment` function that sets app settings depending on environment.
config = require "./config"
config.setEnvironment env
# db_config = "mongodb://#{config.DB_USER}:#{config.DB_PASS}@#{config.DB_HOST}:#{config.DB_PORT}/#{config.DB_NAME}"
# mongoose.connect db_config
if env != 'production'
mongoose.connect 'mongodb://localhost/example'
else
console.log('If you are running in production, you may want to modify the mongoose connect path')
#### View initialization
# Add Connect Assets.
app.use assets()
# Set the public folder as static assets.
app.use express.static(process.cwd() + '/public')
# app.use express.static(path.join(process.env.PWD, 'public'))
# process.env.PWD = process.cwd()
# app.use(express.static(process.env.PWD + '/public'));
# app.use( '/public/forms', express.static(__dirname + '/public/forms') );
app.use (req, res, next) ->
res.header 'Access-Control-Allow-Origin', '*'
res.header 'Access-Control-Allow-Headers', 'Origin, X-Requested-With, X-File-Name, Content-Type, Accept, Cache-Control, lifecycle-api-key'
res.header 'Access-Control-Allow-Methods', 'PUT, POST, GET, OPTIONS'
next()
# Express Session
console.log "setting session/cookie"
app.use cookieParser()
app.use session(
secret: "keyboard cat"
key: "PI:KEY:<KEY>END_PI"
cookie:
secure: true
)
# Set View Engine.
app.set 'view engine', 'jade'
# [Body parser middleware](http://www.senchalabs.org/connect/middleware-bodyParser.html) parses JSON or XML bodies into `req.body` object
app.use bodyParser()
#### Finalization
# Initialize routes
routes = require './routes'
routes(app)
if !process.env.STRIPE_API_KEY or !process.env.MANDRILL_API_KEY or !process.env.LOB_API_KEY or !process.env.XOLA_API_KEY
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "All environment variables are not set!!"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
console.log "/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\/\\"
else
console.log "all environment variables are good to go!"
# Export application object
module.exports = app
|
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.7877184748649597,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\"... | lib/bible-tools/lib/Bible-Passage-Reference-Parser/src/af/spec.coffee | saiba-mais/bible-lessons | 0 | bcv_parser = require("../../js/af_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (af)", ->
`
expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (af)", ->
`
expect(p.parse("Eksodus 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("EKSODUS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (af)", ->
`
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (af)", ->
`
expect(p.parse("Levitikus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levítikus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("LEVITIKUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVÍTIKUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (af)", ->
`
expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Númeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NÚMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (af)", ->
`
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (af)", ->
`
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (af)", ->
`
expect(p.parse("Klaagliedere 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klaagl 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("KLAAGLIEDERE 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLAAGL 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (af)", ->
`
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (af)", ->
`
expect(p.parse("Openbaring 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Op 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("OPENBARING 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OP 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (af)", ->
`
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (af)", ->
`
expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deuteronómium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUTERONÓMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Josh (af)", ->
`
expect(p.parse("Josua 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Jos 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOSUA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOS 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (af)", ->
`
expect(p.parse("Rigters 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Rig 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("RIGTERS 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RIG 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (af)", ->
`
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Rut 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (af)", ->
`
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (af)", ->
`
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (af)", ->
`
expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (af)", ->
`
expect(p.parse("2 Samuel 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("2 SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (af)", ->
`
expect(p.parse("1 Samuel 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("1 SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (af)", ->
`
expect(p.parse("2 Konings 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KONINGS 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (af)", ->
`
expect(p.parse("1 Konings 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KONINGS 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (af)", ->
`
expect(p.parse("2 Kronieke 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KRONIEKE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (af)", ->
`
expect(p.parse("1 Kronieke 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KRONIEKE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (af)", ->
`
expect(p.parse("Esra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("ESRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (af)", ->
`
expect(p.parse("Nehemia 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
p.include_apocrypha(false)
expect(p.parse("NEHEMIA 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
describe "Localized book GkEsth (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (af)", ->
`
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (af)", ->
`
expect(p.parse("Ester 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("ESTER 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (af)", ->
`
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (af)", ->
`
expect(p.parse("Psalms 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Psalm 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("PSALMS 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PSALM 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (af)", ->
`
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (af)", ->
`
expect(p.parse("Spreuke 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Spr 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("SPREUKE 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPR 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (af)", ->
`
expect(p.parse("Prediker 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Pred 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("PREDIKER 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("PRED 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (af)", ->
`
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (af)", ->
`
expect(p.parse("Hooglied 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hoogl 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("HOOGLIED 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOOGL 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book Jer (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jer (af)", ->
`
expect(p.parse("Jeremia 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("JEREMIA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
describe "Localized book Ezek (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek (af)", ->
`
expect(p.parse("Esegiel 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Esegiël 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Eseg 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("ESEGIEL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("ESEGIËL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("ESEG 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book Dan (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Dan (af)", ->
`
expect(p.parse("Daniel 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Daniël 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DANIËL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book Hos (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hos (af)", ->
`
expect(p.parse("Hosea 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("HOSEA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book Joel (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (af)", ->
`
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joël 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOËL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (af)", ->
`
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Obad (af)", ->
`
expect(p.parse("Obadja 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Ob 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("OBADJA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OB 1:1").osis()).toEqual("Obad.1.1")
`
true
describe "Localized book Jonah (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jonah (af)", ->
`
expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
p.include_apocrypha(false)
expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
`
true
describe "Localized book Mic (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mic (af)", ->
`
expect(p.parse("Miga 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
p.include_apocrypha(false)
expect(p.parse("MIGA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
`
true
describe "Localized book Nah (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Nah (af)", ->
`
expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
p.include_apocrypha(false)
expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
`
true
describe "Localized book Hab (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hab (af)", ->
`
expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
p.include_apocrypha(false)
expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
`
true
describe "Localized book Zeph (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zeph (af)", ->
`
expect(p.parse("Sefanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Sef 1:1").osis()).toEqual("Zeph.1.1")
p.include_apocrypha(false)
expect(p.parse("SEFANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("SEF 1:1").osis()).toEqual("Zeph.1.1")
`
true
describe "Localized book Hag (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hag (af)", ->
`
expect(p.parse("Haggai 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
p.include_apocrypha(false)
expect(p.parse("HAGGAI 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
`
true
describe "Localized book Zech (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zech (af)", ->
`
expect(p.parse("Sagaria 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Sag 1:1").osis()).toEqual("Zech.1.1")
p.include_apocrypha(false)
expect(p.parse("SAGARIA 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("SAG 1:1").osis()).toEqual("Zech.1.1")
`
true
describe "Localized book Mal (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mal (af)", ->
`
expect(p.parse("Maleagi 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
p.include_apocrypha(false)
expect(p.parse("MALEAGI 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
`
true
describe "Localized book Matt (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Matt (af)", ->
`
expect(p.parse("Mattheus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mattheüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthéus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthéüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matteus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
p.include_apocrypha(false)
expect(p.parse("MATTHEUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHEÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÉUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÉÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTEUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
`
true
describe "Localized book Mark (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mark (af)", ->
`
expect(p.parse("Markus 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
p.include_apocrypha(false)
expect(p.parse("MARKUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
`
true
describe "Localized book Luke (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Luke (af)", ->
`
expect(p.parse("Lukas 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luk 1:1").osis()).toEqual("Luke.1.1")
p.include_apocrypha(false)
expect(p.parse("LUKAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUK 1:1").osis()).toEqual("Luke.1.1")
`
true
describe "Localized book 1John (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1John (af)", ->
`
expect(p.parse("1. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Joh 1:1").osis()).toEqual("1John.1.1")
p.include_apocrypha(false)
expect(p.parse("1. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOH 1:1").osis()).toEqual("1John.1.1")
`
true
describe "Localized book 2John (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (af)", ->
`
expect(p.parse("II. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("II. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (af)", ->
`
expect(p.parse("III. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("III. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book John (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: John (af)", ->
`
expect(p.parse("Johannes 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("JOHANNES 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (af)", ->
`
expect(p.parse("Handelinge 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Hand 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("HANDELINGE 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("HAND 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (af)", ->
`
expect(p.parse("Romeine 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("ROMEINE 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (af)", ->
`
expect(p.parse("II. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("II. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (af)", ->
`
expect(p.parse("1. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("1. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (af)", ->
`
expect(p.parse("Galasiers 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galasiërs 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galásiers 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galásiërs 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("GALASIERS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALASIËRS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALÁSIERS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALÁSIËRS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (af)", ->
`
expect(p.parse("Efesiers 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efesiërs 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efésiers 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efésiërs 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("EFESIERS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFESIËRS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFÉSIERS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFÉSIËRS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book Phil (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phil (af)", ->
`
expect(p.parse("Filippense 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("FILIPPENSE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Col (af)", ->
`
expect(p.parse("Kolossense 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLOSSENSE 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (af)", ->
`
expect(p.parse("II. Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("II. TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (af)", ->
`
expect(p.parse("1. Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("1. TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book 2Tim (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Tim (af)", ->
`
expect(p.parse("II. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("II. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (af)", ->
`
expect(p.parse("1. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("1. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (af)", ->
`
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (af)", ->
`
expect(p.parse("Filemon 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("FILEMON 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (af)", ->
`
expect(p.parse("Hebreers 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebreërs 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("HEBREERS 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBREËRS 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (af)", ->
`
expect(p.parse("Jakobus 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAKOBUS 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (af)", ->
`
expect(p.parse("II. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("II. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (af)", ->
`
expect(p.parse("1. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("1. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book Jude (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jude (af)", ->
`
expect(p.parse("Judas 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUDAS 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (af)", ->
`
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (af)", ->
`
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (af)", ->
`
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (af)", ->
`
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (af)", ->
`
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (af)", ->
`
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (af)", ->
`
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (af)", ->
`
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["af"]
it "should handle ranges (af)", ->
expect(p.parse("Titus 1:1 tot 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1tot2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 TOT 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (af)", ->
expect(p.parse("Titus 1:1, hoofstuk 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HOOFSTUK 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, hfst. 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HFST. 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, hfst 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HFST 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (af)", ->
expect(p.parse("Exod 1:1 vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Bybelvers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm BYBELVERS 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (af)", ->
expect(p.parse("Exod 1:1 en 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 EN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
expect(p.parse("Exod 1:1 asook 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 ASOOK 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (af)", ->
expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (af)", ->
expect(p.parse("Rev 3ff., 4:2ff.").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF., 4:2 FF.").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (af)", ->
expect(p.parse("Lev 1 (AFR53)").osis_and_translations()).toEqual [["Lev.1", "AFR53"]]
expect(p.parse("lev 1 afr53").osis_and_translations()).toEqual [["Lev.1", "AFR53"]]
expect(p.parse("Lev 1 (AFR83)").osis_and_translations()).toEqual [["Lev.1", "AFR83"]]
expect(p.parse("lev 1 afr83").osis_and_translations()).toEqual [["Lev.1", "AFR83"]]
it "should handle book ranges (af)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("1 tot 3 Joh").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (af)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Entry point for the auto-generated Afrikaans ("af") bcv_parser specs:
# load the compiled parser and verify that canonical OSIS references
# round-trip through parse()/osis() unchanged.
# NOTE(review): the original line carried extraction garbage
# ("| 133729 | ") fused onto the require statement; removed here.
bcv_parser = require("../../js/af_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
# "b" compaction collapses whole-book references; "combine" merges
# adjacent sequences so osis() output is predictable across tests.
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
# The 66 protocanonical books in canonical OSIS order. Names that an
# anonymization pass had replaced with "<NAME>" placeholders are
# restored from canonical order (Dan..Nah between Ezek and Hab;
# Mal, Matt, Mark, Luke, John between Zech and Acts).
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
# With the "bc" strategy, Psalm 151 is reported as Ps.151 rather than
# as the separate Ps151 book.
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
# With the Apocrypha disabled, apocryphal references must not parse.
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
# A digit immediately before a numbered book must invalidate it.
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (af)", ->
`
expect(p.parse("Genesis 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("GENESIS 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (af)", ->
`
expect(p.parse("Eksodus 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exodus 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("EKSODUS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXODUS 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (af)", ->
`
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (af)", ->
`
expect(p.parse("Levitikus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levítikus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("LEVITIKUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVÍTIKUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (af)", ->
`
expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Númeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NÚMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
# NOTE(review): auto-generated Jasmine specs for Afrikaans (af) book-name parsing.
# Each block builds a fresh bcv_parser, then asserts that localized spellings
# (mixed case before `include_apocrypha(false)`, upper case after) resolve to
# the expected OSIS reference. Bodies are embedded JavaScript (backticks).
describe "Localized book Sir (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (af)", ->
		`
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
describe "Localized book Wis (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (af)", ->
		`
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
describe "Localized book Lam (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (af)", ->
		`
		expect(p.parse("Klaagliedere 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Klaagl 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KLAAGLIEDERE 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("KLAAGL 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		`
		true
describe "Localized book EpJer (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (af)", ->
		`
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
describe "Localized book Rev (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (af)", ->
		`
		expect(p.parse("Openbaring 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Op 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("OPENBARING 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("OP 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (af)", ->
		`
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
describe "Localized book Deut (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (af)", ->
		`
		expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deuteronómium 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUTERONÓMIUM 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Joshua.
# Fix: the `it` label had been corrupted to a "<NAME>osh" placeholder; restored
# to "Josh" to match the `describe` label and the generator's pattern.
describe "Localized book Josh (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (af)", ->
		`
		expect(p.parse("Josua 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Jos 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOSUA 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOS 1:1").osis()).toEqual("Josh.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Judges.
# Fix: the `it` label had been corrupted to a "<NAME>udg" placeholder; restored
# to "Judg" to match the `describe` label and the generator's pattern.
describe "Localized book Judg (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (af)", ->
		`
		expect(p.parse("Rigters 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Rig 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RIGTERS 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("RIG 1:1").osis()).toEqual("Judg.1.1")
		`
		true
# NOTE(review): auto-generated Jasmine specs for Afrikaans (af) book-name
# parsing, Ruth through Ezekiel. Each block builds a fresh bcv_parser and
# checks mixed-case and upper-case localized spellings against OSIS output.
describe "Localized book Ruth (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (af)", ->
		`
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Rut 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUT 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (af)", ->
		`
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
describe "Localized book 2Esd (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (af)", ->
		`
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
describe "Localized book Isa (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (af)", ->
		`
		expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
		`
		true
describe "Localized book 2Sam (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (af)", ->
		`
		expect(p.parse("2 Samuel 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
describe "Localized book 1Sam (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (af)", ->
		`
		expect(p.parse("1 Samuel 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
describe "Localized book 2Kgs (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (af)", ->
		`
		expect(p.parse("2 Konings 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 Kon 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KONINGS 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2 KON 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true
describe "Localized book 1Kgs (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (af)", ->
		`
		expect(p.parse("1 Konings 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 Kon 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KONINGS 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1 KON 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
		`
		true
describe "Localized book 2Chr (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (af)", ->
		`
		expect(p.parse("2 Kronieke 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 Kron 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KRONIEKE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 KRON 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
		`
		true
describe "Localized book 1Chr (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (af)", ->
		`
		expect(p.parse("1 Kronieke 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 Kron 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KRONIEKE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 KRON 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
		`
		true
describe "Localized book Ezra (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (af)", ->
		`
		expect(p.parse("Esra 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ESRA 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
describe "Localized book Neh (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (af)", ->
		`
		expect(p.parse("Nehemia 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NEHEMIA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
		`
		true
describe "Localized book GkEsth (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (af)", ->
		`
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		`
		true
describe "Localized book Esth (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (af)", ->
		`
		expect(p.parse("Ester 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ESTER 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
		`
		true
describe "Localized book Job (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (af)", ->
		`
		expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
		`
		true
describe "Localized book Ps (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (af)", ->
		`
		expect(p.parse("Psalms 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Psalm 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PSALMS 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PSALM 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
		`
		true
describe "Localized book PrAzar (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (af)", ->
		`
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		`
		true
describe "Localized book Prov (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (af)", ->
		`
		expect(p.parse("Spreuke 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Spr 1:1").osis()).toEqual("Prov.1.1")
		p.include_apocrypha(false)
		expect(p.parse("SPREUKE 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("SPR 1:1").osis()).toEqual("Prov.1.1")
		`
		true
describe "Localized book Eccl (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (af)", ->
		`
		expect(p.parse("Prediker 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Pred 1:1").osis()).toEqual("Eccl.1.1")
		p.include_apocrypha(false)
		expect(p.parse("PREDIKER 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("PRED 1:1").osis()).toEqual("Eccl.1.1")
		`
		true
describe "Localized book SgThree (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (af)", ->
		`
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		`
		true
describe "Localized book Song (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (af)", ->
		`
		expect(p.parse("Hooglied 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Hoogl 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HOOGLIED 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("HOOGL 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
		`
		true
describe "Localized book Jer (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (af)", ->
		`
		expect(p.parse("Jeremia 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JEREMIA 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
		`
		true
describe "Localized book Ezek (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (af)", ->
		`
		expect(p.parse("Esegiel 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Esegiël 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Eseg 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ESEGIEL 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("ESEGIËL 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("ESEG 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Daniel.
# Fixes: the `it` label and the first parse input had been corrupted to
# "<NAME>" placeholders; restored to "Dan" / "Daniel 1:1", grounded in the
# matching upper-case assertion ("DANIEL 1:1") below.
describe "Localized book Dan (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (af)", ->
		`
		expect(p.parse("Daniel 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Daniël 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
		p.include_apocrypha(false)
		expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DANIËL 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Hosea.
# Fix: both labels had been corrupted to "<NAME>os" placeholders; restored to
# "Hos" to match the OSIS ID asserted in the block.
describe "Localized book Hos (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (af)", ->
		`
		expect(p.parse("Hosea 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HOSEA 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Joel.
# Fix: the `describe` label had been corrupted to a "<NAME>" placeholder;
# restored to "Joel" to match the intact `it` label below.
describe "Localized book Joel (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (af)", ->
		`
		expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("Joël 1:1").osis()).toEqual("Joel.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("JOËL 1:1").osis()).toEqual("Joel.1.1")
		`
		true
# NOTE(review): auto-generated specs for the Afrikaans (af) Amos and Obadiah
# localizations; same fresh-parser / mixed-case / upper-case pattern as above.
describe "Localized book Amos (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (af)", ->
		`
		expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
		`
		true
describe "Localized book Obad (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (af)", ->
		`
		expect(p.parse("Obadja 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Ob 1:1").osis()).toEqual("Obad.1.1")
		p.include_apocrypha(false)
		expect(p.parse("OBADJA 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OB 1:1").osis()).toEqual("Obad.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Jonah.
# Fix: the `describe` label had been corrupted to a "<NAME>ah" placeholder;
# restored to "Jonah" to match the intact `it` label below.
describe "Localized book Jonah (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (af)", ->
		`
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Micah.
# Fix: the `describe` label had been corrupted to a "<NAME>ic" placeholder;
# restored to "Mic" to match the intact `it` label below.
describe "Localized book Mic (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (af)", ->
		`
		expect(p.parse("Miga 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MIGA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		`
		true
# NOTE(review): auto-generated specs for the Afrikaans (af) minor prophets
# Nahum through Malachi; same fresh-parser / case-variant pattern as above.
describe "Localized book Nah (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (af)", ->
		`
		expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
		`
		true
describe "Localized book Hab (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (af)", ->
		`
		expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
		`
		true
describe "Localized book Zeph (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (af)", ->
		`
		expect(p.parse("Sefanja 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Sef 1:1").osis()).toEqual("Zeph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("SEFANJA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("SEF 1:1").osis()).toEqual("Zeph.1.1")
		`
		true
describe "Localized book Hag (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (af)", ->
		`
		expect(p.parse("Haggai 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HAGGAI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
		`
		true
describe "Localized book Zech (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (af)", ->
		`
		expect(p.parse("Sagaria 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Sag 1:1").osis()).toEqual("Zech.1.1")
		p.include_apocrypha(false)
		expect(p.parse("SAGARIA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("SAG 1:1").osis()).toEqual("Zech.1.1")
		`
		true
describe "Localized book Mal (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (af)", ->
		`
		expect(p.parse("Maleagi 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MALEAGI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Matthew.
# Fix: both labels had been corrupted to "<NAME>" placeholders; restored to
# "Matt" to match the OSIS ID asserted throughout the block.
describe "Localized book Matt (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (af)", ->
		`
		expect(p.parse("Mattheus 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mattheüs 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matthéus 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matthéüs 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matteus 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MATTHEUS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTHEÜS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTHÉUS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTHÉÜS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATTEUS 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
# NOTE(review): auto-generated spec for the Afrikaans (af) Mark localization;
# same fresh-parser / case-variant pattern as the surrounding blocks.
describe "Localized book Mark (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (af)", ->
		`
		expect(p.parse("Markus 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
		p.include_apocrypha(false)
		expect(p.parse("MARKUS 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of Luke.
# Fix: both labels had been corrupted to "<NAME>" placeholders; restored to
# "Luke" to match the OSIS ID asserted in the block.
describe "Localized book Luke (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (af)", ->
		`
		expect(p.parse("Lukas 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luk 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("LUKAS 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUK 1:1").osis()).toEqual("Luke.1.1")
		`
		true
# NOTE(review): auto-generated specs for the Afrikaans (af) Johannine epistles
# (1-3 John); these additionally exercise Arabic- and Roman-numeral prefixes
# ("1.", "I.", "II", "III", ...) ahead of the localized book name.
describe "Localized book 1John (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (af)", ->
		`
		expect(p.parse("1. Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I Johannes 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I Joh 1:1").osis()).toEqual("1John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1. JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I JOHANNES 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1. JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I. JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("I JOH 1:1").osis()).toEqual("1John.1.1")
		`
		true
describe "Localized book 2John (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (af)", ->
		`
		expect(p.parse("II. Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Johannes 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II. Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("II. JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JOHANNES 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II. JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("II JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
		`
		true
describe "Localized book 3John (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (af)", ->
		`
		expect(p.parse("III. Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Johannes 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III. Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("III. JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JOHANNES 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III. JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("III JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3. JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
		`
		true
# Spec for the Afrikaans (af) localization of the Gospel of John.
# Fix: both labels had been corrupted to "<NAME>" placeholders; restored to
# "John" to match the OSIS ID asserted in the block.
describe "Localized book John (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (af)", ->
		`
		expect(p.parse("Johannes 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JOHANNES 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
		`
		true
# NOTE(review): auto-generated specs for the Afrikaans (af) Acts and Romans
# localizations; same fresh-parser / case-variant pattern as above.
describe "Localized book Acts (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (af)", ->
		`
		expect(p.parse("Handelinge 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Hand 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha(false)
		expect(p.parse("HANDELINGE 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("HAND 1:1").osis()).toEqual("Acts.1.1")
		`
		true
describe "Localized book Rom (af)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (af)", ->
		`
		expect(p.parse("Romeine 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ROMEINE 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		true
describe "Localized book 2Cor (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Cor (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "II. Korinthiers", "II. Korinthiërs", "2. Korinthiers", "2. Korinthiërs"
      "II Korinthiers", "II Korinthiërs", "II. Korintiers", "II. Korintiërs"
      "2 Korinthiers", "2 Korinthiërs", "2. Korintiers", "2. Korintiërs"
      "II Korintiers", "II Korintiërs", "2 Korintiers", "2 Korintiërs"
      "II. Kor", "2. Kor", "II Kor", "2 Kor", "2Cor"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "2Cor.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "2Cor.1.1"
describe "Localized book 1Cor (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Cor (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "1. Korinthiers", "1. Korinthiërs", "I. Korinthiers", "I. Korinthiërs"
      "1 Korinthiers", "1 Korinthiërs", "1. Korintiers", "1. Korintiërs"
      "I Korinthiers", "I Korinthiërs", "I. Korintiers", "I. Korintiërs"
      "1 Korintiers", "1 Korintiërs", "I Korintiers", "I Korintiërs"
      "1. Kor", "I. Kor", "1 Kor", "I Kor", "1Cor"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "1Cor.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "1Cor.1.1"
describe "Localized book Gal (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Gal (af)", ->
    # Every recognized Afrikaans spelling (with/without diacritics) matches.
    forms = ["Galasiers", "Galasiërs", "Galásiers", "Galásiërs", "Gal"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Gal.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Gal.1.1"
describe "Localized book Eph (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Eph (af)", ->
    # Every recognized Afrikaans spelling (with/without diacritics) matches.
    forms = ["Efesiers", "Efesiërs", "Efésiers", "Efésiërs", "Eph", "Ef"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Eph.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Eph.1.1"
# NOTE(review): the generated titles had been replaced by a "<NAME>"
# anonymization placeholder; restored to "Phil", which the assertions below
# (Filippense/Phil/Fil -> Phil.1.1) establish.
describe "Localized book Phil (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Phil (af)", ->
    `
    expect(p.parse("Filippense 1:1").osis()).toEqual("Phil.1.1")
    expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
    expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
    p.include_apocrypha(false)
    expect(p.parse("FILIPPENSE 1:1").osis()).toEqual("Phil.1.1")
    expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
    expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
    `
    true
describe "Localized book Col (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Col (af)", ->
    # Every recognized Afrikaans spelling resolves to the same OSIS id.
    forms = ["Kolossense", "Col", "Kol"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Col.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Col.1.1"
describe "Localized book 2Thess (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Thess (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "II. Tessalonisense", "2. Tessalonisense", "II Tessalonisense"
      "II. Tessaonicense", "2 Tessalonisense", "2. Tessaonicense"
      "II Tessaonicense", "2 Tessaonicense"
      "II. Thess", "2. Thess", "II Thess", "II. Tess"
      "2 Thess", "2. Tess", "II Tess", "2 Tess", "2Thess"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "2Thess.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "2Thess.1.1"
describe "Localized book 1Thess (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Thess (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "1. Tessalonisense", "I. Tessalonisense", "1 Tessalonisense"
      "1. Tessaonicense", "I Tessalonisense", "I. Tessaonicense"
      "1 Tessaonicense", "I Tessaonicense"
      "1. Thess", "I. Thess", "1 Thess", "1. Tess"
      "I Thess", "I. Tess", "1 Tess", "1Thess", "I Tess"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "1Thess.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "1Thess.1.1"
describe "Localized book 2Tim (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Tim (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "II. Timoteus", "2. Timoteus", "II Timoteus", "2 Timoteus"
      "II. Tim", "2. Tim", "II Tim", "2 Tim", "2Tim"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "2Tim.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "2Tim.1.1"
describe "Localized book 1Tim (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Tim (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "1. Timoteus", "I. Timoteus", "1 Timoteus", "I Timoteus"
      "1. Tim", "I. Tim", "1 Tim", "I Tim", "1Tim"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "1Tim.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "1Tim.1.1"
describe "Localized book Titus (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Titus (af)", ->
    # Every recognized Afrikaans spelling resolves to the same OSIS id.
    forms = ["Titus", "Tit"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Titus.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Titus.1.1"
describe "Localized book Phlm (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Phlm (af)", ->
    # Every recognized Afrikaans spelling resolves to the same OSIS id.
    forms = ["Filemon", "Filem", "Phlm"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Phlm.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Phlm.1.1"
describe "Localized book Heb (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Heb (af)", ->
    # Every recognized Afrikaans spelling (with/without diacritics) matches.
    forms = ["Hebreers", "Hebreërs", "Heb"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Heb.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Heb.1.1"
describe "Localized book Jas (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jas (af)", ->
    # Every recognized Afrikaans spelling resolves to the same OSIS id.
    forms = ["Jakobus", "Jak", "Jas"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Jas.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Jas.1.1"
describe "Localized book 2Pet (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 2Pet (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "II. Petrus", "2. Petrus", "II Petrus", "2 Petrus"
      "II. Pet", "2. Pet", "II Pet", "2 Pet", "2Pet"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "2Pet.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "2Pet.1.1"
describe "Localized book 1Pet (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: 1Pet (af)", ->
    # Every ordinal-prefix/spelling combination resolves to the same OSIS id.
    forms = [
      "1. Petrus", "I. Petrus", "1 Petrus", "I Petrus"
      "1. Pet", "I. Pet", "1 Pet", "I Pet", "1Pet"
    ]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "1Pet.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "1Pet.1.1"
describe "Localized book Jude (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Jude (af)", ->
    # Every recognized Afrikaans spelling resolves to the same OSIS id.
    forms = ["Judas", "Jude", "Jud"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Jude.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Jude.1.1"
# These eight deuterocanonical books each have exactly one recognized form
# (the OSIS abbreviation itself) and are only tested with the Apocrypha
# enabled, so their specs are generated from a single loop. The `do` wrapper
# pins the loop variable for the callbacks Jasmine runs later.
for apoc_book in ["Tob", "Jdt", "Bar", "Sus", "2Macc", "3Macc", "4Macc", "1Macc"]
  do (apoc_book) ->
    describe "Localized book #{apoc_book} (af)", ->
      p = {}
      beforeEach ->
        p = new bcv_parser
        p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
        p.include_apocrypha true
      it "should handle book: #{apoc_book} (af)", ->
        expect(p.parse("#{apoc_book} 1:1").osis()).toEqual "#{apoc_book}.1.1"
# Cross-cutting locale checks for Afrikaans: range/chapter/verse connector
# words, "and"-style sequences, psalm titles, "ff" open-ended ranges,
# translation tags, book-to-book ranges, and punctuation boundaries.
describe "Miscellaneous tests", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should return the expected language", ->
    expect(p.languages).toEqual ["af"]
  it "should handle ranges (af)", ->
    # "tot" is the Afrikaans range connector; it must work without spaces too.
    expect(p.parse("Titus 1:1 tot 2").osis()).toEqual "Titus.1.1-Titus.1.2"
    expect(p.parse("Matt 1tot2").osis()).toEqual "Matt.1-Matt.2"
    expect(p.parse("Phlm 2 TOT 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
  it "should handle chapters (af)", ->
    # "hoofstuk" (chapter) and its abbreviations, case-insensitively.
    expect(p.parse("Titus 1:1, hoofstuk 2").osis()).toEqual "Titus.1.1,Titus.2"
    expect(p.parse("Matt 3:4 HOOFSTUK 6").osis()).toEqual "Matt.3.4,Matt.6"
    expect(p.parse("Titus 1:1, hfst. 2").osis()).toEqual "Titus.1.1,Titus.2"
    expect(p.parse("Matt 3:4 HFST. 6").osis()).toEqual "Matt.3.4,Matt.6"
    expect(p.parse("Titus 1:1, hfst 2").osis()).toEqual "Titus.1.1,Titus.2"
    expect(p.parse("Matt 3:4 HFST 6").osis()).toEqual "Matt.3.4,Matt.6"
  it "should handle verses (af)", ->
    # "vers" / "Bybelvers" introduce a verse reference.
    expect(p.parse("Exod 1:1 vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
    expect(p.parse("Exod 1:1 Bybelvers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm BYBELVERS 6").osis()).toEqual "Phlm.1.6"
  it "should handle 'and' (af)", ->
    # "en" / "asook" join references into a sequence.
    expect(p.parse("Exod 1:1 en 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm 2 EN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
    expect(p.parse("Exod 1:1 asook 3").osis()).toEqual "Exod.1.1,Exod.1.3"
    expect(p.parse("Phlm 2 ASOOK 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
  it "should handle titles (af)", ->
    # Psalm "title" maps to verse 1 of the psalm.
    expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
    expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
  it "should handle 'ff' (af)", ->
    # "ff" extends to the end of the book or chapter.
    expect(p.parse("Rev 3ff., 4:2ff.").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
    expect(p.parse("REV 3 FF., 4:2 FF.").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
    expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
    expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
  it "should handle translations (af)", ->
    # Afrikaans translation identifiers, parenthesized or bare.
    expect(p.parse("Lev 1 (AFR53)").osis_and_translations()).toEqual [["Lev.1", "AFR53"]]
    expect(p.parse("lev 1 afr53").osis_and_translations()).toEqual [["Lev.1", "AFR53"]]
    expect(p.parse("Lev 1 (AFR83)").osis_and_translations()).toEqual [["Lev.1", "AFR83"]]
    expect(p.parse("lev 1 afr83").osis_and_translations()).toEqual [["Lev.1", "AFR83"]]
  it "should handle book ranges (af)", ->
    p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
    expect(p.parse("1 tot 3 Joh").osis()).toEqual "1John.1-3John.1"
  it "should handle boundaries (af)", ->
    p.set_options {book_alone_strategy: "full"}
    # Em-dash and curly quotes should not block recognition.
    expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
    expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
true
# NOTE(review): the line above had been fused with the require below into a
# single corrupted "true | bcv_parser = ..." line; split back apart here.
bcv_parser = require("../../js/af_bcv_parser.js").bcv_parser
describe "Parsing", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.options.osis_compaction_strategy = "b"
    p.options.sequence_combination_strategy = "combine"
  it "should round-trip OSIS references", ->
    p.set_options osis_compaction_strategy: "bc"
    # The 66 protocanonical books in canonical order. (The original array had
    # been mangled by an anonymization pass; restored from the standard OSIS
    # book-id list, which the surviving entries and positions match.)
    books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
    for book in books
      bc = book + ".1"
      bcv = bc + ".1"
      bcv_range = bcv + "-" + bc + ".2"
      expect(p.parse(bc).osis()).toEqual bc
      expect(p.parse(bcv).osis()).toEqual bcv
      expect(p.parse(bcv_range).osis()).toEqual bcv_range
  it "should round-trip OSIS Apocrypha references", ->
    p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
    p.include_apocrypha true
    books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
    for book in books
      bc = book + ".1"
      bcv = bc + ".1"
      bcv_range = bcv + "-" + bc + ".2"
      expect(p.parse(bc).osis()).toEqual bc
      expect(p.parse(bcv).osis()).toEqual bcv
      expect(p.parse(bcv_range).osis()).toEqual bcv_range
    # Under the "bc" strategy, Ps151 is reported as chapter 151 of Psalms.
    p.set_options ps151_strategy: "bc"
    expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
    expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
    expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
    # With the Apocrypha disabled, none of these books should parse.
    p.include_apocrypha false
    for book in books
      bc = book + ".1"
      expect(p.parse(bc).osis()).toEqual ""
  it "should handle a preceding character", ->
    expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
    expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
    # A leading digit glued to an ordinal book name must not match.
    expect(p.parse("1Ps 1").osis()).toEqual ""
    expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Gen (af)", ->
    # Every recognized Afrikaans spelling resolves to the same OSIS id.
    forms = ["Genesis", "Gen"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Gen.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Gen.1.1"
describe "Localized book Exod (af)", ->
  p = {}
  beforeEach ->
    p = new bcv_parser
    p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
    p.include_apocrypha true
  it "should handle book: Exod (af)", ->
    # Every recognized Afrikaans spelling resolves to the same OSIS id.
    forms = ["Eksodus", "Exodus", "Exod", "Eks"]
    for form in forms
      expect(p.parse("#{form} 1:1").osis()).toEqual "Exod.1.1"
    # The all-caps variants are checked with the Apocrypha turned off.
    p.include_apocrypha false
    for form in forms
      expect(p.parse("#{form.toUpperCase()} 1:1").osis()).toEqual "Exod.1.1"
describe "Localized book Bel (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (af)", ->
`
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (af)", ->
`
expect(p.parse("Levitikus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levítikus 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("LEVITIKUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVÍTIKUS 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (af)", ->
`
expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Númeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NÚMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (af)", ->
`
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (af)", ->
`
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (af)", ->
`
expect(p.parse("Klaagliedere 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Klaagl 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("KLAAGLIEDERE 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("KLAAGL 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (af)", ->
`
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (af)", ->
`
expect(p.parse("Openbaring 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Op 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("OPENBARING 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("OP 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (af)", ->
`
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (af)", ->
`
expect(p.parse("Deuteronomium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deuteronómium 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("DEUTERONOMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUTERONÓMIUM 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIosh (af)", ->
`
expect(p.parse("Josua 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Jos 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("JOSUA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOS 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIudg (af)", ->
`
expect(p.parse("Rigters 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Rig 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("RIGTERS 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("RIG 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (af)", ->
`
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Rut 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUT 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (af)", ->
`
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (af)", ->
`
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (af)", ->
`
expect(p.parse("Jesaja 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Jes 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("JESAJA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("JES 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (af)", ->
`
expect(p.parse("2 Samuel 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("2 SAMUEL 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (af)", ->
`
expect(p.parse("1 Samuel 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("1 SAMUEL 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (af)", ->
`
expect(p.parse("2 Konings 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 Kon 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KONINGS 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2 KON 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (af)", ->
`
expect(p.parse("1 Konings 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 Kon 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KONINGS 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1 KON 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (af)", ->
`
expect(p.parse("2 Kronieke 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 Kron 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KRONIEKE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 KRON 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (af)", ->
`
expect(p.parse("1 Kronieke 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 Kron 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KRONIEKE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 KRON 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (af)", ->
`
expect(p.parse("Esra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("ESRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (af)", ->
`
expect(p.parse("Nehemia 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
p.include_apocrypha(false)
expect(p.parse("NEHEMIA 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
describe "Localized book GkEsth (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (af)", ->
`
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (af)", ->
`
expect(p.parse("Ester 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Est 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("ESTER 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("EST 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (af)", ->
`
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (af)", ->
`
expect(p.parse("Psalms 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Psalm 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("PSALMS 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PSALM 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (af)", ->
`
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (af)", ->
`
expect(p.parse("Spreuke 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Spr 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("SPREUKE 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("SPR 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (af)", ->
`
expect(p.parse("Prediker 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Pred 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("PREDIKER 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("PRED 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (af)", ->
`
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (af)", ->
`
expect(p.parse("Hooglied 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Hoogl 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("HOOGLIED 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("HOOGL 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book Jer (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jer (af)", ->
`
expect(p.parse("Jeremia 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("JEREMIA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
describe "Localized book Ezek (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek (af)", ->
`
expect(p.parse("Esegiel 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Esegiël 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Eseg 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("ESEGIEL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("ESEGIËL 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("ESEG 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book Dan (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (af)", ->
`
expect(p.parse("PI:NAME:<NAME>END_PI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Daniël 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("DANIEL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DANIËL 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIos (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIos (af)", ->
`
expect(p.parse("Hosea 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("HOSEA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (af)", ->
`
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joël 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOËL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (af)", ->
`
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Am 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AM 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Obad (af)", ->
`
expect(p.parse("Obadja 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Ob 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("OBADJA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OB 1:1").osis()).toEqual("Obad.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIah (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jonah (af)", ->
`
expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
p.include_apocrypha(false)
expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIic (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mic (af)", ->
`
expect(p.parse("Miga 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
p.include_apocrypha(false)
expect(p.parse("MIGA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
`
true
describe "Localized book Nah (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Nah (af)", ->
`
expect(p.parse("Nahum 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
p.include_apocrypha(false)
expect(p.parse("NAHUM 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
`
true
describe "Localized book Hab (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hab (af)", ->
`
expect(p.parse("Habakuk 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
p.include_apocrypha(false)
expect(p.parse("HABAKUK 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
`
true
describe "Localized book Zeph (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zeph (af)", ->
`
expect(p.parse("Sefanja 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Sef 1:1").osis()).toEqual("Zeph.1.1")
p.include_apocrypha(false)
expect(p.parse("SEFANJA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("SEF 1:1").osis()).toEqual("Zeph.1.1")
`
true
describe "Localized book Hag (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hag (af)", ->
`
expect(p.parse("Haggai 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
p.include_apocrypha(false)
expect(p.parse("HAGGAI 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
`
true
describe "Localized book Zech (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zech (af)", ->
`
expect(p.parse("Sagaria 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Sag 1:1").osis()).toEqual("Zech.1.1")
p.include_apocrypha(false)
expect(p.parse("SAGARIA 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("SAG 1:1").osis()).toEqual("Zech.1.1")
`
true
describe "Localized book Mal (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mal (af)", ->
`
expect(p.parse("Maleagi 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
p.include_apocrypha(false)
expect(p.parse("MALEAGI 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (af)", ->
`
expect(p.parse("Mattheus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mattheüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthéus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matthéüs 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matteus 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
p.include_apocrypha(false)
expect(p.parse("MATTHEUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHEÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÉUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTHÉÜS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATTEUS 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
`
true
describe "Localized book Mark (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mark (af)", ->
`
expect(p.parse("Markus 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
p.include_apocrypha(false)
expect(p.parse("MARKUS 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (af)", ->
`
expect(p.parse("Lukas 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luk 1:1").osis()).toEqual("Luke.1.1")
p.include_apocrypha(false)
expect(p.parse("LUKAS 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUK 1:1").osis()).toEqual("Luke.1.1")
`
true
describe "Localized book 1John (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1John (af)", ->
`
expect(p.parse("1. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Johannes 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Joh 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I Joh 1:1").osis()).toEqual("1John.1.1")
p.include_apocrypha(false)
expect(p.parse("1. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOHANNES 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I. JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOH 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("I JOH 1:1").osis()).toEqual("1John.1.1")
`
true
describe "Localized book 2John (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (af)", ->
`
expect(p.parse("II. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johannes 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Joh 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("II. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANNES 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2. JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("II JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOH 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (af)", ->
`
expect(p.parse("III. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johannes 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Joh 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("III. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANNES 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("III JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3. JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOH 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (af)", ->
`
expect(p.parse("Johannes 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Joh 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("JOHANNES 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOH 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (af)", ->
`
expect(p.parse("Handelinge 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Hand 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("HANDELINGE 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("HAND 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (af)", ->
`
expect(p.parse("Romeine 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("ROMEINE 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (af)", ->
`
expect(p.parse("II. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinthiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiers 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korintiërs 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("II. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTHIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIERS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTIËRS 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2. KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("II KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (af)", ->
`
expect(p.parse("1. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korinthiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintiers 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Korintiërs 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("1. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTHIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIERS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KORINTIËRS 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I. KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("I KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (af)", ->
`
expect(p.parse("Galasiers 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galasiërs 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galásiers 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galásiërs 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("GALASIERS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALASIËRS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALÁSIERS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALÁSIËRS 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (af)", ->
`
expect(p.parse("Efesiers 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efesiërs 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efésiers 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efésiërs 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Ef 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("EFESIERS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFESIËRS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFÉSIERS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFÉSIËRS 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EF 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (af)", ->
`
expect(p.parse("Filippense 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Fil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("FILIPPENSE 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Col (af)", ->
`
expect(p.parse("Kolossense 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kol 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("KOLOSSENSE 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (af)", ->
`
expect(p.parse("II. Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tessalonisense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tessaonicense 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Tess 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("II. TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESSALONISENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESSAONICENSE 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II. TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2. TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("II TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 TESS 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (af)", ->
`
expect(p.parse("1. Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tessalonisense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tessaonicense 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Tess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I Tess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("1. TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESSALONISENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESSAONICENSE 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1. TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I. TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 TESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("I TESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book 2Tim (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Tim (af)", ->
`
expect(p.parse("II. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Timoteus 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Tim 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("II. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIMOTEUS 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2. TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("II TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 TIM 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (af)", ->
`
expect(p.parse("1. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Timoteus 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I Tim 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("1. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIMOTEUS 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I. TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("I TIM 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (af)", ->
`
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Tit 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TIT 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (af)", ->
`
expect(p.parse("Filemon 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Filem 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("FILEMON 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("FILEM 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (af)", ->
`
expect(p.parse("Hebreers 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Hebreërs 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("HEBREERS 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEBREËRS 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (af)", ->
`
expect(p.parse("Jakobus 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jak 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("JAKOBUS 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAK 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (af)", ->
`
expect(p.parse("II. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petrus 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Pet 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("II. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETRUS 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II. PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2. PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("II PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PET 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (af)", ->
`
expect(p.parse("1. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Petrus 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I Pet 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("1. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PETRUS 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1. PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I. PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("I PET 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book Jude (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jude (af)", ->
`
expect(p.parse("Judas 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jud 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("JUDAS 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUD 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (af)", ->
`
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (af)", ->
`
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (af)", ->
`
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (af)", ->
`
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (af)", ->
`
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (af)", ->
`
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (af)", ->
`
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (af)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (af)", ->
`
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["af"]
it "should handle ranges (af)", ->
expect(p.parse("Titus 1:1 tot 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1tot2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 TOT 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (af)", ->
expect(p.parse("Titus 1:1, hoofstuk 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HOOFSTUK 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, hfst. 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HFST. 6").osis()).toEqual "Matt.3.4,Matt.6"
expect(p.parse("Titus 1:1, hfst 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 HFST 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (af)", ->
expect(p.parse("Exod 1:1 vers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm VERS 6").osis()).toEqual "Phlm.1.6"
expect(p.parse("Exod 1:1 Bybelvers 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm BYBELVERS 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (af)", ->
expect(p.parse("Exod 1:1 en 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 EN 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
expect(p.parse("Exod 1:1 asook 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 ASOOK 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (af)", ->
expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (af)", ->
expect(p.parse("Rev 3ff., 4:2ff.").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF., 4:2 FF.").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (af)", ->
expect(p.parse("Lev 1 (AFR53)").osis_and_translations()).toEqual [["Lev.1", "AFR53"]]
expect(p.parse("lev 1 afr53").osis_and_translations()).toEqual [["Lev.1", "AFR53"]]
expect(p.parse("Lev 1 (AFR83)").osis_and_translations()).toEqual [["Lev.1", "AFR83"]]
expect(p.parse("lev 1 afr83").osis_and_translations()).toEqual [["Lev.1", "AFR83"]]
it "should handle book ranges (af)", ->
p.set_options {book_alone_strategy: "full", book_range_strategy: "include"}
expect(p.parse("1 tot 3 Joh").osis()).toEqual "1John.1-3John.1"
it "should handle boundaries (af)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
|
[
{
"context": "r.forms['default'];\n # Fixes: https://github.com/kelp404/angular-form-builder/blob/master/example/demo.js\n",
"end": 452,
"score": 0.9996596574783325,
"start": 445,
"tag": "USERNAME",
"value": "kelp404"
},
{
"context": "ob/master/example/demo.js\n \n $scope.greeting... | coffee/controller/InitController.coffee | MichaelBakker1986/framings | 0 | ###
InitController
//bliep
Initializer for the page.
###
app.controller 'InitController', ['$scope', '$builder', 'UserService', 'FormService', 'ComponentService', ($scope, $builder, $user, $form, $component) ->
$scope.path = 'ROOT'
$scope.greeting = '<b>Hello</b>';
for component of components
$component.register("scorecard", components[component]);
$scope.form = $builder.forms['default'];
# Fixes: https://github.com/kelp404/angular-form-builder/blob/master/example/demo.js
$scope.greeting = 'Hey Finan'
$scope.click = ->
console.log $form.get($scope.path)
return
$scope.goLevelUp = ->
alert "Gedrukt"
return
return
] | 119209 | ###
InitController
//bliep
Initializer for the page.
###
app.controller 'InitController', ['$scope', '$builder', 'UserService', 'FormService', 'ComponentService', ($scope, $builder, $user, $form, $component) ->
$scope.path = 'ROOT'
$scope.greeting = '<b>Hello</b>';
for component of components
$component.register("scorecard", components[component]);
$scope.form = $builder.forms['default'];
# Fixes: https://github.com/kelp404/angular-form-builder/blob/master/example/demo.js
$scope.greeting = '<NAME>'
$scope.click = ->
console.log $form.get($scope.path)
return
$scope.goLevelUp = ->
alert "Gedrukt"
return
return
] | true | ###
InitController
//bliep
Initializer for the page.
###
app.controller 'InitController', ['$scope', '$builder', 'UserService', 'FormService', 'ComponentService', ($scope, $builder, $user, $form, $component) ->
$scope.path = 'ROOT'
$scope.greeting = '<b>Hello</b>';
for component of components
$component.register("scorecard", components[component]);
$scope.form = $builder.forms['default'];
# Fixes: https://github.com/kelp404/angular-form-builder/blob/master/example/demo.js
$scope.greeting = 'PI:NAME:<NAME>END_PI'
$scope.click = ->
console.log $form.get($scope.path)
return
$scope.goLevelUp = ->
alert "Gedrukt"
return
return
] |
[
{
"context": "###\n Copyright (c) 2014, Andrew Cantino\n Copyright (c) 2009, Andrew Cantino & Kyle Maxwe",
"end": 40,
"score": 0.9998607039451599,
"start": 26,
"tag": "NAME",
"value": "Andrew Cantino"
},
{
"context": "ght (c) 2014, Andrew Cantino\n Copyright (c) 2009, Andrew Canti... | lib/js/jquery.json-editor.js.coffee | cantino/jsoneditor | 14 | ###
Copyright (c) 2014, Andrew Cantino
Copyright (c) 2009, Andrew Cantino & Kyle Maxwell
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
You will probably need to tell the editor where to find its 'add' and 'delete' images. In your
code, before you make the editor, do something like this:
JSONEditor.prototype.ADD_IMG = '/javascripts/jsoneditor/add.png';
JSONEditor.prototype.DELETE_IMG = '/javascripts/jsoneditor/delete.png';
You can enable or disable visual truncation in the structure editor with the following:
myEditor.doTruncation(false);
myEditor.doTruncation(true); // The default
You can show a 'w'ipe button that does a more aggressive delete by calling showWipe(true|false) or by passing in 'showWipe: true'.
###
class window.JSONEditor
constructor: (wrapped, options = {}) ->
@builderShowing = true
@ADD_IMG ||= options.ADD_IMG || 'lib/images/add.png'
@DELETE_IMG ||= options.DELETE_IMG || 'lib/images/delete.png'
@functionButtonsEnabled = false
@_doTruncation = true
@_showWipe = options.showWipe
@history = []
@historyPointer = -1
throw("Must provide an element to wrap.") if wrapped == null || (wrapped.get && wrapped.get(0) == null)
@wrapped = $(wrapped)
@wrapped.wrap('<div class="json-editor"></div>')
@container = $(@wrapped.parent())
@wrapped.hide()
@container.css("position", "relative")
@doAutoFocus = false
@editingUnfocused()
@rebuild()
braceUI: (key, struct) ->
$('<a class="icon" href="#"><strong>{</strong></a>').click (e) =>
e.preventDefault()
struct[key] = { "??": struct[key] }
@doAutoFocus = true
@rebuild()
bracketUI: (key, struct) ->
$('<a class="icon" href="#"><strong>[</a>').click (e) =>
e.preventDefault()
struct[key] = [ struct[key] ]
@doAutoFocus = true
@rebuild()
deleteUI: (key, struct, fullDelete) ->
$("<a class='icon' href='#' title='delete'><img src='#{@DELETE_IMG}' border=0 /></a>").click (e) =>
e.preventDefault()
if !fullDelete
didSomething = false
if struct[key] instanceof Array
if struct[key].length > 0
struct[key] = struct[key][0]
didSomething = true
else if struct[key] instanceof Object
for subkey, subval of struct[key]
struct[key] = struct[key][subkey]
didSomething = true
break
if didSomething
@rebuild()
return
if struct instanceof Array
struct.splice(key, 1)
else
delete struct[key]
@rebuild()
wipeUI: (key, struct) ->
$('<a class="icon" href="#" title="wipe"><strong>W</strong></a>').click (e) =>
e.preventDefault()
if struct instanceof Array
struct.splice(key, 1)
else
delete struct[key]
@rebuild()
addUI: (struct) ->
$("<a class='icon' href='#' title='add'><img src='#{@ADD_IMG}' border=0/></a>").click (e) =>
e.preventDefault()
if struct instanceof Array
struct.push('??')
else
struct['??'] = '??'
@doAutoFocus = true
@rebuild()
undo: ->
if @saveStateIfTextChanged()
@historyPointer -= 1 if @historyPointer > 0
@restore()
redo: ->
if @historyPointer + 1 < @history.length
if @saveStateIfTextChanged()
@historyPointer += 1
@restore()
showBuilder: ->
if @checkJsonInText()
@setJsonFromText()
@rebuild()
@wrapped.hide()
@builder.show()
true
else
alert "Sorry, there appears to be an error in your JSON input. Please fix it before continuing."
false
showText: ->
@builder.hide()
@wrapped.show()
toggleBuilder: ->
if @builderShowing
@showText()
@builderShowing = !@builderShowing
else
if @showBuilder()
@builderShowing = !@builderShowing
showFunctionButtons: (insider) ->
@functionButtonsEnabled = true unless insider
if @functionButtonsEnabled && !@functionButtons
@functionButtons = $('<div class="function_buttons"></div>')
@functionButtons.append $('<a href="#" style="padding-right: 10px;">Undo</a>').click (e) =>
e.preventDefault()
@undo()
@functionButtons.append $('<a href="#" style="padding-right: 10px;">Redo</a>').click (e) =>
e.preventDefault()
@redo()
@functionButtons.append $('<a id="toggle_view" href="#" style="padding-right: 10px; float: right;">Toggle View</a>').click (e) =>
e.preventDefault()
@toggleBuilder()
@container.prepend(@functionButtons)
saveStateIfTextChanged: ->
if JSON.stringify(@json, null, 2) != @wrapped.get(0).value
if @checkJsonInText()
@saveState(true)
else
if confirm("The current JSON is malformed. If you continue, the current JSON will not be saved. Do you wish to continue?")
@historyPointer += 1
true
else
false
true
restore: ->
if @history[@historyPointer]
@wrapped.get(0).value = @history[@historyPointer]
@rebuild(true)
saveState: (skipStoreText) ->
if @json
@storeToText() unless skipStoreText
text = @wrapped.get(0).value
if @history[@historyPointer] != text
@historyTruncate()
@history.push(text)
@historyPointer += 1
fireChange: ->
$(@wrapped).trigger 'change'
historyTruncate: ->
if @historyPointer + 1 < @history.length
@history.splice(@historyPointer + 1, @history.length - @historyPointer)
storeToText: ->
@wrapped.get(0).value = JSON.stringify(@json, null, 2)
getJSONText: ->
@rebuild()
@wrapped.get(0).value
getJSON: ->
@rebuild()
@json
rebuild: (doNotRefreshText) ->
@setJsonFromText() unless @json
changed = @haveThingsChanged()
if @json && !doNotRefreshText
@saveState()
@cleanBuilder()
@setJsonFromText()
@alreadyFocused = false
elem = @build(@json, @builder, null, null, @json)
@recoverScrollPosition()
# Auto-focus to edit '??' keys and values.
if elem && elem.text() == '??' && !@alreadyFocused && @doAutoFocus
@alreadyFocused = true
@doAutoFocus = false
elem = elem.find('.editable')
elem.click()
elem.find('input').focus().select()
# still missing a proper scrolling into the selected input
@fireChange() if changed
haveThingsChanged: ->
@json && JSON.stringify(@json, null, 2) != @wrapped.get(0).value
saveScrollPosition: ->
@oldScrollHeight = @builder.scrollTop()
recoverScrollPosition: ->
@builder.scrollTop @oldScrollHeight
setJsonFromText: ->
@wrapped.get(0).value = "{}" if @wrapped.get(0).value.length == 0
try
@wrapped.get(0).value = @wrapped.get(0).value.replace(/((^|[^\\])(\\\\)*)\\n/g, '$1\\\\n').replace(/((^|[^\\])(\\\\)*)\\t/g, '$1\\\\t')
@json = JSON.parse(@wrapped.get(0).value)
catch e
alert "Got bad JSON from text."
checkJsonInText: ->
try
JSON.parse @wrapped.get(0).value
true
catch e
false
logJSON: ->
console.log(JSON.stringify(@json, null, 2))
cleanBuilder: ->
unless @builder
@builder = $('<div class="builder"></div>')
@container.append(@builder)
@saveScrollPosition()
@builder.text('')
@showFunctionButtons("defined")
updateStruct: (struct, key, val, kind, selectionStart, selectionEnd) ->
if kind == 'key'
if selectionStart && selectionEnd
val = key.substring(0, selectionStart) + val + key.substring(selectionEnd, key.length)
struct[val] = struct[key]
# order keys
orderrest = 0
$.each struct, (index, value) ->
# re-set rest of the keys
if orderrest & index != val
tempval = struct[index]
delete struct[index]
struct[index] = tempval
if key == index
orderrest = 1
# end of order keys
delete struct[key] if key != val
else
if selectionStart && selectionEnd
val = struct[key].substring(0, selectionStart) + val + struct[key].substring(selectionEnd, struct[key].length)
struct[key] = val
getValFromStruct: (struct, key, kind) ->
if kind == 'key'
key
else
struct[key]
doTruncation: (trueOrFalse) ->
if @_doTruncation != trueOrFalse
@_doTruncation = trueOrFalse
@rebuild()
showWipe: (trueOrFalse) ->
if @_showWipe != trueOrFalse
@_showWipe = trueOrFalse
@rebuild()
truncate: (text, length) ->
return '-empty-' if text.length == 0
if @_doTruncation && text.length > (length || 30)
return text.substring(0, (length || 30)) + '...'
text
replaceLastSelectedFieldIfRecent: (text) ->
if @lastEditingUnfocusedTime > (new Date()).getTime() - 200 # Short delay for unfocus to occur.
@setLastEditingFocus(text)
@rebuild()
editingUnfocused: (elem, struct, key, root, kind) ->
selectionStart = elem?.selectionStart
selectionEnd = elem?.selectionEnd
@setLastEditingFocus = (text) =>
@updateStruct(struct, key, text, kind, selectionStart, selectionEnd)
@json = root # Because self.json is a new reference due to rebuild.
@lastEditingUnfocusedTime = (new Date()).getTime()
edit: ($elem, key, struct, root, kind) ->
form = $("<form></form>").css('display', 'inline')
$input = $("<input />")
$input.val @getValFromStruct(struct, key, kind)
$input.addClass 'edit_field'
blurHandler = =>
val = $input.val()
@updateStruct(struct, key, val, kind)
@editingUnfocused($elem, struct, (kind == 'key' ? val : key), root, kind)
$elem.text(@truncate(val))
$elem.get(0).editing = false
@rebuild() if key != val
$input.blur blurHandler
$input.keydown (e) =>
if e.keyCode == 9 || e.keyCode == 13 # Tab and enter
@doAutoFocus = true
blurHandler()
$(form).append($input).submit (e) =>
e.preventDefault()
@doAutoFocus = true
blurHandler()
$elem.html(form)
$input.focus()
editable: (text, key, struct, root, kind) ->
self = this;
elem = $('<span class="editable" href="#"></span>').text(@truncate(text)).click( (e) ->
unless @editing
@editing = true;
self.edit($(this), key, struct, root, kind)
true
)
elem
build: (json, node, parent, key, root) ->
elem = null
if json instanceof Array
bq = $(document.createElement("BLOCKQUOTE"))
bq.append($('<div class="brackets">[</div>'))
bq.prepend(@addUI(json))
if parent
bq.prepend(@wipeUI(key, parent)) if @_showWipe
bq.prepend(@deleteUI(key, parent))
for i in [0...json.length]
innerbq = $(document.createElement("BLOCKQUOTE"))
newElem = @build(json[i], innerbq, json, i, root)
elem = newElem if newElem && newElem.text() == "??"
bq.append(innerbq)
bq.append($('<div class="brackets">]</div>'))
node.append(bq)
else if json instanceof Object
bq = $(document.createElement("BLOCKQUOTE"))
bq.append($('<div class="bracers">{</div>'))
for jsonkey, jsonvalue of json
innerbq = $(document.createElement("BLOCKQUOTE"))
newElem = @editable(jsonkey.toString(), jsonkey.toString(), json, root, 'key').wrap('<span class="key"></b>').parent()
innerbq.append(newElem)
elem = newElem if newElem && newElem.text() == "??"
if typeof jsonvalue != 'string'
innerbq.prepend(@braceUI(jsonkey, json))
innerbq.prepend(@bracketUI(jsonkey, json))
innerbq.prepend(@wipeUI(jsonkey, json)) if @_showWipe
innerbq.prepend(@deleteUI(jsonkey, json, true))
innerbq.append($('<span class="colon">: </span>'))
newElem = @build(jsonvalue, innerbq, json, jsonkey, root)
elem = newElem if !elem && newElem && newElem.text() == "??"
bq.append(innerbq)
bq.prepend(@addUI(json))
if parent
bq.prepend(@wipeUI(key, parent)) if @_showWipe
bq.prepend(@deleteUI(key, parent))
bq.append($('<div class="bracers">}</div>'))
node.append(bq)
else
if json == null
json = ''
elem = @editable(json.toString(), key, parent, root, 'value').wrap('<span class="val"></span>').parent();
node.append(elem)
node.prepend(@braceUI(key, parent))
node.prepend(@bracketUI(key, parent))
if parent
node.prepend(@wipeUI(key, parent)) if @_showWipe
node.prepend(@deleteUI(key, parent))
elem
| 182973 | ###
Copyright (c) 2014, <NAME>
Copyright (c) 2009, <NAME> & <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
You will probably need to tell the editor where to find its 'add' and 'delete' images. In your
code, before you make the editor, do something like this:
JSONEditor.prototype.ADD_IMG = '/javascripts/jsoneditor/add.png';
JSONEditor.prototype.DELETE_IMG = '/javascripts/jsoneditor/delete.png';
You can enable or disable visual truncation in the structure editor with the following:
myEditor.doTruncation(false);
myEditor.doTruncation(true); // The default
You can show a 'w'ipe button that does a more aggressive delete by calling showWipe(true|false) or by passing in 'showWipe: true'.
###
class window.JSONEditor
constructor: (wrapped, options = {}) ->
@builderShowing = true
@ADD_IMG ||= options.ADD_IMG || 'lib/images/add.png'
@DELETE_IMG ||= options.DELETE_IMG || 'lib/images/delete.png'
@functionButtonsEnabled = false
@_doTruncation = true
@_showWipe = options.showWipe
@history = []
@historyPointer = -1
throw("Must provide an element to wrap.") if wrapped == null || (wrapped.get && wrapped.get(0) == null)
@wrapped = $(wrapped)
@wrapped.wrap('<div class="json-editor"></div>')
@container = $(@wrapped.parent())
@wrapped.hide()
@container.css("position", "relative")
@doAutoFocus = false
@editingUnfocused()
@rebuild()
braceUI: (key, struct) ->
$('<a class="icon" href="#"><strong>{</strong></a>').click (e) =>
e.preventDefault()
struct[key] = { "??": struct[key] }
@doAutoFocus = true
@rebuild()
bracketUI: (key, struct) ->
$('<a class="icon" href="#"><strong>[</a>').click (e) =>
e.preventDefault()
struct[key] = [ struct[key] ]
@doAutoFocus = true
@rebuild()
deleteUI: (key, struct, fullDelete) ->
$("<a class='icon' href='#' title='delete'><img src='#{@DELETE_IMG}' border=0 /></a>").click (e) =>
e.preventDefault()
if !fullDelete
didSomething = false
if struct[key] instanceof Array
if struct[key].length > 0
struct[key] = struct[key][0]
didSomething = true
else if struct[key] instanceof Object
for subkey, subval of struct[key]
struct[key] = struct[key][subkey]
didSomething = true
break
if didSomething
@rebuild()
return
if struct instanceof Array
struct.splice(key, 1)
else
delete struct[key]
@rebuild()
wipeUI: (key, struct) ->
$('<a class="icon" href="#" title="wipe"><strong>W</strong></a>').click (e) =>
e.preventDefault()
if struct instanceof Array
struct.splice(key, 1)
else
delete struct[key]
@rebuild()
addUI: (struct) ->
$("<a class='icon' href='#' title='add'><img src='#{@ADD_IMG}' border=0/></a>").click (e) =>
e.preventDefault()
if struct instanceof Array
struct.push('??')
else
struct['??'] = '??'
@doAutoFocus = true
@rebuild()
undo: ->
if @saveStateIfTextChanged()
@historyPointer -= 1 if @historyPointer > 0
@restore()
redo: ->
if @historyPointer + 1 < @history.length
if @saveStateIfTextChanged()
@historyPointer += 1
@restore()
showBuilder: ->
if @checkJsonInText()
@setJsonFromText()
@rebuild()
@wrapped.hide()
@builder.show()
true
else
alert "Sorry, there appears to be an error in your JSON input. Please fix it before continuing."
false
showText: ->
@builder.hide()
@wrapped.show()
toggleBuilder: ->
if @builderShowing
@showText()
@builderShowing = !@builderShowing
else
if @showBuilder()
@builderShowing = !@builderShowing
showFunctionButtons: (insider) ->
@functionButtonsEnabled = true unless insider
if @functionButtonsEnabled && !@functionButtons
@functionButtons = $('<div class="function_buttons"></div>')
@functionButtons.append $('<a href="#" style="padding-right: 10px;">Undo</a>').click (e) =>
e.preventDefault()
@undo()
@functionButtons.append $('<a href="#" style="padding-right: 10px;">Redo</a>').click (e) =>
e.preventDefault()
@redo()
@functionButtons.append $('<a id="toggle_view" href="#" style="padding-right: 10px; float: right;">Toggle View</a>').click (e) =>
e.preventDefault()
@toggleBuilder()
@container.prepend(@functionButtons)
saveStateIfTextChanged: ->
if JSON.stringify(@json, null, 2) != @wrapped.get(0).value
if @checkJsonInText()
@saveState(true)
else
if confirm("The current JSON is malformed. If you continue, the current JSON will not be saved. Do you wish to continue?")
@historyPointer += 1
true
else
false
true
restore: ->
if @history[@historyPointer]
@wrapped.get(0).value = @history[@historyPointer]
@rebuild(true)
saveState: (skipStoreText) ->
if @json
@storeToText() unless skipStoreText
text = @wrapped.get(0).value
if @history[@historyPointer] != text
@historyTruncate()
@history.push(text)
@historyPointer += 1
fireChange: ->
$(@wrapped).trigger 'change'
historyTruncate: ->
if @historyPointer + 1 < @history.length
@history.splice(@historyPointer + 1, @history.length - @historyPointer)
storeToText: ->
@wrapped.get(0).value = JSON.stringify(@json, null, 2)
getJSONText: ->
@rebuild()
@wrapped.get(0).value
getJSON: ->
@rebuild()
@json
rebuild: (doNotRefreshText) ->
@setJsonFromText() unless @json
changed = @haveThingsChanged()
if @json && !doNotRefreshText
@saveState()
@cleanBuilder()
@setJsonFromText()
@alreadyFocused = false
elem = @build(@json, @builder, null, null, @json)
@recoverScrollPosition()
# Auto-focus to edit '??' keys and values.
if elem && elem.text() == '??' && !@alreadyFocused && @doAutoFocus
@alreadyFocused = true
@doAutoFocus = false
elem = elem.find('.editable')
elem.click()
elem.find('input').focus().select()
# still missing a proper scrolling into the selected input
@fireChange() if changed
haveThingsChanged: ->
@json && JSON.stringify(@json, null, 2) != @wrapped.get(0).value
saveScrollPosition: ->
@oldScrollHeight = @builder.scrollTop()
recoverScrollPosition: ->
@builder.scrollTop @oldScrollHeight
setJsonFromText: ->
@wrapped.get(0).value = "{}" if @wrapped.get(0).value.length == 0
try
@wrapped.get(0).value = @wrapped.get(0).value.replace(/((^|[^\\])(\\\\)*)\\n/g, '$1\\\\n').replace(/((^|[^\\])(\\\\)*)\\t/g, '$1\\\\t')
@json = JSON.parse(@wrapped.get(0).value)
catch e
alert "Got bad JSON from text."
checkJsonInText: ->
try
JSON.parse @wrapped.get(0).value
true
catch e
false
logJSON: ->
console.log(JSON.stringify(@json, null, 2))
cleanBuilder: ->
unless @builder
@builder = $('<div class="builder"></div>')
@container.append(@builder)
@saveScrollPosition()
@builder.text('')
@showFunctionButtons("defined")
updateStruct: (struct, key, val, kind, selectionStart, selectionEnd) ->
if kind == 'key'
if selectionStart && selectionEnd
val = key.substring(0, selectionStart) + val + key.substring(selectionEnd, key.length)
struct[val] = struct[key]
# order keys
orderrest = 0
$.each struct, (index, value) ->
# re-set rest of the keys
if orderrest & index != val
tempval = struct[index]
delete struct[index]
struct[index] = tempval
if key == index
orderrest = 1
# end of order keys
delete struct[key] if key != val
else
if selectionStart && selectionEnd
val = struct[key].substring(0, selectionStart) + val + struct[key].substring(selectionEnd, struct[key].length)
struct[key] = val
getValFromStruct: (struct, key, kind) ->
if kind == 'key'
key
else
struct[key]
doTruncation: (trueOrFalse) ->
if @_doTruncation != trueOrFalse
@_doTruncation = trueOrFalse
@rebuild()
showWipe: (trueOrFalse) ->
if @_showWipe != trueOrFalse
@_showWipe = trueOrFalse
@rebuild()
truncate: (text, length) ->
return '-empty-' if text.length == 0
if @_doTruncation && text.length > (length || 30)
return text.substring(0, (length || 30)) + '...'
text
replaceLastSelectedFieldIfRecent: (text) ->
if @lastEditingUnfocusedTime > (new Date()).getTime() - 200 # Short delay for unfocus to occur.
@setLastEditingFocus(text)
@rebuild()
editingUnfocused: (elem, struct, key, root, kind) ->
selectionStart = elem?.selectionStart
selectionEnd = elem?.selectionEnd
@setLastEditingFocus = (text) =>
@updateStruct(struct, key, text, kind, selectionStart, selectionEnd)
@json = root # Because self.json is a new reference due to rebuild.
@lastEditingUnfocusedTime = (new Date()).getTime()
edit: ($elem, key, struct, root, kind) ->
form = $("<form></form>").css('display', 'inline')
$input = $("<input />")
$input.val @getValFromStruct(struct, key, kind)
$input.addClass 'edit_field'
blurHandler = =>
val = $input.val()
@updateStruct(struct, key, val, kind)
@editingUnfocused($elem, struct, (kind == 'key' ? val : key), root, kind)
$elem.text(@truncate(val))
$elem.get(0).editing = false
@rebuild() if key != val
$input.blur blurHandler
$input.keydown (e) =>
if e.keyCode == 9 || e.keyCode == 13 # Tab and enter
@doAutoFocus = true
blurHandler()
$(form).append($input).submit (e) =>
e.preventDefault()
@doAutoFocus = true
blurHandler()
$elem.html(form)
$input.focus()
editable: (text, key, struct, root, kind) ->
self = this;
elem = $('<span class="editable" href="#"></span>').text(@truncate(text)).click( (e) ->
unless @editing
@editing = true;
self.edit($(this), key, struct, root, kind)
true
)
elem
build: (json, node, parent, key, root) ->
elem = null
if json instanceof Array
bq = $(document.createElement("BLOCKQUOTE"))
bq.append($('<div class="brackets">[</div>'))
bq.prepend(@addUI(json))
if parent
bq.prepend(@wipeUI(key, parent)) if @_showWipe
bq.prepend(@deleteUI(key, parent))
for i in [0...json.length]
innerbq = $(document.createElement("BLOCKQUOTE"))
newElem = @build(json[i], innerbq, json, i, root)
elem = newElem if newElem && newElem.text() == "??"
bq.append(innerbq)
bq.append($('<div class="brackets">]</div>'))
node.append(bq)
else if json instanceof Object
bq = $(document.createElement("BLOCKQUOTE"))
bq.append($('<div class="bracers">{</div>'))
for jsonkey, jsonvalue of json
innerbq = $(document.createElement("BLOCKQUOTE"))
newElem = @editable(jsonkey.toString(), jsonkey.toString(), json, root, 'key').wrap('<span class="key"></b>').parent()
innerbq.append(newElem)
elem = newElem if newElem && newElem.text() == "??"
if typeof jsonvalue != 'string'
innerbq.prepend(@braceUI(jsonkey, json))
innerbq.prepend(@bracketUI(jsonkey, json))
innerbq.prepend(@wipeUI(jsonkey, json)) if @_showWipe
innerbq.prepend(@deleteUI(jsonkey, json, true))
innerbq.append($('<span class="colon">: </span>'))
newElem = @build(jsonvalue, innerbq, json, jsonkey, root)
elem = newElem if !elem && newElem && newElem.text() == "??"
bq.append(innerbq)
bq.prepend(@addUI(json))
if parent
bq.prepend(@wipeUI(key, parent)) if @_showWipe
bq.prepend(@deleteUI(key, parent))
bq.append($('<div class="bracers">}</div>'))
node.append(bq)
else
if json == null
json = ''
elem = @editable(json.toString(), key, parent, root, 'value').wrap('<span class="val"></span>').parent();
node.append(elem)
node.prepend(@braceUI(key, parent))
node.prepend(@bracketUI(key, parent))
if parent
node.prepend(@wipeUI(key, parent)) if @_showWipe
node.prepend(@deleteUI(key, parent))
elem
| true | ###
Copyright (c) 2014, PI:NAME:<NAME>END_PI
Copyright (c) 2009, PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
You will probably need to tell the editor where to find its 'add' and 'delete' images. In your
code, before you make the editor, do something like this:
JSONEditor.prototype.ADD_IMG = '/javascripts/jsoneditor/add.png';
JSONEditor.prototype.DELETE_IMG = '/javascripts/jsoneditor/delete.png';
You can enable or disable visual truncation in the structure editor with the following:
myEditor.doTruncation(false);
myEditor.doTruncation(true); // The default
You can show a 'w'ipe button that does a more aggressive delete by calling showWipe(true|false) or by passing in 'showWipe: true'.
###
class window.JSONEditor
constructor: (wrapped, options = {}) ->
@builderShowing = true
@ADD_IMG ||= options.ADD_IMG || 'lib/images/add.png'
@DELETE_IMG ||= options.DELETE_IMG || 'lib/images/delete.png'
@functionButtonsEnabled = false
@_doTruncation = true
@_showWipe = options.showWipe
@history = []
@historyPointer = -1
throw("Must provide an element to wrap.") if wrapped == null || (wrapped.get && wrapped.get(0) == null)
@wrapped = $(wrapped)
@wrapped.wrap('<div class="json-editor"></div>')
@container = $(@wrapped.parent())
@wrapped.hide()
@container.css("position", "relative")
@doAutoFocus = false
@editingUnfocused()
@rebuild()
braceUI: (key, struct) ->
$('<a class="icon" href="#"><strong>{</strong></a>').click (e) =>
e.preventDefault()
struct[key] = { "??": struct[key] }
@doAutoFocus = true
@rebuild()
bracketUI: (key, struct) ->
$('<a class="icon" href="#"><strong>[</a>').click (e) =>
e.preventDefault()
struct[key] = [ struct[key] ]
@doAutoFocus = true
@rebuild()
deleteUI: (key, struct, fullDelete) ->
$("<a class='icon' href='#' title='delete'><img src='#{@DELETE_IMG}' border=0 /></a>").click (e) =>
e.preventDefault()
if !fullDelete
didSomething = false
if struct[key] instanceof Array
if struct[key].length > 0
struct[key] = struct[key][0]
didSomething = true
else if struct[key] instanceof Object
for subkey, subval of struct[key]
struct[key] = struct[key][subkey]
didSomething = true
break
if didSomething
@rebuild()
return
if struct instanceof Array
struct.splice(key, 1)
else
delete struct[key]
@rebuild()
wipeUI: (key, struct) ->
$('<a class="icon" href="#" title="wipe"><strong>W</strong></a>').click (e) =>
e.preventDefault()
if struct instanceof Array
struct.splice(key, 1)
else
delete struct[key]
@rebuild()
addUI: (struct) ->
$("<a class='icon' href='#' title='add'><img src='#{@ADD_IMG}' border=0/></a>").click (e) =>
e.preventDefault()
if struct instanceof Array
struct.push('??')
else
struct['??'] = '??'
@doAutoFocus = true
@rebuild()
undo: ->
if @saveStateIfTextChanged()
@historyPointer -= 1 if @historyPointer > 0
@restore()
redo: ->
if @historyPointer + 1 < @history.length
if @saveStateIfTextChanged()
@historyPointer += 1
@restore()
showBuilder: ->
if @checkJsonInText()
@setJsonFromText()
@rebuild()
@wrapped.hide()
@builder.show()
true
else
alert "Sorry, there appears to be an error in your JSON input. Please fix it before continuing."
false
showText: ->
@builder.hide()
@wrapped.show()
toggleBuilder: ->
if @builderShowing
@showText()
@builderShowing = !@builderShowing
else
if @showBuilder()
@builderShowing = !@builderShowing
showFunctionButtons: (insider) ->
@functionButtonsEnabled = true unless insider
if @functionButtonsEnabled && !@functionButtons
@functionButtons = $('<div class="function_buttons"></div>')
@functionButtons.append $('<a href="#" style="padding-right: 10px;">Undo</a>').click (e) =>
e.preventDefault()
@undo()
@functionButtons.append $('<a href="#" style="padding-right: 10px;">Redo</a>').click (e) =>
e.preventDefault()
@redo()
@functionButtons.append $('<a id="toggle_view" href="#" style="padding-right: 10px; float: right;">Toggle View</a>').click (e) =>
e.preventDefault()
@toggleBuilder()
@container.prepend(@functionButtons)
saveStateIfTextChanged: ->
if JSON.stringify(@json, null, 2) != @wrapped.get(0).value
if @checkJsonInText()
@saveState(true)
else
if confirm("The current JSON is malformed. If you continue, the current JSON will not be saved. Do you wish to continue?")
@historyPointer += 1
true
else
false
true
restore: ->
if @history[@historyPointer]
@wrapped.get(0).value = @history[@historyPointer]
@rebuild(true)
saveState: (skipStoreText) ->
if @json
@storeToText() unless skipStoreText
text = @wrapped.get(0).value
if @history[@historyPointer] != text
@historyTruncate()
@history.push(text)
@historyPointer += 1
fireChange: ->
$(@wrapped).trigger 'change'
historyTruncate: ->
if @historyPointer + 1 < @history.length
@history.splice(@historyPointer + 1, @history.length - @historyPointer)
storeToText: ->
@wrapped.get(0).value = JSON.stringify(@json, null, 2)
getJSONText: ->
@rebuild()
@wrapped.get(0).value
getJSON: ->
@rebuild()
@json
rebuild: (doNotRefreshText) ->
@setJsonFromText() unless @json
changed = @haveThingsChanged()
if @json && !doNotRefreshText
@saveState()
@cleanBuilder()
@setJsonFromText()
@alreadyFocused = false
elem = @build(@json, @builder, null, null, @json)
@recoverScrollPosition()
# Auto-focus to edit '??' keys and values.
if elem && elem.text() == '??' && !@alreadyFocused && @doAutoFocus
@alreadyFocused = true
@doAutoFocus = false
elem = elem.find('.editable')
elem.click()
elem.find('input').focus().select()
# still missing a proper scrolling into the selected input
@fireChange() if changed
# Truthy when there is parsed JSON and its serialization no longer matches
# the raw text in the wrapped textarea.
haveThingsChanged: ->
    return false unless @json
    JSON.stringify(@json, null, 2) != @wrapped.get(0).value
# Remember the builder pane's vertical scroll offset so it can be restored
# after a rebuild wipes and re-creates the DOM.
saveScrollPosition: ->
    @oldScrollHeight = @builder.scrollTop()
# Restore the scroll offset captured by saveScrollPosition.
recoverScrollPosition: ->
    @builder.scrollTop @oldScrollHeight
# Parse the textarea contents into @json. An empty textarea is first seeded
# with "{}". Bare \n / \t escape sequences are double-escaped so JSON.parse
# accepts them; the leading group in each regex skips backslashes that are
# already escaped.
setJsonFromText: ->
    @wrapped.get(0).value = "{}" if @wrapped.get(0).value.length == 0
    try
        @wrapped.get(0).value = @wrapped.get(0).value.replace(/((^|[^\\])(\\\\)*)\\n/g, '$1\\\\n').replace(/((^|[^\\])(\\\\)*)\\t/g, '$1\\\\t')
        @json = JSON.parse(@wrapped.get(0).value)
    catch e
        alert "Got bad JSON from text."
# Report whether the textarea currently holds syntactically valid JSON.
checkJsonInText: ->
    try
        JSON.parse(@wrapped.get(0).value)
        return true
    catch e
        return false
# Debug helper: dump the current JSON (2-space indented) to the console.
logJSON: ->
    console.log(JSON.stringify(@json, null, 2))
# Reset the builder pane: create it on first use, remember the scroll
# position, clear its contents and (re-)attach the function buttons.
cleanBuilder: ->
    unless @builder
        @builder = $('<div class="builder"></div>')
        @container.append(@builder)
    @saveScrollPosition()
    @builder.text('')
    @showFunctionButtons("defined")
# Write an edited key or value back into the underlying JSON structure.
# kind: 'key' renames `key` to `val` while preserving key order; any other
#       kind replaces the value stored at `key`.
# selectionStart/selectionEnd: when both are set, `val` replaces only the
#       selected substring of the old key/value instead of the whole text.
updateStruct: (struct, key, val, kind, selectionStart, selectionEnd) ->
    if kind == 'key'
        if selectionStart && selectionEnd
            val = key.substring(0, selectionStart) + val + key.substring(selectionEnd, key.length)
        struct[val] = struct[key]
        # order keys: re-insert every key that followed the renamed one so the
        # new key keeps its original position (JS object key order follows
        # insertion order for string keys).
        orderrest = 0
        $.each struct, (index, value) ->
            # re-set rest of the keys
            # Bug fix: was `orderrest & index != val` — a bitwise AND that only
            # worked because orderrest is 0/1; logical && is what was intended.
            if orderrest && index != val
                tempval = struct[index]
                delete struct[index]
                struct[index] = tempval
            if key == index
                orderrest = 1
        # end of order keys
        delete struct[key] if key != val
    else
        if selectionStart && selectionEnd
            val = struct[key].substring(0, selectionStart) + val + struct[key].substring(selectionEnd, struct[key].length)
        struct[key] = val
# For 'key' edits the editable text is the key itself; otherwise it is the
# value stored under that key.
getValFromStruct: (struct, key, kind) ->
    return key if kind == 'key'
    struct[key]
# Toggle truncation of long strings in the rendered view; rebuilds only
# when the setting actually changes.
doTruncation: (trueOrFalse) ->
    return if @_doTruncation == trueOrFalse
    @_doTruncation = trueOrFalse
    @rebuild()
# Toggle the 'wipe' UI affordance; rebuilds only when the setting changes.
showWipe: (trueOrFalse) ->
    return if @_showWipe == trueOrFalse
    @_showWipe = trueOrFalse
    @rebuild()
# Shorten `text` for display. Empty strings render as '-empty-'; when
# truncation is enabled, strings longer than `length` (default 30) are cut
# and suffixed with '...'.
truncate: (text, length) ->
    return '-empty-' if text.length == 0
    limit = length || 30
    if @_doTruncation && text.length > limit
        return text.substring(0, limit) + '...'
    text
# If an editable field lost focus within the last 200ms, re-apply `text` to
# it via the closure captured by editingUnfocused, then rebuild.
replaceLastSelectedFieldIfRecent: (text) ->
    if @lastEditingUnfocusedTime > (new Date()).getTime() - 200 # Short delay for unfocus to occur.
        @setLastEditingFocus(text)
        @rebuild()
# Record that an editable input lost focus. Captures the edit context in a
# closure (@setLastEditingFocus) so replaceLastSelectedFieldIfRecent can
# still apply a late-arriving value to the right struct/key/selection.
editingUnfocused: (elem, struct, key, root, kind) ->
    selectionStart = elem?.selectionStart
    selectionEnd = elem?.selectionEnd
    @setLastEditingFocus = (text) =>
        @updateStruct(struct, key, text, kind, selectionStart, selectionEnd)
        @json = root # Because self.json is a new reference due to rebuild.
    @lastEditingUnfocusedTime = (new Date()).getTime()
# Swap the clicked span for an inline <input> so the user can edit the key
# or value in place. Commits on blur, Tab, Enter, or form submit.
edit: ($elem, key, struct, root, kind) ->
    form = $("<form></form>").css('display', 'inline')
    $input = $("<input />")
    $input.val @getValFromStruct(struct, key, kind)
    $input.addClass 'edit_field'
    blurHandler = =>
        val = $input.val()
        @updateStruct(struct, key, val, kind)
        # Bug fix: the original used a JS-style ternary `kind == 'key' ? val : key`,
        # but CoffeeScript has no ternary (`?` is the existential operator), so
        # it never selected val/key as intended.
        @editingUnfocused($elem, struct, (if kind == 'key' then val else key), root, kind)
        $elem.text(@truncate(val))
        $elem.get(0).editing = false
        @rebuild() if key != val
    $input.blur blurHandler
    $input.keydown (e) =>
        if e.keyCode == 9 || e.keyCode == 13 # Tab and enter
            @doAutoFocus = true
            blurHandler()
    $(form).append($input).submit (e) =>
        e.preventDefault()
        @doAutoFocus = true
        blurHandler()
    $elem.html(form)
    $input.focus()
# Wrap `text` in a clickable span; the first click switches it into edit
# mode. Inside the click handler `this` is the DOM element, so `@editing`
# acts as a per-element guard against opening two inputs at once.
editable: (text, key, struct, root, kind) ->
    self = this;
    elem = $('<span class="editable" href="#"></span>').text(@truncate(text)).click( (e) ->
        unless @editing
            @editing = true;
            self.edit($(this), key, struct, root, kind)
        true
    )
    elem
# Recursively render `json` into the DOM under `node`.
# parent/key locate `json` inside its parent container (null at the root);
# root is the top-level structure, threaded through so edit callbacks can
# re-assign @json. Returns the first element whose text is '??' (a freshly
# added key/value) so rebuild() can auto-focus it.
build: (json, node, parent, key, root) ->
    elem = null
    if json instanceof Array
        bq = $(document.createElement("BLOCKQUOTE"))
        bq.append($('<div class="brackets">[</div>'))
        bq.prepend(@addUI(json))
        if parent
            bq.prepend(@wipeUI(key, parent)) if @_showWipe
            bq.prepend(@deleteUI(key, parent))
        for i in [0...json.length]
            innerbq = $(document.createElement("BLOCKQUOTE"))
            newElem = @build(json[i], innerbq, json, i, root)
            elem = newElem if newElem && newElem.text() == "??"
            bq.append(innerbq)
        bq.append($('<div class="brackets">]</div>'))
        node.append(bq)
    else if json instanceof Object
        bq = $(document.createElement("BLOCKQUOTE"))
        bq.append($('<div class="bracers">{</div>'))
        for jsonkey, jsonvalue of json
            innerbq = $(document.createElement("BLOCKQUOTE"))
            # Bug fix: wrapper markup was '<span class="key"></b>' — a
            # mismatched closing tag typo.
            newElem = @editable(jsonkey.toString(), jsonkey.toString(), json, root, 'key').wrap('<span class="key"></span>').parent()
            innerbq.append(newElem)
            elem = newElem if newElem && newElem.text() == "??"
            # Brace/bracket (add-container) buttons make no sense on string values.
            if typeof jsonvalue != 'string'
                innerbq.prepend(@braceUI(jsonkey, json))
                innerbq.prepend(@bracketUI(jsonkey, json))
            innerbq.prepend(@wipeUI(jsonkey, json)) if @_showWipe
            innerbq.prepend(@deleteUI(jsonkey, json, true))
            innerbq.append($('<span class="colon">: </span>'))
            newElem = @build(jsonvalue, innerbq, json, jsonkey, root)
            elem = newElem if !elem && newElem && newElem.text() == "??"
            bq.append(innerbq)
        bq.prepend(@addUI(json))
        if parent
            bq.prepend(@wipeUI(key, parent)) if @_showWipe
            bq.prepend(@deleteUI(key, parent))
        bq.append($('<div class="bracers">}</div>'))
        node.append(bq)
    else
        # Leaf value: null renders as an empty editable string.
        if json == null
            json = ''
        elem = @editable(json.toString(), key, parent, root, 'value').wrap('<span class="val"></span>').parent();
        node.append(elem)
        node.prepend(@braceUI(key, parent))
        node.prepend(@bracketUI(key, parent))
        if parent
            node.prepend(@wipeUI(key, parent)) if @_showWipe
            node.prepend(@deleteUI(key, parent))
    elem
|
[
{
"context": "ead\n })\n\n style = layer.style\n blockKey = sketchIDToUniqueKey(layer.objectID)\n block =\n to",
"end": 9121,
"score": 0.7514516711235046,
"start": 9113,
"tag": "KEY",
"value": "sketchID"
}
] | sketch-importer/importer.coffee | caffed/pagedraw | 3,213 | fs = require 'fs'
path = require 'path'
child_process = require 'child_process'
crc64 = require 'crc64-ecma182'
_l = require 'lodash'
async = require 'async'
tinycolor = require 'tinycolor2'
PromisePool = require 'es6-promise-pool'
mkdirp = require 'mkdirp'
{Doc} = require '../src/doc'
{Dynamicable} = require '../src/dynamicable'
Block = require '../src/block'
TextBlock = require '../src/blocks/text-block'
{InstanceBlock} = require '../src/blocks/instance-block'
ArtboardBlock = require '../src/blocks/artboard-block'
LayoutBlock = require '../src/blocks/layout-block'
ImageBlock = require '../src/blocks/image-block'
{fontsByName, LocalUserFont} = require '../src/fonts'
{Model} = require '../src/model'
{preprocess_sketch} = require './preprocess-sketch'
{stubbable} = require '../src/test-stubber'
DEBUG = false
## utils
# Depth-first traversal of a Sketch layer tree.
# preorder(layer, ctx) runs on the way down and its result becomes the ctx
# for the children; postorder(layer, childResults, ctx) runs on the way up
# and its result is returned for this subtree. Both callbacks are optional.
walkLayerTree = (layer, {preorder, postorder, ctx}) ->
    childCtx = preorder?(layer, ctx)
    visit = (child) -> walkLayerTree(child, {preorder, postorder, ctx: childCtx})
    childResults = layer.layers?.map(visit) ? []
    postorder?(layer, childResults, ctx)
# Invoke fn once per layer in the tree (post-order: children before parent).
foreachLayer = (layer, fn) ->
    walkLayerTree layer, postorder: (pd) -> fn(pd)
# NOTE mapLayerTree is not pure: it does not make copies of nodes before handing them to fn
# NOTE(review): this calls walkPdom, which is not defined anywhere in this
# file — presumably a leftover from a pdom-based variant (walkLayerTree looks
# like the intended traversal). Confirm before relying on this helper.
mapLayerTree = (pdom, fn) ->
    walkPdom pdom, postorder: (pd, layers) ->
        pd.layers = layers
        return fn(pd)
# Maps internal macOS PostScript font names to their user-facing family names.
fontMapper = {
    '.SFNSDisplay-Regular' : 'San Francisco'
}
# Debug logger — prints only when the module-level DEBUG flag is on.
log = (txt) -> console.log txt if DEBUG
# A layer with a non-empty `layers` array is a group; leaves have none.
hasChildren = (layer) -> layer.layers?.length > 0
isLeaf = (layer) -> not hasChildren(layer)
# Convert an rgba() string with 0-1 channel values (as emitted by sketchtool)
# into one with 0-255 integer channels; alpha passes through unchanged.
# NOTE(review): throws a TypeError when `str` does not match the rgba()
# pattern (the destructuring runs on undefined) — confirm callers always
# pass an rgba() string.
rgbaTransform = (str) ->
    rgba_match = /rgba\((.*),(.*),(.*),(.*)\)/.exec(str)?.slice(1)?.map(Number)
    [r, g, b, a] = rgba_match
    scale = (color) -> Math.round(color * 255)
    return "rgba(#{scale(r)}, #{scale(g)}, #{scale(b)}, #{a})"
# A layer is importable as an image when it carries an image fill, or when
# neither it nor any descendant is a primitive we import natively
# (text, rectangle, symbol instance/master, artboard).
isImage = (layer) ->
    return true if _l.head(layer.style?.fills)?.image?
    nativeClasses = ['MSTextLayer', 'MSRectangleShape', 'MSSymbolInstance', 'MSArtboardGroup', 'MSSymbolMaster']
    return false if layer['<class>'] in nativeClasses
    return _l.every layer.layers, isImage
# Derive a stable 16-character unique key from a Sketch objectID by hashing
# it with CRC64-ECMA182 and truncating the decimal string form.
sketchIDToUniqueKey = (objectID) ->
    crc = crc64.crc64(objectID)
    return crc64.toUInt64String(crc).substring(0, 16)
# Assign a Dynamicable property on a block: sets the static value plus a
# deterministic uniqueKey derived from the block's key and the prop name.
# Silently skips null/undefined values — rationale below.
set_dyn_attr = (block, prop, value, blockUniqueKey) ->
    # as a safety precaution, don't allow undefined staticValues. If we're assigning a value from the sketchdump,
    # and for whatever reason the sketchdump doesn't have the value, we'll assign a staticValue of undefined (if we
    # didn't have this check). This is preventing some small edge cases where it would be valid to have staticValue
    # undefined. It should only work in times when the type of the prop is Dynamicable(Maybe X), which we should
    # not use undefinds for anyway.
    # We could change the behavior of Model for deserialize to require that all props be present, but that has some
    # kinds of meh implications.
    # The most correct thing to do is to give explicit default values everywhere in the sketch importer. In case
    # a value is missing, we don't necessarily want to use the Pagedraw defaults, but the Sketch defaults.
    return if value? == false
    block[prop].staticValue = value
    block[prop].uniqueKey = sketchIDToUniqueKey("#{blockUniqueKey}:#{prop}")
# Apply an opacity (0-1) to a CSS color string; returns the color unchanged
# when no opacity is given.
setOpacity = (rgb, opacity) =>
    return rgb unless opacity?
    tinycolor(rgb).setAlpha(opacity).toRgbString()
# True when `layer` or any of its ancestors has the given objectID —
# i.e. the layer sits inside that group. Iterative walk up the parent chain.
layerInGroup = (groupObjectID, layer) =>
    loop
        return true if layer.objectID == groupObjectID
        return false unless layer.parent?
        layer = layer.parent
# Canonical CSS font-weight numbers for the weight words that appear as the
# last fragment of font PostScript names (e.g. 'Roboto-SemiBold' -> '600').
fontWeightNames =
    thin: '100'
    extralight: '200'
    ultralight: '200'
    light: '300'
    book: '400'
    normal: '400'
    regular: '400'
    roman: '400'
    medium: '500'
    semibold: '600'
    demibold: '600'
    bold: '700'
    extrabold: '800'
    ultrabold: '800'
    black: '900'
    heavy: '900'
# Resolve a font PostScript name (plus optional family) into
# {fontFamily, fontWeight, isItalics}. Tries to split off a trailing weight
# word ('-' then ' ' separated) and match the remainder against known fonts.
parseFont = (name, family = null) =>
    isItalics = Boolean name?.match('Italic')
    attemptToMatchFragment = (seperator) =>
        fontFragments = name?.split(seperator)
        return null if not (fontFragments?.length > 1)
        if fontWeight = fontWeightNames[_l.last(fontFragments).toLowerCase()]
            return {fontFamily, fontWeight, isItalics} if fontFamily = fontsByName[_l.initial(fontFragments).join('-')]
            return {fontFamily, fontWeight, isItalics} if fontFamily = fontsByName[_l.initial(fontFragments).join(' ')]
        return null
    return parsedFont if parsedFont = attemptToMatchFragment('-')
    return parsedFont if parsedFont = attemptToMatchFragment(' ')
    # NOTE(review): `not family == '...'` likely parses with unary `not`
    # binding tighter than `==` (i.e. `(not family) == '...'`), which would
    # make this branch effectively dead — confirm the intended precedence.
    if family and not family == 'System Font Regular'
        return {fontFamily: new LocalUserFont({name: family}), fontWeight: "400", isItalics}
    # fontFamily is an exact match with Pagedraw font name
    return {fontFamily, fontWeight: "400", isItalics} if fontFamily = fontsByName[family]
    # HACK: Use LocalUserFont({name: 'replace-me'}), if font is not uploaded AND not installed locally. We use name 'replace-me'
    # because sketchtool gives no info about what this font is. This will get overwritten later but needs to be a font to serialize.
    # FIXME: Modify the sketchtool dump result to have the correct font names
    return {fontFamily: new LocalUserFont({name: 'replace-me'}), fontWeight: "400", isItalics}
# A bidirectional map: lookup by key (getForward) or by value (getReverse).
# NOTE: callers access @forwardMap directly (e.g. importFromSketch iterates
# blockLayerBiMap.forwardMap.entries()), so the map names are part of the
# public surface.
class BiMap
    constructor: ->
        @forwardMap = new Map()
        @reverseMap = new Map()
    # Look up the value stored under `key`.
    getForward: (key) -> @forwardMap.get(key)
    # Look up the key that stored `key` as its value.
    getReverse: (key) -> @reverseMap.get(key)
    # Insert a pair in both directions; later duplicates overwrite.
    set: (key, val) ->
        @forwardMap.set(key, val)
        @reverseMap.set(val, key)
    clear: ->
        @forwardMap.clear()
        @reverseMap.clear()
    keys: -> @forwardMap.keys()
    values: -> @forwardMap.values()
    # Copy every pair from another BiMap into this one (insertion order kept).
    merge: (map) -> Array.from(map.forwardMap).forEach ([key, val]) => @set(key, val)
# Import one Sketch page: convert its layer tree into Pagedraw blocks,
# normalize their positions, and apply clipping-mask geometry.
# Returns [blocks, blockLayerBiMap].
importPage = (sketchPage) ->
    # Add parents to everyone
    walkLayerTree sketchPage,
        preorder: (layer, ctx) -> {parent: layer}
        postorder: (layer, _accum, ctx={}) -> _l.extend layer, {parent: ctx.parent}
    [blocks, blockLayerBiMap] = importLayer(sketchPage)
    # Make sure no artboards are outside canvas by positioning them all starting at 100, 100
    [minLeft, minTop] = [_l.min(_l.map(blocks, 'left')), _l.min(_l.map(blocks, 'top'))]
    for block in blocks
        block.left += 100 - minLeft
        block.top += 100 - minTop
    blocksToRemove = []
    # Each mask entry pairs the mask's block with its grandparent group's id.
    masks = Array.from(blockLayerBiMap.values())
        .filter((layer) => layer.parent?.hasClippingMask == 1)
        .map (maskLayer) =>
            [blockLayerBiMap.getReverse(maskLayer), maskLayer.parent.parent?.objectID]
    # Sketch masks crop the layers above them. Order is reflected in the sketch layerlist
    # such that blocks listed higher within a group are above those listed lower.
    for block in blocks
        layer = blockLayerBiMap.getForward(block)
        maskingBlock = masks.find ([mask_block, grandparent_object_id]) =>
            block != mask_block and layerInGroup(grandparent_object_id, layer)
        continue if not maskingBlock?
        # Check that masking block is below block to be masked. Maps always iterate in insertion order and sketchtool dumps in layerlist order
        continue if block == Array.from(blockLayerBiMap.keys()).find (b) => b == block or b == maskingBlock[0]
        masked_geometry = block.intersection(maskingBlock[0])
        # Entirely outside the mask: schedule for removal.
        if masked_geometry == null
            blocksToRemove.push(block)
            continue
        index = blocks.findIndex (b) => b.uniqueKey == block.uniqueKey
        blocks[index] = _l.extend block, {geometry: masked_geometry}
    # Remove any block completely covered by another unless its an artboard (don't mess with components)
    blocks = blocks.filter (block1, i) => not blocks.some (block2, j) =>
        block2.contains(block1) and i < j and block1 not instanceof ArtboardBlock
    # Remove any blocks completely covered by a mask
    blocks = blocks.filter (block) => block not in blocksToRemove
    return [blocks, blockLayerBiMap]
# Convert one Sketch layer (and, for containers, its subtree) into Pagedraw
# blocks. Coordinates are accumulated from the parent so every block gets
# absolute top/left. Returns [blocks, blockLayerBiMap] where the BiMap links
# each created block to its source layer.
importLayer = (layer, depth = 0, parent_x = 0, parent_y = 0) ->
    blockLayerBiMap = new BiMap()
    # Import all sublayers, merging their block<->layer maps into ours.
    importChildren = (layer, depth, x, y) ->
        return _l.flatten layer.layers.map (l) ->
            [children, subLayerBiMap] = importLayer(l, depth + 1, x, y)
            blockLayerBiMap.merge(subLayerBiMap)
            return children
    # Hidden layers produce no blocks at all.
    return [[], blockLayerBiMap] unless layer.isVisible
    x = parent_x + layer.frame.x
    y = parent_y + layer.frame.y
    log depth + ' / ' + layer['<class>'] + ' / ' + layer.name
    # Translate a sketchtool shadow object into Pagedraw's box-shadow tuple.
    createShadow = (obj) => new Model.tuple_named['box-shadow']({
        color: obj.color.value
        offsetX: obj.offsetX
        offsetY: obj.offsetY
        blurRadius: obj.blurRadius
        spreadRadius: obj.spread
    })
    style = layer.style
    blockKey = sketchIDToUniqueKey(layer.objectID)
    # Common attributes shared by every block type below.
    block =
        top: y
        left: x
        width: layer.frame.width
        height: layer.frame.height
        name: layer.name
        uniqueKey: blockKey
    ## Image block
    # We identify something as an image if it and its recursive sublayers don't have any non-image (other primitives)
    if isImage(layer)
        # strip extension and scaling from objectID to match format add_exports.coffee will mutate export names to
        exportName = "#{layer.objectID.substr(0, 36)}.png"
        # FIXME this is awful code. Never just attach a new property to an object of an existing type.
        # Especially on a Model like ImageBlock where ImageBlock.properties are enumerated. If a property exists on
        # one object of a type, the property must exist on all objects of that type.
        image_block = _l.extend new ImageBlock(block), {exportInfo: {name: exportName, type: 'png'}}
        blockLayerBiMap.set(image_block, layer)
        return [[image_block], blockLayerBiMap]
    ## Artboard block
    else if layer['<class>'] == 'MSArtboardGroup' or layer['<class>'] == 'MSSymbolMaster'
        artboard_block = _l.extend new ArtboardBlock(block), {symbolId: layer.symbolID}
        # FIXME support gradients
        set_dyn_attr(artboard_block, 'color', layer.backgroundColor.value, blockKey) if layer.backgroundColor?["<class>"] == 'MSColor'
        artboard_block.includeColorInCompilation = false if layer['<class>'] == 'MSSymbolMaster' and layer.includeBackgroundColorInExport == 0
        # we assume all artboards in Sketch are pages, all symbolmasters aren't
        artboard_block.is_screenfull = (layer['<class>'] == 'MSArtboardGroup')
        children = importChildren(layer, depth, x, y)
        # Sketch artboards mask child layers, so clip blocks inside artboards
        # note that there are rules for when we can do this and when we can't. Let's fix incrementally.
        # - Images must be masked; clipping will always be wrong
        # - Text cannot be clipped in any meaningful way. Text Layers may be larger than they need to be and
        #   hopefully we're only clipping empty space
        # - Borders on rectangles may be offscreen on 3 out of 4 sides. Plain rectangles are otherwise perfect
        #   to clip.
        masked_children = _l.compact children.map (child) ->
            masked_geometry = child.intersection(artboard_block)
            # if child is entirely outside artboard, the intersection is null
            if masked_geometry == null then return null
            return _l.extend child, {geometry: masked_geometry}
        blockLayerBiMap.set(artboard_block, layer)
        arboardWithChildren = _l.concat [artboard_block], masked_children
        return [arboardWithChildren, blockLayerBiMap]
    ## Text block
    else if layer['<class>'] == 'MSTextLayer'
        block.isUnderline = true if style.textStyle?['NSUnderline'] == 1
        # Fixme: Line height is coming from maximumLineHeight. Not sure what it should be in Sketch
        lineHeight = style.textStyle?['NSParagraphStyle']?.style?.maximumLineHeight
        block.lineHeight = lineHeight if lineHeight? and lineHeight != 0 and lineHeight != block.fontSize
        # Right now width: auto is very bad in Pagedraw so we never do it. If you want widtH: auto, set it
        # explicitly in our editor
        block.contentDeterminesWidth = false
        # Sketch uses numbers to describe textAlignment
        alignmentOptions = {'0': 'left', '1': 'right', '2': 'center', '3': 'justify'}
        block.textAlign = alignmentOptions[Number style.textStyle?['NSParagraphStyle']?.style.alignment]
        text_block = new TextBlock(block)
        # Remap font family from Sketch -> Pagedraw
        {fontFamily, fontWeight, isItalics} = parseFont style.textStyle?['NSFont']?['name'], style.textStyle?['NSFont']?['family']
        text_block.fontFamily = fontFamily
        text_block.isItalics = isItalics
        if fontWeight == "700"
            text_block.isBold = true
        else if fontWeight != "400"
            text_block.hasCustomFontWeight = true
            set_dyn_attr text_block, 'fontWeight', fontWeight, blockKey
        # Deliberate crash hook used by importer tests.
        if layer.attributedString.value.text == 'Crash if importer encounters this exact text'
            "".property.that.doesnt.exist = 9
        set_dyn_attr(text_block, 'textContent', layer.attributedString.value.text, blockKey)
        set_dyn_attr(text_block, 'fontSize', style.textStyle?['NSFont']['attributes']['NSFontSizeAttribute'], blockKey)
        set_dyn_attr(text_block, 'kerning', style.textStyle?['NSKern'], blockKey)
        # Font color can live in several places depending on Sketch version;
        # try each representation in turn.
        if style.textStyle?['NSColor']?.color?
            set_dyn_attr(text_block, 'fontColor', setOpacity(rgbaTransform(style.textStyle?['NSColor'].color), style.contextSettings.opacity), blockKey)
        else if style.textStyle?['MSAttributedStringColorAttribute']?.value?
            set_dyn_attr(text_block, 'fontColor', style.textStyle['MSAttributedStringColorAttribute'].value, blockKey)
        else if style.textStyle?.MSAttributedStringColorDictionaryAttribute?
            colorMap = {red: 'r', green: 'g', blue: 'b', alpha: 'a'}
            color = tinycolor _l.transform style.textStyle.MSAttributedStringColorDictionaryAttribute, (acc, val, key) =>
                acc[colorMap[key]] = Math.round(255 * val)
            set_dyn_attr(text_block, 'fontColor', color.toRgbString(), blockKey)
        else if style.fills?[0]?.isEnabled == 1 and style.fills[0].color?.value?
            set_dyn_attr(text_block, 'fontColor', tinycolor(style.fills[0].color.value).toRgbString(), blockKey)
        blockLayerBiMap.set(text_block, layer)
        return [[text_block], blockLayerBiMap]
    ## Layout block
    else if layer['<class>'] == 'MSRectangleShape'
        # In Sketch, the color of a MSRectangleShape comes from the parent
        block.borderRadius = layer.fixedRadius
        parentStyle = layer.parent?.style
        # Fill color with the parent's layer opacity folded into the alpha.
        getRgbaValue = =>
            layerOpacity = parentStyle.contextSettings?.opacity
            return parentStyle.fills[0].color.value if not layerOpacity?
            color = tinycolor(parentStyle.fills[0].color.value)
            return color.setAlpha(layerOpacity * color.getAlpha()).toRgbString()
        # Gradient direction in degrees from the from->to vector.
        getAngleDegrees = (opp, adj) =>
            return 180 if adj == 0 and opp > 0
            return 0 if adj == 0 and opp < 0
            angle = Math.atan(opp / adj) * (180 / Math.PI)
            return angle + 270 if (0 <= angle <= 90 and adj < 0) or (-90 <= angle < 0 and adj < 0)
            return angle + 90 if (-90 <= angle <= 0 and adj > 0) or (0 < angle <= 90 and adj > 0)
        border = parentStyle.borders[0]
        if border?.isEnabled == 1
            block.borderThickness = border.thickness
            block.borderColor = border.color.value
            # Pagedraw has no border outside property, so we simulate it by increasing the block size
            if border.position == 2
                block.left -= border.thickness
                block.top -= border.thickness
                block.width += (border.thickness * 2)
                block.height += (border.thickness * 2)
        block.outerBoxShadows = parentStyle.shadows.filter((shadow) => shadow.isEnabled == 1).map createShadow
        block.innerBoxShadows = parentStyle.innerShadows.filter((shadow) => shadow.isEnabled == 1).map createShadow
        # FillType 0 is for solid fills
        block.hasGradient = true if parentStyle.fills[0]?.fillType != 0
        layout_block = new LayoutBlock(block)
        set_dyn_attr(layout_block, 'color', getRgbaValue(), blockKey) if parentStyle.fills[0]?
        gradient = parentStyle.fills[0]?.gradient
        if parentStyle.fills[0]?.fillType != 0 and gradient?
            set_dyn_attr(layout_block, 'color', gradient.stops[0]?.color.value, blockKey)
            set_dyn_attr(layout_block, 'gradientEndColor', gradient.stops[1]?.color.value, blockKey)
            set_dyn_attr(layout_block, 'gradientDirection', getAngleDegrees((gradient.to.y - gradient.from.y), (gradient.to.x - gradient.from.x)), blockKey)
        # Disabled fill renders fully transparent.
        set_dyn_attr(layout_block, 'color', setOpacity(getRgbaValue(), 0), blockKey) if parentStyle.fills[0]?.isEnabled == 0
        blockLayerBiMap.set(layout_block, layer)
        return [[layout_block], blockLayerBiMap]
    ## Instance block
    else if layer['<class>'] == 'MSSymbolInstance'
        instance_block = _l.extend new InstanceBlock(block), {symbolId: layer.symbolID}
        blockLayerBiMap.set(instance_block, layer)
        return [[instance_block], blockLayerBiMap]
    ## Recursive case
    else if hasChildren(layer)
        return [importChildren(layer, depth, x, y), blockLayerBiMap]
    ## Unknown Layer class
    else
        console.log 'Unknown layer class: ' + layer['<class>']
        return [[], blockLayerBiMap]
# End-to-end Sketch import: preprocess the .sketch file, dump its layer tree
# via sketchtool, convert every page to blocks, export image layers and
# upload them via image_upload_strategy, then return a serialized Doc.
# Returns a Promise for the serialized doc JSON.
exports.importFromSketch = importFromSketch = (inputSketchFilePath, temp_dir, image_upload_strategy, STUB_FOR_TESTS = false) ->
    preprocess_sketch_dir = path.join(temp_dir, 'extracted/')
    sketchFilePath = path.join(temp_dir, 'with-exports.sketch')
    export_dir = path.join(temp_dir, 'artifacts/')
    processId = Math.floor(Math.random() * 10000)
    # declare the variable up here so it's not scoped to the inner function
    blocks = []
    fontsUsed = []
    blockLayerBiMap = new BiMap()
    console.time "#{processId}-total startup"
    Promise.all [preprocess_sketch(inputSketchFilePath, sketchFilePath, preprocess_sketch_dir, STUB_FOR_TESTS),
        stubbable "sketch-import-sketchtooldump", inputSketchFilePath, -> new Promise (resolve, reject) ->
            console.time "#{processId}-dump"
            sketchDump = ""
            stderr = ""
            dumpProcess = child_process.spawn("sketchtool", ["dump", inputSketchFilePath])
            # some sketch dump outputs are too big to fit in a node string. To avoid getting multiple
            # of the same error we use .spawn to catch these errors ourself
            dumpProcess.stdout.on "data", (data) =>
                try
                    sketchDump += data
                catch error
                    reject({reason: "Node.js string length exceeded", error})
            dumpProcess.stderr.on "data", (data) => stderr += data
            dumpProcess.on "close", (code) ->
                console.timeEnd "#{processId}-dump"
                console.log 'sketchtool dump ended'
                reject({reason: 'Malformed Sketch file. Unable to Sketch dump', stderr}) if code != 0
                resolve(sketchDump)
        .catch (e) ->
            console.log e
            throw e.reason
        .then (data) ->
            console.time "#{processId}-parse"
            try
                JSON.parse(data)
            catch e
                throw 'Malformed Sketch file. Unable to parse JSON'
        .then (sketchJson) ->
            console.timeEnd "#{processId}-parse"
            # NOTE(review): the same "-parse" timer label is restarted here —
            # confirm whether a different label was intended.
            console.time "#{processId}-parse"
            console.time "total import layer time-#{processId}"
            blocks_by_page = sketchJson.pages.map (page) ->
                [pageBlocks, mapArray] = _l.zip(importPage(page))
                blockLayerBiMap.merge(subLayerMap) for subLayerMap in mapArray
                return pageBlocks[0]
            # concat pages vertically
            space_between_pages = 140
            next_page_start = 100
            for blocks_in_page in blocks_by_page
                # get the independent frame of the page
                page_outer_geometry = Block.unionBlock(blocks_in_page)
                # skip this page if it's empty
                continue if page_outer_geometry == null
                for block in blocks_in_page
                    fontsUsed.push(block.fontFamily) if block instanceof TextBlock and block.fontFamily?.name?
                # move the blocks in the page to their place in the unified page
                delta_y = next_page_start - page_outer_geometry.top
                block.top += delta_y for block in blocks_in_page
                # add the block's pages to the doc
                blocks.push(block) for block in blocks_in_page
                # start the next page space_between_pages pixels after the last page
                next_page_start = next_page_start + page_outer_geometry.height + space_between_pages
            # Resolve instance and component refs from Sketch symbols
            potentialComponents = blocks.filter (b) -> b instanceof ArtboardBlock
            for block in blocks when block instanceof InstanceBlock
                sourceComponent = _l.find potentialComponents, (c) -> c.symbolId == block.symbolId
                block.sourceRef = sourceComponent?.componentSpec.componentRef
            console.timeEnd "total import layer time-#{processId}"
            # we're mutating the blocks
            return null
    ]
    # Export all images that will have ImageBlocks
    .then ([localFontIdMapping]) -> return new Promise (resolve, reject) ->
        console.timeEnd "#{processId}-total startup"
        console.time "#{processId}-total export time"
        throw "Sketch Importer imported an empty doc" if Object.keys(blocks).length <= 1
        images = blocks.filter((b) -> b.exportInfo?)
        MAX_BATCH_SIZE = 400
        PARALLEL_BATCHES = 8
        effective_batch_size = Math.min MAX_BATCH_SIZE, Math.ceil(images.length / PARALLEL_BATCHES)
        uploadPromises = []
        # A block still carrying the 'replace-me' placeholder font whose source layer matches layerId.
        blockAndLayerHaveMatchingLocalFont = (block, layer, layerId) => block.fontFamily instanceof LocalUserFont and block.fontFamily.name == 'replace-me' and layer.objectID == layerId
        # Match local user fonts with their blocks, convert to LocalUserFonts if we can't reconcile with a pagedraw font
        for layerId, fontName of localFontIdMapping
            for [block, layer] in Array.from(blockLayerBiMap.forwardMap.entries()).filter(([block, layer]) => blockAndLayerHaveMatchingLocalFont(block, layer, layerId))
                # Now that we have more font info, try parsing again.
                {fontFamily, fontWeight, isItalics} = parseFont fontName
                if fontFamily.name == 'replace-me'
                    # if parsing still fails then the font must not be installed locally and not in our doc
                    # in our doc fallback to a LocalUserFont with the plist name
                    fontFamily.name = fontName
                block.isItalics = isItalics
                if fontWeight == "700"
                    block.isBold = true
                else if fontWeight != "400"
                    block.hasCustomFontWeight = true
                    set_dyn_attr block, 'fontWeight', fontWeight, sketchIDToUniqueKey blockLayerBiMap.getForward(block).objectID
                fontsUsed.push block.fontFamily = fontFamily
        # Export one batch of image layers via sketchtool, then queue uploads.
        importChunk = (chunk) ->
            image_export = stubbable "sketch-import-image-export", export_dir, chunk, -> new Promise (resolve, reject) ->
                batchId = Math.floor(Math.random() * 10000)
                console.time(batchId + '-export')
                layer_export_process = child_process.spawn 'sketchtool', ['export', 'layers', sketchFilePath, "--output=#{export_dir}", '--use-id-for-name', '--save-for-web', "--items=#{chunk.map((block) => blockLayerBiMap.getForward(block).objectID).join(',')}"]
                layer_export_process.on 'close', (code) ->
                    console.timeEnd(batchId + '-export')
                    reject('Unable to export images. Sketchtool returned non-zero') if code != 0
                    resolve()
            # Wait for sketchdump export to finish
            .then ->
                console.log 'batch_size:', effective_batch_size, 'parallel_batches:', PARALLEL_BATCHES, 'image_format:', 'png'
                uploadPromises = uploadPromises.concat chunk.map (block) -> new Promise (resolve, reject) ->
                    fs.readFile path.resolve(export_dir, block.exportInfo.name), (err, data) ->
                        if err
                            console.log "Unable to read file #{block.exportInfo.name}. Error #{err}. Proceeding cautiously."
                            return resolve()
                        # We use current timestamp to make sure subsequent uploads go to different paths. Otherwise, chrome will cache
                        # and the editor won't update. Ideally, we'd use content-addressable addressing.
                        uploadPath = "#{block.uniqueKey}-#{Date.now()}-#{block.exportInfo.name}"
                        content_type = switch block.exportInfo.type
                            when "svg" then "image/svg+xml"
                            else "binary/octet-stream"
                        # FIXME if it's an SVG we should probably set preserveAspectRatio="none" on it so it stretches on resize in Pagedraw
                        image_upload_strategy uploadPath, data, content_type, (err, data) ->
                            if err
                                console.log "Unable to upload file to #{uploadPath} in S3. Error #{err}. Proceeding cautiously."
                                return resolve()
                            block.image = (Dynamicable String).from(data.Location)
                            resolve()
        imageChunks = _l.chunk(images, effective_batch_size)
        # PromisePool producer: hand out one chunk at a time until exhausted.
        promiseProducer = =>
            return null if _l.isEmpty(imageChunks)
            return importChunk(imageChunks.pop())
        pool = new PromisePool(promiseProducer, PARALLEL_BATCHES)
        pool.start().then () =>
            Promise.all uploadPromises
            .then () => resolve()
        .catch (e) ->
            console.log 'Error batch exporting images:', e
            throw e
    .then ->
        console.timeEnd "#{processId}-total export time"
        # LocalUserFont objects are instantiated every time we need it. So we compare by font name to remove duplicates
        new Doc({blocks, fonts: _l.uniqBy(fontsUsed, (font) => font.name).filter (font) => font.name != 'replace-me'}).serialize()
| 135947 | fs = require 'fs'
path = require 'path'
child_process = require 'child_process'
crc64 = require 'crc64-ecma182'
_l = require 'lodash'
async = require 'async'
tinycolor = require 'tinycolor2'
PromisePool = require 'es6-promise-pool'
mkdirp = require 'mkdirp'
{Doc} = require '../src/doc'
{Dynamicable} = require '../src/dynamicable'
Block = require '../src/block'
TextBlock = require '../src/blocks/text-block'
{InstanceBlock} = require '../src/blocks/instance-block'
ArtboardBlock = require '../src/blocks/artboard-block'
LayoutBlock = require '../src/blocks/layout-block'
ImageBlock = require '../src/blocks/image-block'
{fontsByName, LocalUserFont} = require '../src/fonts'
{Model} = require '../src/model'
{preprocess_sketch} = require './preprocess-sketch'
{stubbable} = require '../src/test-stubber'
DEBUG = false
## utils
# Depth-first traversal of a Sketch layer tree. preorder(layer, ctx) runs on
# the way down (its result is the children's ctx); postorder(layer, accum,
# ctx) runs on the way up and its result is returned. Both are optional.
walkLayerTree = (layer, {preorder, postorder, ctx}) ->
    child_ctx = preorder?(layer, ctx)
    accum = layer.layers?.map((child) -> walkLayerTree(child, {preorder, postorder, ctx: child_ctx})) ? []
    return postorder?(layer, accum, ctx)
# Invoke fn once per layer in the tree (post-order: children before parent).
foreachLayer = (layer, fn) ->
    walkLayerTree layer,
        postorder: (pd) ->
            fn(pd)
# NOTE mapLayerTree is not pure: it does not make copies of nodes before handing them to fn
# NOTE(review): calls walkPdom, which is not defined in this file —
# presumably a leftover from a pdom-based variant; confirm before use.
mapLayerTree = (pdom, fn) ->
    walkPdom pdom, postorder: (pd, layers) ->
        pd.layers = layers
        return fn(pd)
# Maps internal macOS PostScript font names to their user-facing family names.
fontMapper = {
    '.SFNSDisplay-Regular' : 'San Francisco'
}
# Debug logger — prints only when the module-level DEBUG flag is on.
log = (txt) -> console.log txt if DEBUG
# A layer with a non-empty `layers` array is a group; leaves have none.
hasChildren = (layer) -> layer.layers?.length > 0
isLeaf = (layer) -> not hasChildren(layer)
# Convert an rgba() string with 0-1 channels into 0-255 integer channels;
# alpha passes through. NOTE(review): throws a TypeError when `str` does not
# match the rgba() pattern — confirm callers always pass an rgba() string.
rgbaTransform = (str) ->
    rgba_match = /rgba\((.*),(.*),(.*),(.*)\)/.exec(str)?.slice(1)?.map(Number)
    [r, g, b, a] = rgba_match
    scale = (color) -> Math.round(color * 255)
    return "rgba(#{scale(r)}, #{scale(g)}, #{scale(b)}, #{a})"
# A layer is importable as an image when it carries an image fill, or when
# neither it nor any descendant is a natively-imported primitive.
isImage = (layer) ->
    return true if _l.head(layer.style?.fills)?.image?
    switch layer['<class>']
        when 'MSTextLayer'
            return false
        when 'MSRectangleShape'
            return false
        when 'MSSymbolInstance'
            return false
        when 'MSArtboardGroup'
            return false
        when 'MSSymbolMaster'
            return false
        else
            # Vacuously true for leaves with no sublayers.
            return _l.every layer.layers, isImage
# Derive a stable 16-character unique key from a Sketch objectID by hashing
# it with CRC64-ECMA182 and truncating the decimal string form.
sketchIDToUniqueKey = (objectID) ->
    ret1 = crc64.crc64(objectID)
    hash = crc64.toUInt64String(ret1)
    return hash.substring(0, 16)
# Assign a Dynamicable property on a block: sets the static value plus a
# deterministic uniqueKey derived from the block's key and the prop name.
# Silently skips null/undefined values — rationale below.
set_dyn_attr = (block, prop, value, blockUniqueKey) ->
    # as a safety precaution, don't allow undefined staticValues. If we're assigning a value from the sketchdump,
    # and for whatever reason the sketchdump doesn't have the value, we'll assign a staticValue of undefined (if we
    # didn't have this check). This is preventing some small edge cases where it would be valid to have staticValue
    # undefined. It should only work in times when the type of the prop is Dynamicable(Maybe X), which we should
    # not use undefinds for anyway.
    # We could change the behavior of Model for deserialize to require that all props be present, but that has some
    # kinds of meh implications.
    # The most correct thing to do is to give explicit default values everywhere in the sketch importer. In case
    # a value is missing, we don't necessarily want to use the Pagedraw defaults, but the Sketch defaults.
    return if value? == false
    block[prop].staticValue = value
    block[prop].uniqueKey = sketchIDToUniqueKey("#{blockUniqueKey}:#{prop}")
# Apply an opacity (0-1) to a CSS color string; returns the color unchanged
# when no opacity is given.
setOpacity = (rgb, opacity) =>
    return rgb if not opacity?
    color = tinycolor(rgb)
    return color.setAlpha(opacity).toRgbString()
# True when `layer` or any of its ancestors has the given objectID —
# i.e. the layer sits somewhere inside that group.
layerInGroup = (groupObjectID, layer) =>
    return true if layer.objectID == groupObjectID
    return layerInGroup(groupObjectID, layer.parent) if layer.parent?
    return false
# Canonical CSS font-weight numbers for the weight words that appear as the
# last fragment of font PostScript names (e.g. 'Roboto-SemiBold' -> '600').
fontWeightNames =
    thin: '100'
    extralight: '200'
    ultralight: '200'
    light: '300'
    book: '400'
    normal: '400'
    regular: '400'
    roman: '400'
    medium: '500'
    semibold: '600'
    demibold: '600'
    bold: '700'
    extrabold: '800'
    ultrabold: '800'
    black: '900'
    heavy: '900'
# Resolve a font PostScript name (plus optional family) into
# {fontFamily, fontWeight, isItalics}. Tries to split off a trailing weight
# word ('-' then ' ' separated) and match the remainder against known fonts.
parseFont = (name, family = null) =>
    isItalics = Boolean name?.match('Italic')
    attemptToMatchFragment = (seperator) =>
        fontFragments = name?.split(seperator)
        return null if not (fontFragments?.length > 1)
        if fontWeight = fontWeightNames[_l.last(fontFragments).toLowerCase()]
            return {fontFamily, fontWeight, isItalics} if fontFamily = fontsByName[_l.initial(fontFragments).join('-')]
            return {fontFamily, fontWeight, isItalics} if fontFamily = fontsByName[_l.initial(fontFragments).join(' ')]
        return null
    return parsedFont if parsedFont = attemptToMatchFragment('-')
    return parsedFont if parsedFont = attemptToMatchFragment(' ')
    # NOTE(review): `not family == '...'` likely parses with unary `not`
    # binding tighter than `==`, which would make this branch effectively
    # dead — confirm the intended precedence.
    if family and not family == 'System Font Regular'
        return {fontFamily: new LocalUserFont({name: family}), fontWeight: "400", isItalics}
    # fontFamily is an exact match with Pagedraw font name
    return {fontFamily, fontWeight: "400", isItalics} if fontFamily = fontsByName[family]
    # HACK: Use LocalUserFont({name: 'replace-me'}), if font is not uploaded AND not installed locally. We use name 'replace-me'
    # because sketchtool gives no info about what this font is. This will get overwritten later but needs to be a font to serialize.
    # FIXME: Modify the sketchtool dump result to have the correct font names
    return {fontFamily: new LocalUserFont({name: 'replace-me'}), fontWeight: "400", isItalics}
class BiMap
    # Bidirectional map: every (key, val) pair is stored in both directions so
    # lookups are O(1) from either side.
    constructor: ->
        @forwardMap = new Map()
        @reverseMap = new Map()

    getForward: (key) -> @forwardMap.get(key)

    getReverse: (key) -> @reverseMap.get(key)

    set: (key, val) ->
        # Keep both directions in sync.
        @forwardMap.set(key, val)
        @reverseMap.set(val, key)

    clear: ->
        @forwardMap.clear()
        @reverseMap.clear()

    keys: -> @forwardMap.keys()

    values: -> @forwardMap.values()

    merge: (map) ->
        # Absorb every pair of another BiMap into this one.
        @set(key, val) for [key, val] in Array.from(map.forwardMap)
        return
importPage = (sketchPage) ->
    # Import one Sketch page into Pagedraw blocks. Returns [blocks, blockLayerBiMap]
    # where the BiMap links each created block to the Sketch layer it came from.
    # Add parents to everyone
    walkLayerTree sketchPage,
        preorder: (layer, ctx) -> {parent: layer}
        postorder: (layer, _accum, ctx={}) -> _l.extend layer, {parent: ctx.parent}
    [blocks, blockLayerBiMap] = importLayer(sketchPage)
    # Make sure no artboards are outside canvas by positioning them all starting at 100, 100
    [minLeft, minTop] = [_l.min(_l.map(blocks, 'left')), _l.min(_l.map(blocks, 'top'))]
    for block in blocks
        block.left += 100 - minLeft
        block.top += 100 - minTop
    blocksToRemove = []
    # Collect [maskBlock, grandparentObjectID] pairs for layers whose parent is
    # flagged as a clipping mask; the grandparent group scopes what gets masked.
    masks = Array.from(blockLayerBiMap.values())
        .filter((layer) => layer.parent?.hasClippingMask == 1)
        .map (maskLayer) =>
            [blockLayerBiMap.getReverse(maskLayer), maskLayer.parent.parent?.objectID]
    # Sketch masks crop the layers above them. Order is reflected in the sketch layerlist
    # such that blocks listed higher within a group are above those listed lower.
    for block in blocks
        layer = blockLayerBiMap.getForward(block)
        maskingBlock = masks.find ([mask_block, grandparent_object_id]) =>
            block != mask_block and layerInGroup(grandparent_object_id, layer)
        continue if not maskingBlock?
        # Check that masking block is below block to be masked. Maps always iterate in insertion order and sketchtool dumps in layerlist order
        continue if block == Array.from(blockLayerBiMap.keys()).find (b) => b == block or b == maskingBlock[0]
        masked_geometry = block.intersection(maskingBlock[0])
        if masked_geometry == null
            # Block lies entirely outside its mask, so it is invisible: drop it.
            blocksToRemove.push(block)
            continue
        # Otherwise clip the block's geometry to the mask in place.
        index = blocks.findIndex (b) => b.uniqueKey == block.uniqueKey
        blocks[index] = _l.extend block, {geometry: masked_geometry}
    # Remove any block completely covered by another unless its an artboard (don't mess with components)
    blocks = blocks.filter (block1, i) => not blocks.some (block2, j) =>
        block2.contains(block1) and i < j and block1 not instanceof ArtboardBlock
    # Remove any blocks completely covered by a mask
    blocks = blocks.filter (block) => block not in blocksToRemove
    return [blocks, blockLayerBiMap]
importLayer = (layer, depth = 0, parent_x = 0, parent_y = 0) ->
    # Recursively import one Sketch layer (and its sublayers) into Pagedraw
    # blocks. Sketch frames are parent-relative, so the parent's absolute origin
    # (parent_x, parent_y) is threaded down the recursion.
    # Returns [blocks, blockLayerBiMap]; the BiMap links block <-> source layer.
    blockLayerBiMap = new BiMap()
    importChildren = (layer, depth, x, y) ->
        # Import sublayers and merge their block<->layer mappings into ours.
        return _l.flatten layer.layers.map (l) ->
            [children, subLayerBiMap] = importLayer(l, depth + 1, x, y)
            blockLayerBiMap.merge(subLayerBiMap)
            return children
    # Invisible layers contribute nothing.
    return [[], blockLayerBiMap] unless layer.isVisible
    x = parent_x + layer.frame.x
    y = parent_y + layer.frame.y
    log depth + ' / ' + layer['<class>'] + ' / ' + layer.name
    createShadow = (obj) => new Model.tuple_named['box-shadow']({
        color: obj.color.value
        offsetX: obj.offsetX
        offsetY: obj.offsetY
        blurRadius: obj.blurRadius
        spreadRadius: obj.spread
    })
    style = layer.style
    # BUGFIX: restore the mangled call to sketchIDToUniqueKey (the hashing
    # helper defined in this file and used for all dynamicable keys below).
    blockKey = sketchIDToUniqueKey(layer.objectID)
    # Common fields shared by every block type we might construct.
    block =
        top: y
        left: x
        width: layer.frame.width
        height: layer.frame.height
        name: layer.name
        uniqueKey: blockKey

    ## Image block
    # We identify something as an image if it and its recursive sublayers don't have any non-image (other primitives)
    if isImage(layer)
        # strip extension and scaling from objectID to match format add_exports.coffee will mutate export names to
        exportName = "#{layer.objectID.substr(0, 36)}.png"
        # FIXME this is awful code. Never just attach a new property to an object of an existing type.
        # Especially on a Model like ImageBlock where ImageBlock.properties are enumerated. If a property exists on
        # one object of a type, the property must exist on all objects of that type.
        image_block = _l.extend new ImageBlock(block), {exportInfo: {name: exportName, type: 'png'}}
        blockLayerBiMap.set(image_block, layer)
        return [[image_block], blockLayerBiMap]

    ## Artboard block
    else if layer['<class>'] == 'MSArtboardGroup' or layer['<class>'] == 'MSSymbolMaster'
        artboard_block = _l.extend new ArtboardBlock(block), {symbolId: layer.symbolID}
        # FIXME support gradients
        set_dyn_attr(artboard_block, 'color', layer.backgroundColor.value, blockKey) if layer.backgroundColor?["<class>"] == 'MSColor'
        artboard_block.includeColorInCompilation = false if layer['<class>'] == 'MSSymbolMaster' and layer.includeBackgroundColorInExport == 0
        # we assume all artboards in Sketch are pages, all symbolmasters aren't
        artboard_block.is_screenfull = (layer['<class>'] == 'MSArtboardGroup')
        children = importChildren(layer, depth, x, y)
        # Sketch artboards mask child layers, so clip blocks inside artboards
        # note that there are rules for when we can do this and when we can't. Let's fix incrementally.
        # - Images must be masked; clipping will always be wrong
        # - Text cannot be clipped in any meaningful way. Text Layers may be larger than they need to be and
        #   hopefully we're only clipping empty space
        # - Borders on rectangles may be offscreen on 3 out of 4 sides. Plain rectangles are otherwise perfect
        #   to clip.
        masked_children = _l.compact children.map (child) ->
            masked_geometry = child.intersection(artboard_block)
            # if child is entirely outside artboard, the intersection is null
            if masked_geometry == null then return null
            return _l.extend child, {geometry: masked_geometry}
        blockLayerBiMap.set(artboard_block, layer)
        arboardWithChildren = _l.concat [artboard_block], masked_children
        return [arboardWithChildren, blockLayerBiMap]

    ## Text block
    else if layer['<class>'] == 'MSTextLayer'
        block.isUnderline = true if style.textStyle?['NSUnderline'] == 1
        # Fixme: Line height is coming from maximumLineHeight. Not sure what it should be in Sketch
        lineHeight = style.textStyle?['NSParagraphStyle']?.style?.maximumLineHeight
        block.lineHeight = lineHeight if lineHeight? and lineHeight != 0 and lineHeight != block.fontSize
        # Right now width: auto is very bad in Pagedraw so we never do it. If you want widtH: auto, set it
        # explicitly in our editor
        block.contentDeterminesWidth = false
        # Sketch uses numbers to describe textAlignment
        alignmentOptions = {'0': 'left', '1': 'right', '2': 'center', '3': 'justify'}
        block.textAlign = alignmentOptions[Number style.textStyle?['NSParagraphStyle']?.style.alignment]
        text_block = new TextBlock(block)
        # Remap font family from Sketch -> Pagedraw
        {fontFamily, fontWeight, isItalics} = parseFont style.textStyle?['NSFont']?['name'], style.textStyle?['NSFont']?['family']
        text_block.fontFamily = fontFamily
        text_block.isItalics = isItalics
        if fontWeight == "700"
            text_block.isBold = true
        else if fontWeight != "400"
            text_block.hasCustomFontWeight = true
            set_dyn_attr text_block, 'fontWeight', fontWeight, blockKey
        # Intentional tripwire used by tests to exercise importer error handling.
        if layer.attributedString.value.text == 'Crash if importer encounters this exact text'
            "".property.that.doesnt.exist = 9
        set_dyn_attr(text_block, 'textContent', layer.attributedString.value.text, blockKey)
        set_dyn_attr(text_block, 'fontSize', style.textStyle?['NSFont']['attributes']['NSFontSizeAttribute'], blockKey)
        set_dyn_attr(text_block, 'kerning', style.textStyle?['NSKern'], blockKey)
        # The font color lives in different places depending on Sketch version;
        # try each known location in turn.
        if style.textStyle?['NSColor']?.color?
            set_dyn_attr(text_block, 'fontColor', setOpacity(rgbaTransform(style.textStyle?['NSColor'].color), style.contextSettings.opacity), blockKey)
        else if style.textStyle?['MSAttributedStringColorAttribute']?.value?
            set_dyn_attr(text_block, 'fontColor', style.textStyle['MSAttributedStringColorAttribute'].value, blockKey)
        else if style.textStyle?.MSAttributedStringColorDictionaryAttribute?
            colorMap = {red: 'r', green: 'g', blue: 'b', alpha: 'a'}
            color = tinycolor _l.transform style.textStyle.MSAttributedStringColorDictionaryAttribute, (acc, val, key) =>
                # BUGFIX: tinycolor expects alpha in [0, 1]; only r/g/b are 0-255.
                # The original scaled alpha by 255 too, which tinycolor clamps
                # back to 1, silently losing text transparency.
                acc[colorMap[key]] = if key == 'alpha' then val else Math.round(255 * val)
            set_dyn_attr(text_block, 'fontColor', color.toRgbString(), blockKey)
        else if style.fills?[0]?.isEnabled == 1 and style.fills[0].color?.value?
            set_dyn_attr(text_block, 'fontColor', tinycolor(style.fills[0].color.value).toRgbString(), blockKey)
        blockLayerBiMap.set(text_block, layer)
        return [[text_block], blockLayerBiMap]

    ## Layout block
    else if layer['<class>'] == 'MSRectangleShape'
        # In Sketch, the color of a MSRectangleShape comes from the parent
        block.borderRadius = layer.fixedRadius
        parentStyle = layer.parent?.style
        getRgbaValue = =>
            # Fill color with the layer's own opacity folded into the alpha channel.
            layerOpacity = parentStyle.contextSettings?.opacity
            return parentStyle.fills[0].color.value if not layerOpacity?
            color = tinycolor(parentStyle.fills[0].color.value)
            return color.setAlpha(layerOpacity * color.getAlpha()).toRgbString()
        getAngleDegrees = (opp, adj) =>
            # CSS gradient angle (degrees) from the gradient's from->to vector.
            # NOTE(review): returns undefined when opp == 0 and adj == 0 — confirm
            # callers tolerate that.
            return 180 if adj == 0 and opp > 0
            return 0 if adj == 0 and opp < 0
            angle = Math.atan(opp / adj) * (180 / Math.PI)
            return angle + 270 if (0 <= angle <= 90 and adj < 0) or (-90 <= angle < 0 and adj < 0)
            return angle + 90 if (-90 <= angle <= 0 and adj > 0) or (0 < angle <= 90 and adj > 0)
        border = parentStyle.borders[0]
        if border?.isEnabled == 1
            block.borderThickness = border.thickness
            block.borderColor = border.color.value
            # Pagedraw has no border outside property, so we simulate it by increasing the block size
            if border.position == 2
                block.left -= border.thickness
                block.top -= border.thickness
                block.width += (border.thickness * 2)
                block.height += (border.thickness * 2)
        block.outerBoxShadows = parentStyle.shadows.filter((shadow) => shadow.isEnabled == 1).map createShadow
        block.innerBoxShadows = parentStyle.innerShadows.filter((shadow) => shadow.isEnabled == 1).map createShadow
        # FillType 0 is for solid fills
        block.hasGradient = true if parentStyle.fills[0]?.fillType != 0
        layout_block = new LayoutBlock(block)
        set_dyn_attr(layout_block, 'color', getRgbaValue(), blockKey) if parentStyle.fills[0]?
        gradient = parentStyle.fills[0]?.gradient
        if parentStyle.fills[0]?.fillType != 0 and gradient?
            set_dyn_attr(layout_block, 'color', gradient.stops[0]?.color.value, blockKey)
            set_dyn_attr(layout_block, 'gradientEndColor', gradient.stops[1]?.color.value, blockKey)
            set_dyn_attr(layout_block, 'gradientDirection', getAngleDegrees((gradient.to.y - gradient.from.y), (gradient.to.x - gradient.from.x)), blockKey)
        # A disabled fill renders as fully transparent.
        set_dyn_attr(layout_block, 'color', setOpacity(getRgbaValue(), 0), blockKey) if parentStyle.fills[0]?.isEnabled == 0
        blockLayerBiMap.set(layout_block, layer)
        return [[layout_block], blockLayerBiMap]

    ## Instance block
    else if layer['<class>'] == 'MSSymbolInstance'
        instance_block = _l.extend new InstanceBlock(block), {symbolId: layer.symbolID}
        blockLayerBiMap.set(instance_block, layer)
        return [[instance_block], blockLayerBiMap]

    ## Recursive case
    else if hasChildren(layer)
        return [importChildren(layer, depth, x, y), blockLayerBiMap]

    ## Unknown Layer class
    else
        console.log 'Unknown layer class: ' + layer['<class>']
        return [[], blockLayerBiMap]
exports.importFromSketch = importFromSketch = (inputSketchFilePath, temp_dir, image_upload_strategy, STUB_FOR_TESTS = false) ->
    # Entry point: convert the .sketch file at inputSketchFilePath into a
    # serialized Pagedraw Doc (the resolved value of the returned Promise).
    # Image layers are exported via sketchtool and handed to
    # image_upload_strategy(path, data, content_type, cb) for storage.
    preprocess_sketch_dir = path.join(temp_dir, 'extracted/')
    sketchFilePath = path.join(temp_dir, 'with-exports.sketch')
    export_dir = path.join(temp_dir, 'artifacts/')
    processId = Math.floor(Math.random() * 10000)
    # declare the variable up here so it's not scoped to the inner function
    blocks = []
    fontsUsed = []
    blockLayerBiMap = new BiMap()
    console.time "#{processId}-total startup"
    # Run sketch preprocessing and the sketchtool dump+import in parallel.
    Promise.all [preprocess_sketch(inputSketchFilePath, sketchFilePath, preprocess_sketch_dir, STUB_FOR_TESTS),
        stubbable "sketch-import-sketchtooldump", inputSketchFilePath, -> new Promise (resolve, reject) ->
            console.time "#{processId}-dump"
            sketchDump = ""
            stderr = ""
            dumpProcess = child_process.spawn("sketchtool", ["dump", inputSketchFilePath])
            # some sketch dump outputs are too big to fit in a node string. To avoid getting multiple
            # of the same error we use .spawn to catch these errors ourself
            dumpProcess.stdout.on "data", (data) =>
                try
                    sketchDump += data
                catch error
                    reject({reason: "Node.js string length exceeded", error})
            dumpProcess.stderr.on "data", (data) => stderr += data
            dumpProcess.on "close", (code) ->
                console.timeEnd "#{processId}-dump"
                console.log 'sketchtool dump ended'
                # NOTE(review): on failure this rejects and then still calls resolve();
                # harmless because a settled promise ignores the later resolve, but brittle.
                reject({reason: 'Malformed Sketch file. Unable to Sketch dump', stderr}) if code != 0
                resolve(sketchDump)
        .catch (e) ->
            console.log e
            throw e.reason
        .then (data) ->
            console.time "#{processId}-parse"
            try
                JSON.parse(data)
            catch e
                throw 'Malformed Sketch file. Unable to parse JSON'
        .then (sketchJson) ->
            console.timeEnd "#{processId}-parse"
            # NOTE(review): restarts a timer under the already-used "-parse" label;
            # presumably a typo for a differently named timer.
            console.time "#{processId}-parse"
            console.time "total import layer time-#{processId}"
            blocks_by_page = sketchJson.pages.map (page) ->
                # importPage returns [blocks, biMap]; _l.zip of that single array
                # yields [[blocks], [biMap]], hence the [0] below.
                [pageBlocks, mapArray] = _l.zip(importPage(page))
                blockLayerBiMap.merge(subLayerMap) for subLayerMap in mapArray
                return pageBlocks[0]
            # concat pages vertically
            space_between_pages = 140
            next_page_start = 100
            for blocks_in_page in blocks_by_page
                # get the independent frame of the page
                page_outer_geometry = Block.unionBlock(blocks_in_page)
                # skip this page if it's empty
                continue if page_outer_geometry == null
                for block in blocks_in_page
                    fontsUsed.push(block.fontFamily) if block instanceof TextBlock and block.fontFamily?.name?
                # move the blocks in the page to their place in the unified page
                delta_y = next_page_start - page_outer_geometry.top
                block.top += delta_y for block in blocks_in_page
                # add the block's pages to the doc
                blocks.push(block) for block in blocks_in_page
                # start the next page space_between_pages pixels after the last page
                next_page_start = next_page_start + page_outer_geometry.height + space_between_pages
            # Resolve instance and component refs from Sketch symbols
            potentialComponents = blocks.filter (b) -> b instanceof ArtboardBlock
            for block in blocks when block instanceof InstanceBlock
                sourceComponent = _l.find potentialComponents, (c) -> c.symbolId == block.symbolId
                block.sourceRef = sourceComponent?.componentSpec.componentRef
            console.timeEnd "total import layer time-#{processId}"
            # we're mutating the blocks
            return null
    ]
    # Export all images that will have ImageBlocks
    .then ([localFontIdMapping]) -> return new Promise (resolve, reject) ->
        # localFontIdMapping is preprocess_sketch's resolution -- presumably a
        # {layerId: fontName} map for fonts Pagedraw doesn't know; verify in
        # preprocess-sketch.coffee.
        console.timeEnd "#{processId}-total startup"
        console.time "#{processId}-total export time"
        throw "Sketch Importer imported an empty doc" if Object.keys(blocks).length <= 1
        images = blocks.filter((b) -> b.exportInfo?)
        MAX_BATCH_SIZE = 400
        PARALLEL_BATCHES = 8
        effective_batch_size = Math.min MAX_BATCH_SIZE, Math.ceil(images.length / PARALLEL_BATCHES)
        uploadPromises = []
        blockAndLayerHaveMatchingLocalFont = (block, layer, layerId) => block.fontFamily instanceof LocalUserFont and block.fontFamily.name == 'replace-me' and layer.objectID == layerId
        # Match local user fonts with their blocks, convert to LocalUserFonts if we can't reconcile with a pagedraw font
        for layerId, fontName of localFontIdMapping
            for [block, layer] in Array.from(blockLayerBiMap.forwardMap.entries()).filter(([block, layer]) => blockAndLayerHaveMatchingLocalFont(block, layer, layerId))
                # Now that we have more font info, try parsing again.
                {fontFamily, fontWeight, isItalics} = parseFont fontName
                if fontFamily.name == 'replace-me'
                    # if parsing still fails then the font must not be installed locally and not in our doc
                    # in our doc fallback to a LocalUserFont with the plist name
                    fontFamily.name = fontName
                block.isItalics = isItalics
                if fontWeight == "700"
                    block.isBold = true
                else if fontWeight != "400"
                    block.hasCustomFontWeight = true
                    set_dyn_attr block, 'fontWeight', fontWeight, sketchIDToUniqueKey blockLayerBiMap.getForward(block).objectID
                fontsUsed.push block.fontFamily = fontFamily
        importChunk = (chunk) ->
            # Export one chunk of image layers with sketchtool, then queue their uploads.
            image_export = stubbable "sketch-import-image-export", export_dir, chunk, -> new Promise (resolve, reject) ->
                batchId = Math.floor(Math.random() * 10000)
                console.time(batchId + '-export')
                layer_export_process = child_process.spawn 'sketchtool', ['export', 'layers', sketchFilePath, "--output=#{export_dir}", '--use-id-for-name', '--save-for-web', "--items=#{chunk.map((block) => blockLayerBiMap.getForward(block).objectID).join(',')}"]
                layer_export_process.on 'close', (code) ->
                    console.timeEnd(batchId + '-export')
                    reject('Unable to export images. Sketchtool returned non-zero') if code != 0
                    resolve()
            # Wait for sketchdump export to finish
            .then ->
                console.log 'batch_size:', effective_batch_size, 'parallel_batches:', PARALLEL_BATCHES, 'image_format:', 'png'
                uploadPromises = uploadPromises.concat chunk.map (block) -> new Promise (resolve, reject) ->
                    fs.readFile path.resolve(export_dir, block.exportInfo.name), (err, data) ->
                        if err
                            # Best-effort: a missing exported file skips the upload instead of failing the import.
                            console.log "Unable to read file #{block.exportInfo.name}. Error #{err}. Proceeding cautiously."
                            return resolve()
                        # We use current timestamp to make sure subsequent uploads go to different paths. Otherwise, chrome will cache
                        # and the editor won't update. Ideally, we'd use content-addressable addressing.
                        uploadPath = "#{block.uniqueKey}-#{Date.now()}-#{block.exportInfo.name}"
                        content_type = switch block.exportInfo.type
                            when "svg" then "image/svg+xml"
                            else "binary/octet-stream"
                        # FIXME if it's an SVG we should probably set preserveAspectRatio="none" on it so it stretches on resize in Pagedraw
                        image_upload_strategy uploadPath, data, content_type, (err, data) ->
                            if err
                                console.log "Unable to upload file to #{uploadPath} in S3. Error #{err}. Proceeding cautiously."
                                return resolve()
                            block.image = (Dynamicable String).from(data.Location)
                            resolve()
        imageChunks = _l.chunk(images, effective_batch_size)
        promiseProducer = =>
            # NOTE(review): .pop() consumes chunks from the end, so batches run in
            # reverse order; harmless since uploads are independent.
            return null if _l.isEmpty(imageChunks)
            return importChunk(imageChunks.pop())
        pool = new PromisePool(promiseProducer, PARALLEL_BATCHES)
        pool.start().then () =>
            Promise.all uploadPromises
            .then () => resolve()
        .catch (e) ->
            console.log 'Error batch exporting images:', e
            throw e
    .then ->
        console.timeEnd "#{processId}-total export time"
        # LocalUserFont objects are instantiated every time we need it. So we compare by font name to remove duplicates
        new Doc({blocks, fonts: _l.uniqBy(fontsUsed, (font) => font.name).filter (font) => font.name != 'replace-me'}).serialize()
| true | fs = require 'fs'
path = require 'path'
child_process = require 'child_process'
crc64 = require 'crc64-ecma182'
_l = require 'lodash'
async = require 'async'
tinycolor = require 'tinycolor2'
PromisePool = require 'es6-promise-pool'
mkdirp = require 'mkdirp'
{Doc} = require '../src/doc'
{Dynamicable} = require '../src/dynamicable'
Block = require '../src/block'
TextBlock = require '../src/blocks/text-block'
{InstanceBlock} = require '../src/blocks/instance-block'
ArtboardBlock = require '../src/blocks/artboard-block'
LayoutBlock = require '../src/blocks/layout-block'
ImageBlock = require '../src/blocks/image-block'
{fontsByName, LocalUserFont} = require '../src/fonts'
{Model} = require '../src/model'
{preprocess_sketch} = require './preprocess-sketch'
{stubbable} = require '../src/test-stubber'
# When true, `log` below prints one line per imported layer (depth / class / name).
DEBUG = false
## utils
walkLayerTree = (layer, {preorder, postorder, ctx}) ->
    # Generic tree walk over a Sketch layer tree. `preorder` (optional) runs on
    # the way down and its return value becomes the ctx for the children;
    # `postorder` (optional) runs on the way up with the children's results.
    child_ctx = preorder?(layer, ctx)
    children = layer.layers ? []
    accum = (walkLayerTree(child, {preorder, postorder, ctx: child_ctx}) for child in children)
    return postorder?(layer, accum, ctx)
foreachLayer = (layer, fn) ->
    # Apply fn to every layer in the tree, children before parents.
    walkLayerTree layer, postorder: (pd) -> fn(pd)
# NOTE mapLayerTree is not pure: it does not make copies of nodes before handing them to fn
mapLayerTree = (pdom, fn) ->
    # Rebuild the tree bottom-up: each node's `layers` is replaced by the mapped
    # children before fn transforms the node itself.
    # BUGFIX: the original called the nonexistent `walkPdom`; the tree walker in
    # this file is `walkLayerTree` (used the same way by foreachLayer).
    walkLayerTree pdom, postorder: (pd, layers) ->
        pd.layers = layers
        return fn(pd)
# Maps macOS internal PostScript font names to human-readable display names.
# NOTE(review): appears unused within this chunk -- confirm callers before removing.
fontMapper = {
    '.SFNSDisplay-Regular' : 'San Francisco'
}
# Debug-only logger; no-op unless the module-level DEBUG flag is set.
log = (txt) -> console.log txt if DEBUG
# A layer is a container exactly when it has a non-empty `layers` array.
hasChildren = (layer) -> (layer.layers ? []).length > 0
# Leaves are the layers with no sublayers.
isLeaf = (layer) -> hasChildren(layer) == false
rgbaTransform = (str) ->
    # Convert an rgba() string whose channels are 0..1 floats (Sketch's format)
    # into a CSS rgba() string with 0..255 integer channels; alpha stays 0..1.
    rgba_match = /rgba\((.*),(.*),(.*),(.*)\)/.exec(str)?.slice(1)?.map(Number)
    # Robustness: the original destructured unconditionally and threw a
    # TypeError on any string that is not an rgba() literal; pass such
    # strings through unchanged instead.
    return str if not rgba_match?
    [r, g, b, a] = rgba_match
    scale = (color) -> Math.round(color * 255)
    return "rgba(#{scale(r)}, #{scale(g)}, #{scale(b)}, #{a})"
isImage = (layer) ->
    # A layer is treated as an image when it has an image fill, or when it is a
    # plain group whose recursive sublayers are all images themselves. Known
    # non-image primitives are excluded explicitly.
    return true if _l.head(layer.style?.fills)?.image?
    nonImageClasses = ['MSTextLayer', 'MSRectangleShape', 'MSSymbolInstance', 'MSArtboardGroup', 'MSSymbolMaster']
    return false if layer['<class>'] in nonImageClasses
    return _l.every layer.layers, isImage
sketchIDToUniqueKey = (objectID) ->
    # Hash the Sketch objectID down to a stable 16-character unique key.
    crc64.toUInt64String(crc64.crc64(objectID)).substring(0, 16)
set_dyn_attr = (block, prop, value, blockUniqueKey) ->
    # Assign a static value to one of the block's dynamicable props and derive a
    # deterministic uniqueKey for it from the owning block's key.
    #
    # As a safety precaution we refuse null/undefined staticValues: if the
    # sketchdump is missing a value we would otherwise write `undefined` into
    # the model. This sacrifices a few edge cases where an undefined
    # staticValue would be legitimate (props typed Dynamicable(Maybe X)), which
    # we shouldn't use undefineds for anyway. We could make Model.deserialize
    # require all props to be present, but that has meh implications. The most
    # correct fix is explicit defaults everywhere in the sketch importer: a
    # missing value should mean the Sketch default, not the Pagedraw default.
    return unless value?
    dynamicable = block[prop]
    dynamicable.staticValue = value
    dynamicable.uniqueKey = sketchIDToUniqueKey("#{blockUniqueKey}:#{prop}")
setOpacity = (rgb, opacity) =>
    # Apply an opacity override to a color string; pass the color through
    # untouched when no override is given.
    return rgb unless opacity?
    tinycolor(rgb).setAlpha(opacity).toRgbString()
layerInGroup = (groupObjectID, layer) =>
    # Walk up the parent chain; true iff some ancestor (or the layer itself)
    # has the given objectID.
    current = layer
    while current?
        return true if current.objectID == groupObjectID
        current = current.parent
    return false
# Weight-name suffixes seen in Sketch font names (lowercased), mapped to
# numeric CSS font-weight values. Used by parseFont to split names like
# "Roboto-Bold" into a family plus a weight.
fontWeightNames =
    thin: '100'
    extralight: '200'
    ultralight: '200'
    light: '300'
    book: '400'
    normal: '400'
    regular: '400'
    roman: '400'
    medium: '500'
    semibold: '600'
    demibold: '600'
    bold: '700'
    extrabold: '800'
    ultrabold: '800'
    black: '900'
    heavy: '900'
parseFont = (name, family = null) =>
    # Map a Sketch font name (e.g. "Roboto-Bold") plus its family name to a
    # Pagedraw {fontFamily, fontWeight, isItalics} triple.
    isItalics = Boolean name?.match('Italic')
    # Try to split "Family-Weight" / "Family Weight" style names into a known
    # Pagedraw font plus a numeric CSS weight (see fontWeightNames above).
    attemptToMatchFragment = (separator) =>
        fontFragments = name?.split(separator)
        return null if not (fontFragments?.length > 1)
        if fontWeight = fontWeightNames[_l.last(fontFragments).toLowerCase()]
            return {fontFamily, fontWeight, isItalics} if fontFamily = fontsByName[_l.initial(fontFragments).join('-')]
            return {fontFamily, fontWeight, isItalics} if fontFamily = fontsByName[_l.initial(fontFragments).join(' ')]
        return null
    return parsedFont if parsedFont = attemptToMatchFragment('-')
    return parsedFont if parsedFont = attemptToMatchFragment(' ')
    # fontFamily is an exact match with Pagedraw font name
    return {fontFamily, fontWeight: "400", isItalics} if fontFamily = fontsByName[family]
    # BUGFIX: the original condition `not family == 'System Font Regular'`
    # compiled to `(!family) == '…'`, which is always false, so this fallback
    # was dead code. Checked after the exact match so known fonts keep priority.
    if family and family != 'System Font Regular'
        return {fontFamily: new LocalUserFont({name: family}), fontWeight: "400", isItalics}
    # HACK: Use LocalUserFont({name: 'replace-me'}), if font is not uploaded AND not installed locally. We use name 'replace-me'
    # because sketchtool gives no info about what this font is. This will get overwritten later but needs to be a font to serialize.
    # FIXME: Modify the sketchtool dump result to have the correct font names
    return {fontFamily: new LocalUserFont({name: 'replace-me'}), fontWeight: "400", isItalics}
class BiMap
    # Bidirectional map: every (key, val) pair is stored in both directions so
    # lookups are O(1) from either side.
    constructor: ->
        @forwardMap = new Map()
        @reverseMap = new Map()

    getForward: (key) -> @forwardMap.get(key)

    getReverse: (key) -> @reverseMap.get(key)

    set: (key, val) ->
        # Keep both directions in sync.
        @forwardMap.set(key, val)
        @reverseMap.set(val, key)

    clear: ->
        @forwardMap.clear()
        @reverseMap.clear()

    keys: -> @forwardMap.keys()

    values: -> @forwardMap.values()

    merge: (map) ->
        # Absorb every pair of another BiMap into this one.
        @set(key, val) for [key, val] in Array.from(map.forwardMap)
        return
importPage = (sketchPage) ->
# Add parents to everyone
walkLayerTree sketchPage,
preorder: (layer, ctx) -> {parent: layer}
postorder: (layer, _accum, ctx={}) -> _l.extend layer, {parent: ctx.parent}
[blocks, blockLayerBiMap] = importLayer(sketchPage)
# Make sure no artboards are outside canvas by positioning them all starting at 100, 100
[minLeft, minTop] = [_l.min(_l.map(blocks, 'left')), _l.min(_l.map(blocks, 'top'))]
for block in blocks
block.left += 100 - minLeft
block.top += 100 - minTop
blocksToRemove = []
masks = Array.from(blockLayerBiMap.values())
.filter((layer) => layer.parent?.hasClippingMask == 1)
.map (maskLayer) =>
[blockLayerBiMap.getReverse(maskLayer), maskLayer.parent.parent?.objectID]
# Sketch masks crop the layers above them. Order is reflected in the sketch layerlist
# such that blocks listed higher within a group are above those listed lower.
for block in blocks
layer = blockLayerBiMap.getForward(block)
maskingBlock = masks.find ([mask_block, grandparent_object_id]) =>
block != mask_block and layerInGroup(grandparent_object_id, layer)
continue if not maskingBlock?
# Check that masking block is below block to be masked. Maps always iterate in insertion order and sketchtool dumps in layerlist order
continue if block == Array.from(blockLayerBiMap.keys()).find (b) => b == block or b == maskingBlock[0]
masked_geometry = block.intersection(maskingBlock[0])
if masked_geometry == null
blocksToRemove.push(block)
continue
index = blocks.findIndex (b) => b.uniqueKey == block.uniqueKey
blocks[index] = _l.extend block, {geometry: masked_geometry}
# Remove any block completely covered by another unless its an artboard (don't mess with components)
blocks = blocks.filter (block1, i) => not blocks.some (block2, j) =>
block2.contains(block1) and i < j and block1 not instanceof ArtboardBlock
# Remove any blocks completely covered by a mask
blocks = blocks.filter (block) => block not in blocksToRemove
return [blocks, blockLayerBiMap]
importLayer = (layer, depth = 0, parent_x = 0, parent_y = 0) ->
blockLayerBiMap = new BiMap()
importChildren = (layer, depth, x, y) ->
return _l.flatten layer.layers.map (l) ->
[children, subLayerBiMap] = importLayer(l, depth + 1, x, y)
blockLayerBiMap.merge(subLayerBiMap)
return children
return [[], blockLayerBiMap] unless layer.isVisible
x = parent_x + layer.frame.x
y = parent_y + layer.frame.y
log depth + ' / ' + layer['<class>'] + ' / ' + layer.name
createShadow = (obj) => new Model.tuple_named['box-shadow']({
color: obj.color.value
offsetX: obj.offsetX
offsetY: obj.offsetY
blurRadius: obj.blurRadius
spreadRadius: obj.spread
})
style = layer.style
blockKey = PI:KEY:<KEY>END_PIToUniqueKey(layer.objectID)
block =
top: y
left: x
width: layer.frame.width
height: layer.frame.height
name: layer.name
uniqueKey: blockKey
## Image block
# We identify something as an image if it and its recursive sublayers don't have any non-image (other primitives)
if isImage(layer)
# strip extension and scaling from objectID to match format add_exports.coffee will mutate export names to
exportName = "#{layer.objectID.substr(0, 36)}.png"
# FIXME this is awful code. Never just attach a new property to an object of an existing type.
# Especially on a Model like ImageBlock where ImageBlock.properties are enumerated. If a property exists on
# one object of a type, the property must exist on all objects of that type.
image_block = _l.extend new ImageBlock(block), {exportInfo: {name: exportName, type: 'png'}}
blockLayerBiMap.set(image_block, layer)
return [[image_block], blockLayerBiMap]
## Artboard block
else if layer['<class>'] == 'MSArtboardGroup' or layer['<class>'] == 'MSSymbolMaster'
artboard_block = _l.extend new ArtboardBlock(block), {symbolId: layer.symbolID}
# FIXME support gradients
set_dyn_attr(artboard_block, 'color', layer.backgroundColor.value, blockKey) if layer.backgroundColor?["<class>"] == 'MSColor'
artboard_block.includeColorInCompilation = false if layer['<class>'] == 'MSSymbolMaster' and layer.includeBackgroundColorInExport == 0
# we assume all artboards in Sketch are pages, all symbolmasters aren't
artboard_block.is_screenfull = (layer['<class>'] == 'MSArtboardGroup')
children = importChildren(layer, depth, x, y)
# Sketch artboards mask child layers, so clip blocks inside artboards
# note that there are rules for when we can do this and when we can't. Let's fix incrementally.
# - Images must be masked; clipping will always be wrong
# - Text cannot be clipped in any meaningful way. Text Layers may be larger than they need to be and
# hopefully we're only clipping empty space
# - Borders on rectangles may be offscreen on 3 out of 4 sides. Plain rectangles are otherwise perfect
# to clip.
masked_children = _l.compact children.map (child) ->
masked_geometry = child.intersection(artboard_block)
# if child is entirely outside artboard, the intersection is null
if masked_geometry == null then return null
return _l.extend child, {geometry: masked_geometry}
blockLayerBiMap.set(artboard_block, layer)
arboardWithChildren = _l.concat [artboard_block], masked_children
return [arboardWithChildren, blockLayerBiMap]
## Text block
else if layer['<class>'] == 'MSTextLayer'
block.isUnderline = true if style.textStyle?['NSUnderline'] == 1
# Fixme: Line height is coming from maximumLineHeight. Not sure what it should be in Sketch
lineHeight = style.textStyle?['NSParagraphStyle']?.style?.maximumLineHeight
block.lineHeight = lineHeight if lineHeight? and lineHeight != 0 and lineHeight != block.fontSize
# Right now width: auto is very bad in Pagedraw so we never do it. If you want widtH: auto, set it
# explicitly in our editor
block.contentDeterminesWidth = false
# Sketch uses numbers to describe textAlignment
alignmentOptions = {'0': 'left', '1': 'right', '2': 'center', '3': 'justify'}
block.textAlign = alignmentOptions[Number style.textStyle?['NSParagraphStyle']?.style.alignment]
text_block = new TextBlock(block)
# Remap font family from Sketch -> Pagedraw
{fontFamily, fontWeight, isItalics} = parseFont style.textStyle?['NSFont']?['name'], style.textStyle?['NSFont']?['family']
text_block.fontFamily = fontFamily
text_block.isItalics = isItalics
if fontWeight == "700"
text_block.isBold = true
else if fontWeight != "400"
text_block.hasCustomFontWeight = true
set_dyn_attr text_block, 'fontWeight', fontWeight, blockKey
if layer.attributedString.value.text == 'Crash if importer encounters this exact text'
"".property.that.doesnt.exist = 9
set_dyn_attr(text_block, 'textContent', layer.attributedString.value.text, blockKey)
set_dyn_attr(text_block, 'fontSize', style.textStyle?['NSFont']['attributes']['NSFontSizeAttribute'], blockKey)
set_dyn_attr(text_block, 'kerning', style.textStyle?['NSKern'], blockKey)
if style.textStyle?['NSColor']?.color?
set_dyn_attr(text_block, 'fontColor', setOpacity(rgbaTransform(style.textStyle?['NSColor'].color), style.contextSettings.opacity), blockKey)
else if style.textStyle?['MSAttributedStringColorAttribute']?.value?
set_dyn_attr(text_block, 'fontColor', style.textStyle['MSAttributedStringColorAttribute'].value, blockKey)
else if style.textStyle?.MSAttributedStringColorDictionaryAttribute?
colorMap = {red: 'r', green: 'g', blue: 'b', alpha: 'a'}
color = tinycolor _l.transform style.textStyle.MSAttributedStringColorDictionaryAttribute, (acc, val, key) =>
acc[colorMap[key]] = Math.round(255 * val)
set_dyn_attr(text_block, 'fontColor', color.toRgbString(), blockKey)
else if style.fills?[0]?.isEnabled == 1 and style.fills[0].color?.value?
set_dyn_attr(text_block, 'fontColor', tinycolor(style.fills[0].color.value).toRgbString(), blockKey)
blockLayerBiMap.set(text_block, layer)
return [[text_block], blockLayerBiMap]
## Layout block
else if layer['<class>'] == 'MSRectangleShape'
# In Sketch, the color of a MSRectangleShape comes from the parent
block.borderRadius = layer.fixedRadius
parentStyle = layer.parent?.style
getRgbaValue = =>
layerOpacity = parentStyle.contextSettings?.opacity
return parentStyle.fills[0].color.value if not layerOpacity?
color = tinycolor(parentStyle.fills[0].color.value)
return color.setAlpha(layerOpacity * color.getAlpha()).toRgbString()
getAngleDegrees = (opp, adj) =>
return 180 if adj == 0 and opp > 0
return 0 if adj == 0 and opp < 0
angle = Math.atan(opp / adj) * (180 / Math.PI)
return angle + 270 if (0 <= angle <= 90 and adj < 0) or (-90 <= angle < 0 and adj < 0)
return angle + 90 if (-90 <= angle <= 0 and adj > 0) or (0 < angle <= 90 and adj > 0)
border = parentStyle.borders[0]
if border?.isEnabled == 1
block.borderThickness = border.thickness
block.borderColor = border.color.value
# Pagedraw has no border outside property, so we simulate it by increasing the block size
if border.position == 2
block.left -= border.thickness
block.top -= border.thickness
block.width += (border.thickness * 2)
block.height += (border.thickness * 2)
block.outerBoxShadows = parentStyle.shadows.filter((shadow) => shadow.isEnabled == 1).map createShadow
block.innerBoxShadows = parentStyle.innerShadows.filter((shadow) => shadow.isEnabled == 1).map createShadow
# FillType 0 is for solid fills
block.hasGradient = true if parentStyle.fills[0]?.fillType != 0
layout_block = new LayoutBlock(block)
set_dyn_attr(layout_block, 'color', getRgbaValue(), blockKey) if parentStyle.fills[0]?
gradient = parentStyle.fills[0]?.gradient
if parentStyle.fills[0]?.fillType != 0 and gradient?
set_dyn_attr(layout_block, 'color', gradient.stops[0]?.color.value, blockKey)
set_dyn_attr(layout_block, 'gradientEndColor', gradient.stops[1]?.color.value, blockKey)
set_dyn_attr(layout_block, 'gradientDirection', getAngleDegrees((gradient.to.y - gradient.from.y), (gradient.to.x - gradient.from.x)), blockKey)
set_dyn_attr(layout_block, 'color', setOpacity(getRgbaValue(), 0), blockKey) if parentStyle.fills[0]?.isEnabled == 0
blockLayerBiMap.set(layout_block, layer)
return [[layout_block], blockLayerBiMap]
## Instance block
else if layer['<class>'] == 'MSSymbolInstance'
instance_block = _l.extend new InstanceBlock(block), {symbolId: layer.symbolID}
blockLayerBiMap.set(instance_block, layer)
return [[instance_block], blockLayerBiMap]
## Recursive case
else if hasChildren(layer)
return [importChildren(layer, depth, x, y), blockLayerBiMap]
## Unknown Layer class
else
console.log 'Unknown layer class: ' + layer['<class>']
return [[], blockLayerBiMap]
exports.importFromSketch = importFromSketch = (inputSketchFilePath, temp_dir, image_upload_strategy, STUB_FOR_TESTS = false) ->
preprocess_sketch_dir = path.join(temp_dir, 'extracted/')
sketchFilePath = path.join(temp_dir, 'with-exports.sketch')
export_dir = path.join(temp_dir, 'artifacts/')
processId = Math.floor(Math.random() * 10000)
# declare the variable up here so it's not scoped to the inner function
blocks = []
fontsUsed = []
blockLayerBiMap = new BiMap()
console.time "#{processId}-total startup"
Promise.all [preprocess_sketch(inputSketchFilePath, sketchFilePath, preprocess_sketch_dir, STUB_FOR_TESTS),
stubbable "sketch-import-sketchtooldump", inputSketchFilePath, -> new Promise (resolve, reject) ->
console.time "#{processId}-dump"
sketchDump = ""
stderr = ""
dumpProcess = child_process.spawn("sketchtool", ["dump", inputSketchFilePath])
# some sketch dump outputs are too big to fit in a node string. To avoid getting multiple
# of the same error we use .spawn to catch these errors ourself
dumpProcess.stdout.on "data", (data) =>
try
sketchDump += data
catch error
reject({reason: "Node.js string length exceeded", error})
dumpProcess.stderr.on "data", (data) => stderr += data
dumpProcess.on "close", (code) ->
console.timeEnd "#{processId}-dump"
console.log 'sketchtool dump ended'
reject({reason: 'Malformed Sketch file. Unable to Sketch dump', stderr}) if code != 0
resolve(sketchDump)
.catch (e) ->
console.log e
throw e.reason
.then (data) ->
console.time "#{processId}-parse"
try
JSON.parse(data)
catch e
throw 'Malformed Sketch file. Unable to parse JSON'
.then (sketchJson) ->
console.timeEnd "#{processId}-parse"
console.time "#{processId}-parse"
console.time "total import layer time-#{processId}"
blocks_by_page = sketchJson.pages.map (page) ->
[pageBlocks, mapArray] = _l.zip(importPage(page))
blockLayerBiMap.merge(subLayerMap) for subLayerMap in mapArray
return pageBlocks[0]
# concat pages vertically
space_between_pages = 140
next_page_start = 100
for blocks_in_page in blocks_by_page
# get the independent frame of the page
page_outer_geometry = Block.unionBlock(blocks_in_page)
# skip this page if it's empty
continue if page_outer_geometry == null
for block in blocks_in_page
fontsUsed.push(block.fontFamily) if block instanceof TextBlock and block.fontFamily?.name?
# move the blocks in the page to their place in the unified page
delta_y = next_page_start - page_outer_geometry.top
block.top += delta_y for block in blocks_in_page
# add the block's pages to the doc
blocks.push(block) for block in blocks_in_page
# start the next page space_between_pages pixels after the last page
next_page_start = next_page_start + page_outer_geometry.height + space_between_pages
# Resolve instance and component refs from Sketch symbols
potentialComponents = blocks.filter (b) -> b instanceof ArtboardBlock
for block in blocks when block instanceof InstanceBlock
sourceComponent = _l.find potentialComponents, (c) -> c.symbolId == block.symbolId
block.sourceRef = sourceComponent?.componentSpec.componentRef
console.timeEnd "total import layer time-#{processId}"
# we're mutating the blocks
return null
]
# Export all images that will have ImageBlocks
.then ([localFontIdMapping]) -> return new Promise (resolve, reject) ->
console.timeEnd "#{processId}-total startup"
console.time "#{processId}-total export time"
throw "Sketch Importer imported an empty doc" if Object.keys(blocks).length <= 1
images = blocks.filter((b) -> b.exportInfo?)
MAX_BATCH_SIZE = 400
PARALLEL_BATCHES = 8
effective_batch_size = Math.min MAX_BATCH_SIZE, Math.ceil(images.length / PARALLEL_BATCHES)
uploadPromises = []
blockAndLayerHaveMatchingLocalFont = (block, layer, layerId) => block.fontFamily instanceof LocalUserFont and block.fontFamily.name == 'replace-me' and layer.objectID == layerId
# Match local user fonts with their blocks, convert to LocalUserFonts if we can't reconcile with a pagedraw font
for layerId, fontName of localFontIdMapping
for [block, layer] in Array.from(blockLayerBiMap.forwardMap.entries()).filter(([block, layer]) => blockAndLayerHaveMatchingLocalFont(block, layer, layerId))
# Now that we have more font info, try parsing again.
{fontFamily, fontWeight, isItalics} = parseFont fontName
if fontFamily.name == 'replace-me'
# if parsing still fails then the font must not be installed locally and not in our doc
# in our doc fallback to a LocalUserFont with the plist name
fontFamily.name = fontName
block.isItalics = isItalics
if fontWeight == "700"
block.isBold = true
else if fontWeight != "400"
block.hasCustomFontWeight = true
set_dyn_attr block, 'fontWeight', fontWeight, sketchIDToUniqueKey blockLayerBiMap.getForward(block).objectID
fontsUsed.push block.fontFamily = fontFamily
importChunk = (chunk) ->
image_export = stubbable "sketch-import-image-export", export_dir, chunk, -> new Promise (resolve, reject) ->
batchId = Math.floor(Math.random() * 10000)
console.time(batchId + '-export')
layer_export_process = child_process.spawn 'sketchtool', ['export', 'layers', sketchFilePath, "--output=#{export_dir}", '--use-id-for-name', '--save-for-web', "--items=#{chunk.map((block) => blockLayerBiMap.getForward(block).objectID).join(',')}"]
layer_export_process.on 'close', (code) ->
console.timeEnd(batchId + '-export')
reject('Unable to export images. Sketchtool returned non-zero') if code != 0
resolve()
# Wait for sketchdump export to finish
.then ->
console.log 'batch_size:', effective_batch_size, 'parallel_batches:', PARALLEL_BATCHES, 'image_format:', 'png'
uploadPromises = uploadPromises.concat chunk.map (block) -> new Promise (resolve, reject) ->
fs.readFile path.resolve(export_dir, block.exportInfo.name), (err, data) ->
if err
console.log "Unable to read file #{block.exportInfo.name}. Error #{err}. Proceeding cautiously."
return resolve()
# We use current timestamp to make sure subsequent uploads go to different paths. Otherwise, chrome will cache
# and the editor won't update. Ideally, we'd use content-addressable addressing.
uploadPath = "#{block.uniqueKey}-#{Date.now()}-#{block.exportInfo.name}"
content_type = switch block.exportInfo.type
when "svg" then "image/svg+xml"
else "binary/octet-stream"
# FIXME if it's an SVG we should probably set preserveAspectRatio="none" on it so it stretches on resize in Pagedraw
image_upload_strategy uploadPath, data, content_type, (err, data) ->
if err
console.log "Unable to upload file to #{uploadPath} in S3. Error #{err}. Proceeding cautiously."
return resolve()
block.image = (Dynamicable String).from(data.Location)
resolve()
imageChunks = _l.chunk(images, effective_batch_size)
promiseProducer = =>
return null if _l.isEmpty(imageChunks)
return importChunk(imageChunks.pop())
pool = new PromisePool(promiseProducer, PARALLEL_BATCHES)
pool.start().then () =>
Promise.all uploadPromises
.then () => resolve()
.catch (e) ->
console.log 'Error batch exporting images:', e
throw e
.then ->
console.timeEnd "#{processId}-total export time"
# LocalUserFont objects are instantiated every time we need it. So we compare by font name to remove duplicates
new Doc({blocks, fonts: _l.uniqBy(fontsUsed, (font) => font.name).filter (font) => font.name != 'replace-me'}).serialize()
|
[
{
"context": "s file is part of the Konsserto package.\n *\n * (c) Jessym Reziga <jessym@konsserto.com>\n *\n * For the full copyrig",
"end": 74,
"score": 0.9998824000358582,
"start": 61,
"tag": "NAME",
"value": "Jessym Reziga"
},
{
"context": "f the Konsserto package.\n *\n * (c) Je... | node_modules/konsserto/lib/src/Konsserto/Component/Console/Input/InputDefinition.coffee | konsserto/konsserto | 2 | ###
* This file is part of the Konsserto package.
*
* (c) Jessym Reziga <jessym@konsserto.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
InputOption = use('@Konsserto/Component/Console/Input/InputOption')
Tools = use('@Konsserto/Component/Static/Tools')
#
# InputDefinition is a collection of InputArgument and InputOption
#
# @author Jessym Reziga <jessym@konsserto.com>
#
class InputDefinition
constructor:(definition) ->
@arguments = {}
@requiredCount = 0
@hasAnArrayArgument = false
@hasOptional = false
@options = {}
@shortcuts = {}
@setDefinition(definition)
setDefinition:(definition) ->
_arguments = {}
options = {}
cpt = 0
if definition?
for item in definition
if item.constructor.name == 'InputOption'
options[cpt++] = item
else
_arguments[cpt++] = item
@setArguments(_arguments)
@setOptions(options)
setArguments:(_arguments) ->
@arguments = {}
@requiredCount = 0
@hasOptional = false
@hasAnArrayArgument = false
@addArguments(_arguments)
addArguments:(_arguments) ->
if _arguments?
for index,argument of _arguments
@addArgument(argument)
addArgument:(argument) ->
if @arguments[argument.getName()]
throw new Error('An argument with name '+argument.getName()+' already exists.')
if @hasAnArrayArgument
throw new Error('Cannot add an argument after an array argument.')
if argument.isRequired() && @hasOptional
throw new Error('Cannot add a required argument after an optional one.')
if argument.isArray()
@hasAnArrayArgument = true
if argument.isRequired()
++@requiredCount
else
@hasOptional = true
@arguments[argument.getName()] = argument
getArgument:(name) ->
if !@hasArgument(name)
throw new Error('The '+name+' argument does not exist.')
_arguments = null
if Tools.isInt(name)
_arguments = Tools.toArray(@arguments)
else
_arguments = @arguments
return _arguments[name]
hasArgument:(name) ->
_arguments = null
if Tools.isInt(name)
_arguments = Tools.toArray(@arguments)
else
_arguments = @arguments
return _arguments[name]?
getArguments:() ->
return @arguments
getArgumentCount:() ->
return if @hasAnArrayArgument then Number.MAX_VALUE else @arguments.length
getArgumentRequiredCount:() ->
return @requiredCount
getArgumentDefaults:() ->
values = []
for index,argument of @arguments
values[argument.getName()] = argument.getDefault()
return values
setOptions:(options) ->
@options = {}
@shortcuts = {}
@addOptions(options)
addOptions:(options) ->
for index,option of options
@addOption(option)
addOption:(option) ->
if @options[option.getName()]?
throw new Error('An option named '+option.getName()+' already exists.')
if option.getShortcut()?
for shortcut in option.getShortcut().split('|')
if @shortcuts[shortcut]?
throw new Error('An option with shortcut '+shortcut+' already exists.')
@options[option.getName()] = option
if option.getShortcut()
for shortcut in option.getShortcut().split('|')
@shortcuts[shortcut] = option.getName()
getOption:(name) ->
if !@hasOption(name)
throw new Error('The --'+name+' option does not exist.')
return @options[name]
hasOption:(name) ->
return @options[name]?
getOptions:() ->
return @options
hasShortcut:(name) ->
return @shortcuts[name]?
getOptionForShortcut:(shortcut) ->
return @getOption(@shortcutToName(shortcut))
getOptionDefaults:() ->
values = []
for index,option of @options
values[option.getName()] = option.getDefault()
return values
shortcutToName:(shortcut) ->
if !@shortcuts[shortcut]?
throw new Error('The -'+shortcut+' option does not exist.')
return @shortcuts[shortcut]
getSynopsis:() ->
elements = []
for index,option of @getOptions()
shortcut = if option.getShortcut() then '-'+option.getShortcut()+'|' else ''
out = '['
if option.isValueRequired()
out += shortcut+'--'+option.getName()+'="..."'
else if option.isValueOptional()
out += shortcut+'--'+option.getName()+'[="..."]'
else
out += shortcut+'--'+option.getName()
out +=']'
elements.push(out)
for index,argument of @getArguments()
out = ''
if argument.isRequired()
out += argument.getName()
if argument.isArray()
out += '1'
else
out += '['+argument.getName()+']'
if argument.isArray()
out += '1'
elements.push(out)
if argument.isArray()
elements.push('... ['+argument.getName()+'N]')
return elements.join(' ')
module.exports = InputDefinition; | 137614 | ###
* This file is part of the Konsserto package.
*
* (c) <NAME> <<EMAIL>>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
InputOption = use('@Konsserto/Component/Console/Input/InputOption')
Tools = use('@Konsserto/Component/Static/Tools')
#
# InputDefinition is a collection of InputArgument and InputOption
#
# @author <NAME> <<EMAIL>>
#
class InputDefinition
constructor:(definition) ->
@arguments = {}
@requiredCount = 0
@hasAnArrayArgument = false
@hasOptional = false
@options = {}
@shortcuts = {}
@setDefinition(definition)
setDefinition:(definition) ->
_arguments = {}
options = {}
cpt = 0
if definition?
for item in definition
if item.constructor.name == 'InputOption'
options[cpt++] = item
else
_arguments[cpt++] = item
@setArguments(_arguments)
@setOptions(options)
setArguments:(_arguments) ->
@arguments = {}
@requiredCount = 0
@hasOptional = false
@hasAnArrayArgument = false
@addArguments(_arguments)
addArguments:(_arguments) ->
if _arguments?
for index,argument of _arguments
@addArgument(argument)
addArgument:(argument) ->
if @arguments[argument.getName()]
throw new Error('An argument with name '+argument.getName()+' already exists.')
if @hasAnArrayArgument
throw new Error('Cannot add an argument after an array argument.')
if argument.isRequired() && @hasOptional
throw new Error('Cannot add a required argument after an optional one.')
if argument.isArray()
@hasAnArrayArgument = true
if argument.isRequired()
++@requiredCount
else
@hasOptional = true
@arguments[argument.getName()] = argument
getArgument:(name) ->
if !@hasArgument(name)
throw new Error('The '+name+' argument does not exist.')
_arguments = null
if Tools.isInt(name)
_arguments = Tools.toArray(@arguments)
else
_arguments = @arguments
return _arguments[name]
hasArgument:(name) ->
_arguments = null
if Tools.isInt(name)
_arguments = Tools.toArray(@arguments)
else
_arguments = @arguments
return _arguments[name]?
getArguments:() ->
return @arguments
getArgumentCount:() ->
return if @hasAnArrayArgument then Number.MAX_VALUE else @arguments.length
getArgumentRequiredCount:() ->
return @requiredCount
getArgumentDefaults:() ->
values = []
for index,argument of @arguments
values[argument.getName()] = argument.getDefault()
return values
setOptions:(options) ->
@options = {}
@shortcuts = {}
@addOptions(options)
addOptions:(options) ->
for index,option of options
@addOption(option)
addOption:(option) ->
if @options[option.getName()]?
throw new Error('An option named '+option.getName()+' already exists.')
if option.getShortcut()?
for shortcut in option.getShortcut().split('|')
if @shortcuts[shortcut]?
throw new Error('An option with shortcut '+shortcut+' already exists.')
@options[option.getName()] = option
if option.getShortcut()
for shortcut in option.getShortcut().split('|')
@shortcuts[shortcut] = option.getName()
getOption:(name) ->
if !@hasOption(name)
throw new Error('The --'+name+' option does not exist.')
return @options[name]
hasOption:(name) ->
return @options[name]?
getOptions:() ->
return @options
hasShortcut:(name) ->
return @shortcuts[name]?
getOptionForShortcut:(shortcut) ->
return @getOption(@shortcutToName(shortcut))
getOptionDefaults:() ->
values = []
for index,option of @options
values[option.getName()] = option.getDefault()
return values
shortcutToName:(shortcut) ->
if !@shortcuts[shortcut]?
throw new Error('The -'+shortcut+' option does not exist.')
return @shortcuts[shortcut]
getSynopsis:() ->
elements = []
for index,option of @getOptions()
shortcut = if option.getShortcut() then '-'+option.getShortcut()+'|' else ''
out = '['
if option.isValueRequired()
out += shortcut+'--'+option.getName()+'="..."'
else if option.isValueOptional()
out += shortcut+'--'+option.getName()+'[="..."]'
else
out += shortcut+'--'+option.getName()
out +=']'
elements.push(out)
for index,argument of @getArguments()
out = ''
if argument.isRequired()
out += argument.getName()
if argument.isArray()
out += '1'
else
out += '['+argument.getName()+']'
if argument.isArray()
out += '1'
elements.push(out)
if argument.isArray()
elements.push('... ['+argument.getName()+'N]')
return elements.join(' ')
module.exports = InputDefinition; | true | ###
* This file is part of the Konsserto package.
*
* (c) PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
###
InputOption = use('@Konsserto/Component/Console/Input/InputOption')
Tools = use('@Konsserto/Component/Static/Tools')
#
# InputDefinition is a collection of InputArgument and InputOption
#
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
class InputDefinition
constructor:(definition) ->
@arguments = {}
@requiredCount = 0
@hasAnArrayArgument = false
@hasOptional = false
@options = {}
@shortcuts = {}
@setDefinition(definition)
setDefinition:(definition) ->
_arguments = {}
options = {}
cpt = 0
if definition?
for item in definition
if item.constructor.name == 'InputOption'
options[cpt++] = item
else
_arguments[cpt++] = item
@setArguments(_arguments)
@setOptions(options)
setArguments:(_arguments) ->
@arguments = {}
@requiredCount = 0
@hasOptional = false
@hasAnArrayArgument = false
@addArguments(_arguments)
addArguments:(_arguments) ->
if _arguments?
for index,argument of _arguments
@addArgument(argument)
addArgument:(argument) ->
if @arguments[argument.getName()]
throw new Error('An argument with name '+argument.getName()+' already exists.')
if @hasAnArrayArgument
throw new Error('Cannot add an argument after an array argument.')
if argument.isRequired() && @hasOptional
throw new Error('Cannot add a required argument after an optional one.')
if argument.isArray()
@hasAnArrayArgument = true
if argument.isRequired()
++@requiredCount
else
@hasOptional = true
@arguments[argument.getName()] = argument
getArgument:(name) ->
if !@hasArgument(name)
throw new Error('The '+name+' argument does not exist.')
_arguments = null
if Tools.isInt(name)
_arguments = Tools.toArray(@arguments)
else
_arguments = @arguments
return _arguments[name]
hasArgument:(name) ->
_arguments = null
if Tools.isInt(name)
_arguments = Tools.toArray(@arguments)
else
_arguments = @arguments
return _arguments[name]?
getArguments:() ->
return @arguments
getArgumentCount:() ->
return if @hasAnArrayArgument then Number.MAX_VALUE else @arguments.length
getArgumentRequiredCount:() ->
return @requiredCount
getArgumentDefaults:() ->
values = []
for index,argument of @arguments
values[argument.getName()] = argument.getDefault()
return values
setOptions:(options) ->
@options = {}
@shortcuts = {}
@addOptions(options)
addOptions:(options) ->
for index,option of options
@addOption(option)
addOption:(option) ->
if @options[option.getName()]?
throw new Error('An option named '+option.getName()+' already exists.')
if option.getShortcut()?
for shortcut in option.getShortcut().split('|')
if @shortcuts[shortcut]?
throw new Error('An option with shortcut '+shortcut+' already exists.')
@options[option.getName()] = option
if option.getShortcut()
for shortcut in option.getShortcut().split('|')
@shortcuts[shortcut] = option.getName()
getOption:(name) ->
if !@hasOption(name)
throw new Error('The --'+name+' option does not exist.')
return @options[name]
hasOption:(name) ->
return @options[name]?
getOptions:() ->
return @options
hasShortcut:(name) ->
return @shortcuts[name]?
getOptionForShortcut:(shortcut) ->
return @getOption(@shortcutToName(shortcut))
getOptionDefaults:() ->
values = []
for index,option of @options
values[option.getName()] = option.getDefault()
return values
shortcutToName:(shortcut) ->
if !@shortcuts[shortcut]?
throw new Error('The -'+shortcut+' option does not exist.')
return @shortcuts[shortcut]
getSynopsis:() ->
elements = []
for index,option of @getOptions()
shortcut = if option.getShortcut() then '-'+option.getShortcut()+'|' else ''
out = '['
if option.isValueRequired()
out += shortcut+'--'+option.getName()+'="..."'
else if option.isValueOptional()
out += shortcut+'--'+option.getName()+'[="..."]'
else
out += shortcut+'--'+option.getName()
out +=']'
elements.push(out)
for index,argument of @getArguments()
out = ''
if argument.isRequired()
out += argument.getName()
if argument.isArray()
out += '1'
else
out += '['+argument.getName()+']'
if argument.isArray()
out += '1'
elements.push(out)
if argument.isArray()
elements.push('... ['+argument.getName()+'N]')
return elements.join(' ')
module.exports = InputDefinition; |
[
{
"context": "l\", \"utf-8\")\n mint.ejs input, {locals: {name: \"My Name\"}}, (error, result) ->\n assert.equal result,",
"end": 4660,
"score": 0.9926389455795288,
"start": 4653,
"tag": "NAME",
"value": "My Name"
},
{
"context": "nt.compile(template: input, engine: 'ejs')(na... | test/mintTest.coffee | lancejpollard/mint.js | 3 | mint = require('../mint')
fs = require('fs')
global.assert = require("chai").assert
describe "mint", ->
it 'should render sequentially based on filename', (done) ->
output = '''
$(document).ready(function() {
return alert("Hello World");
});
'''
mint.render path: "test/fixtures/javascripts/some.extensions.js.coffee.ejs", locals: word: "Hello World", (error, result) ->
assert.equal result, output
done()
it 'should find engine', ->
assert.equal mint.engine(".styl"), "stylus"
assert.equal mint.engine("styl"), "stylus"
assert.equal mint.engine("eco"), "eco"
it "should render minified with clean-css", (done) ->
input = "body { background: red; }"
output = "body{background:red}"
mint.cleanCSS input, {}, (error, result) ->
assert.equal result, output
done()
it "should render minified css with yui", (done) ->
input = "body { background: red; }"
output = "body{background:red}"
mint.yui input, {}, (error, result) ->
assert.equal result, output
done()
it "should use the UglifyJS compressor", (done) ->
input = '''
$(document).ready(function() {
alert("ready!")
});
'''
output = '$(document).ready(function(){alert("ready!")})'
mint.uglifyjs input, {}, (error, result) ->
assert.equal result, output
done()
it "should render stylus", (done) ->
input = fs.readFileSync("./test/fixtures/stylesheets/stylus.styl", "utf-8")
output = fs.readFileSync("./test/fixtures/stylesheets/stylus.css", "utf-8")
mint.stylus input, {}, (error, result) ->
assert.equal result, output
done()
it "should throw error in stylus", (done) ->
path = "test/fixtures/stylesheets/stylus-error.styl"
input = fs.readFileSync(path, "utf-8")
mint.stylus input, path: path, (error, result) ->
assert.equal error.message, '''
stylus:2
1| body
> 2| background: red@
expected "indent", got "outdent", test/fixtures/stylesheets/stylus-error.styl
'''
done()
it "should render jade", (done) ->
input = fs.readFileSync("./test/fixtures/views/jade.jade", "utf-8")
output = fs.readFileSync("./test/fixtures/views/jade.html", "utf-8")
mint.jade input, {}, (error, result) ->
assert.equal result, output
done()
it 'should compile jade', ->
input = fs.readFileSync './test/fixtures/views/jade.jade', 'utf-8'
output = fs.readFileSync './test/fixtures/views/jade.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'jade')(), output
it 'should render dust', (done) ->
input = fs.readFileSync './test/fixtures/views/dust.dust', 'utf-8'
output = fs.readFileSync './test/fixtures/views/dust.html', 'utf-8'
mint.dust input, {locals: {name: 'World'}}, (error, result) ->
assert.equal result, output
done()
it 'should compile dust', (done) ->
input = fs.readFileSync './test/fixtures/views/dust.dust', 'utf-8'
output = fs.readFileSync './test/fixtures/views/dust.html', 'utf-8'
mint.compile(template: input, engine: 'dust') {name: 'World'}, (error, result) ->
assert.equal result, output
done()
it "should render haml", (done) ->
input = fs.readFileSync("./test/fixtures/views/haml.haml", "utf-8")
output = fs.readFileSync("./test/fixtures/views/haml.html", "utf-8")
mint.haml input, {}, (error, result) ->
assert.equal result, output
done()
it 'should compile haml', ->
input = fs.readFileSync './test/fixtures/views/haml.haml', 'utf-8'
output = fs.readFileSync './test/fixtures/views/haml.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'haml')(), output
it "should render haml-coffee", (done) ->
input = fs.readFileSync("./test/fixtures/views/haml-coffee.hamlc", "utf-8")
output = fs.readFileSync("./test/fixtures/views/haml-coffee.html", "utf-8")
mint.hamlcoffee input, locals: projects: [{ name: "Mobile app", url: "/projects/1", description: "Iteration 1" }], (error, result) ->
assert.equal result, output
done()
# it "should render doT", ->
# engine = new mint.DoT
# input = fs.readFileSync("./test/fixtures/views/doT.js", "utf-8")
# output = fs.readFileSync("./test/fixtures/views/doT.html", "utf-8")
# engine.render input, (error, result) ->
# assert.equal result, output
it "should render ejs", ->
input = fs.readFileSync("./test/fixtures/views/ejs.ejs", "utf-8")
output = fs.readFileSync("./test/fixtures/views/ejs.html", "utf-8")
mint.ejs input, {locals: {name: "My Name"}}, (error, result) ->
assert.equal result, output
it 'should compile ejs', ->
input = fs.readFileSync './test/fixtures/views/ejs.ejs', 'utf-8'
output = fs.readFileSync './test/fixtures/views/ejs.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'ejs')(name: 'My Name'), output
it "should render coffee script", ->
input = fs.readFileSync("./test/fixtures/javascripts/coffee.coffee", "utf-8")
output = fs.readFileSync("./test/fixtures/javascripts/coffee.js", "utf-8")
mint.coffee input, {locals: {name: "My Name"}}, (error, result) ->
assert.equal result, output
it "should throw error with coffee script", ->
path = "test/fixtures/javascripts/coffee-error.coffee"
input = fs.readFileSync(path, "utf-8")
mint.coffee input, path: path, (error, result) ->
assert.equal error.message, 'missing ", starting on line 2, test/fixtures/javascripts/coffee-error.coffee'
it "should render eco", (done) ->
input = fs.readFileSync("test/fixtures/views/eco.coffee", "utf-8")
output = fs.readFileSync("test/fixtures/views/eco.html", "utf-8")
mint.eco input, locals: projects: [{ name: "Mobile app", url: "/projects/1", description: "Iteration 1" }], (error, result) ->
assert.equal result, output
done()
it 'should compile eco', ->
input = fs.readFileSync './test/fixtures/views/eco.coffee', 'utf-8'
output = fs.readFileSync './test/fixtures/views/eco.html', 'utf-8'
locals=
projects: [{
name: 'Mobile app',
url: '/projects/1',
description: 'Iteration 1'
}]
assert.equal mint.compile(template: input, engine: 'eco')(locals), output
it "should render coffeekup", (done) ->
input = fs.readFileSync("test/fixtures/views/kup.coffee", "utf-8")
output = fs.readFileSync("test/fixtures/views/kup.html", "utf-8")
mint.coffeekup input, {}, (error, result) ->
assert.equal result, output
done()
it "should render less", ->
input = fs.readFileSync("./test/fixtures/stylesheets/less.less", "utf-8")
output = fs.readFileSync("./test/fixtures/stylesheets/less.css", "utf-8")
mint.less input, (error, result) ->
assert.equal result, output
it "should render mustache", ->
input = fs.readFileSync("./test/fixtures/views/mustache.mustache", "utf-8")
output = fs.readFileSync("./test/fixtures/views/mustache.html", "utf-8")
locals = {name: "World", say_hello: -> "Hello" }
mint.mustache input, locals: locals, (error, result) ->
assert.equal result, output
it "should render handlebars", ->
input = fs.readFileSync("./test/fixtures/views/handlebars.hbs", "utf-8")
output = fs.readFileSync("./test/fixtures/views/handlebars.html", "utf-8")
locals = name: 'Vadim'
mint.handlebars input, locals: locals, (error, result) ->
assert.equal result, output
it 'should compile handlebars', ->
input = fs.readFileSync './test/fixtures/views/handlebars.hbs', 'utf-8'
output = fs.readFileSync './test/fixtures/views/handlebars.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'handlebars')(name: 'Vadim'), output
it "should render markdown", ->
input = fs.readFileSync("./test/fixtures/docs/markdown.markdown", "utf-8")
output = fs.readFileSync("./test/fixtures/docs/markdown.html", "utf-8")
mint.markdown input, (error, result) ->
assert.equal result, output
it 'should allow preprocessing stylus', ->
input = '''
div
box-shadow: 0 -2px 2px hsl(220, 20%, 40%),
0 -10px 10px hsl(220, 20%, 20%),
0 0 15px black,
inset 0 5px 1px hsla(220, 80%, 10%, 0.4),
inset 0 0 5px hsla(220, 80%, 10%, 0.1),
inset 0 20px 15px hsla(220, 80%, 100%, 1),
inset 0 1px 0 hsl(219, 20%, 0%),
inset 0 -50px 50px -40px hsla(220, 80%, 10%, .3), /* gradient to inset */
inset 0 -1px 0px hsl(220, 20%, 20%),
inset 0 -2px 0px hsl(220, 20%, 40%),
inset 0 -2px 1px hsl(220, 20%, 65%)
'''
output = '''
div {
box-shadow: 0 -2px 2px #525f7a, 0 -10px 10px #29303d, 0 0 15px #000, inset 0 5px 1px rgba(5,19,46,0.40), inset 0 0 5px rgba(5,19,46,0.10), inset 0 20px 15px #fff, inset 0 1px 0 #000, inset 0 -50px 50px -40px rgba(5,19,46,0.30), inset 0 -1px 0px #29303d, inset 0 -2px 0px #525f7a, inset 0 -2px 1px #94a0b8;
}
'''
options =
preprocessor: (content) ->
content.replace /(\s+)(.*),\s+(?:\/\*.*\*\/)?\s*/mg, (_, indent, attribute) ->
"#{indent}#{attribute.replace(/\s+/g, " ")}, "
mint.stylus input, options, (error, result) ->
assert.equal result, output
mint.stylus.preprocessor = (content) ->
content.replace /(\s+)(.*),\s+(?:\/\*.*\*\/)?\s*/mg, (_, indent, attribute) ->
"#{indent}#{attribute.replace(/\s+/g, " ")}, "
mint.stylus input, (error, result) ->
assert.equal result, output
| 92163 | mint = require('../mint')
fs = require('fs')
global.assert = require("chai").assert
describe "mint", ->
it 'should render sequentially based on filename', (done) ->
output = '''
$(document).ready(function() {
return alert("Hello World");
});
'''
mint.render path: "test/fixtures/javascripts/some.extensions.js.coffee.ejs", locals: word: "Hello World", (error, result) ->
assert.equal result, output
done()
it 'should find engine', ->
assert.equal mint.engine(".styl"), "stylus"
assert.equal mint.engine("styl"), "stylus"
assert.equal mint.engine("eco"), "eco"
it "should render minified with clean-css", (done) ->
input = "body { background: red; }"
output = "body{background:red}"
mint.cleanCSS input, {}, (error, result) ->
assert.equal result, output
done()
it "should render minified css with yui", (done) ->
input = "body { background: red; }"
output = "body{background:red}"
mint.yui input, {}, (error, result) ->
assert.equal result, output
done()
it "should use the UglifyJS compressor", (done) ->
input = '''
$(document).ready(function() {
alert("ready!")
});
'''
output = '$(document).ready(function(){alert("ready!")})'
mint.uglifyjs input, {}, (error, result) ->
assert.equal result, output
done()
it "should render stylus", (done) ->
input = fs.readFileSync("./test/fixtures/stylesheets/stylus.styl", "utf-8")
output = fs.readFileSync("./test/fixtures/stylesheets/stylus.css", "utf-8")
mint.stylus input, {}, (error, result) ->
assert.equal result, output
done()
it "should throw error in stylus", (done) ->
path = "test/fixtures/stylesheets/stylus-error.styl"
input = fs.readFileSync(path, "utf-8")
mint.stylus input, path: path, (error, result) ->
assert.equal error.message, '''
stylus:2
1| body
> 2| background: red@
expected "indent", got "outdent", test/fixtures/stylesheets/stylus-error.styl
'''
done()
it "should render jade", (done) ->
input = fs.readFileSync("./test/fixtures/views/jade.jade", "utf-8")
output = fs.readFileSync("./test/fixtures/views/jade.html", "utf-8")
mint.jade input, {}, (error, result) ->
assert.equal result, output
done()
it 'should compile jade', ->
input = fs.readFileSync './test/fixtures/views/jade.jade', 'utf-8'
output = fs.readFileSync './test/fixtures/views/jade.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'jade')(), output
it 'should render dust', (done) ->
input = fs.readFileSync './test/fixtures/views/dust.dust', 'utf-8'
output = fs.readFileSync './test/fixtures/views/dust.html', 'utf-8'
mint.dust input, {locals: {name: 'World'}}, (error, result) ->
assert.equal result, output
done()
it 'should compile dust', (done) ->
input = fs.readFileSync './test/fixtures/views/dust.dust', 'utf-8'
output = fs.readFileSync './test/fixtures/views/dust.html', 'utf-8'
mint.compile(template: input, engine: 'dust') {name: 'World'}, (error, result) ->
assert.equal result, output
done()
it "should render haml", (done) ->
input = fs.readFileSync("./test/fixtures/views/haml.haml", "utf-8")
output = fs.readFileSync("./test/fixtures/views/haml.html", "utf-8")
mint.haml input, {}, (error, result) ->
assert.equal result, output
done()
it 'should compile haml', ->
input = fs.readFileSync './test/fixtures/views/haml.haml', 'utf-8'
output = fs.readFileSync './test/fixtures/views/haml.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'haml')(), output
it "should render haml-coffee", (done) ->
input = fs.readFileSync("./test/fixtures/views/haml-coffee.hamlc", "utf-8")
output = fs.readFileSync("./test/fixtures/views/haml-coffee.html", "utf-8")
mint.hamlcoffee input, locals: projects: [{ name: "Mobile app", url: "/projects/1", description: "Iteration 1" }], (error, result) ->
assert.equal result, output
done()
# it "should render doT", ->
# engine = new mint.DoT
# input = fs.readFileSync("./test/fixtures/views/doT.js", "utf-8")
# output = fs.readFileSync("./test/fixtures/views/doT.html", "utf-8")
# engine.render input, (error, result) ->
# assert.equal result, output
it "should render ejs", ->
input = fs.readFileSync("./test/fixtures/views/ejs.ejs", "utf-8")
output = fs.readFileSync("./test/fixtures/views/ejs.html", "utf-8")
mint.ejs input, {locals: {name: "<NAME>"}}, (error, result) ->
assert.equal result, output
it 'should compile ejs', ->
input = fs.readFileSync './test/fixtures/views/ejs.ejs', 'utf-8'
output = fs.readFileSync './test/fixtures/views/ejs.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'ejs')(name: '<NAME>'), output
it "should render coffee script", ->
input = fs.readFileSync("./test/fixtures/javascripts/coffee.coffee", "utf-8")
output = fs.readFileSync("./test/fixtures/javascripts/coffee.js", "utf-8")
mint.coffee input, {locals: {name: "<NAME>"}}, (error, result) ->
assert.equal result, output
it "should throw error with coffee script", ->
path = "test/fixtures/javascripts/coffee-error.coffee"
input = fs.readFileSync(path, "utf-8")
mint.coffee input, path: path, (error, result) ->
assert.equal error.message, 'missing ", starting on line 2, test/fixtures/javascripts/coffee-error.coffee'
it "should render eco", (done) ->
input = fs.readFileSync("test/fixtures/views/eco.coffee", "utf-8")
output = fs.readFileSync("test/fixtures/views/eco.html", "utf-8")
mint.eco input, locals: projects: [{ name: "Mobile app", url: "/projects/1", description: "Iteration 1" }], (error, result) ->
assert.equal result, output
done()
it 'should compile eco', ->
input = fs.readFileSync './test/fixtures/views/eco.coffee', 'utf-8'
output = fs.readFileSync './test/fixtures/views/eco.html', 'utf-8'
locals=
projects: [{
name: 'Mobile app',
url: '/projects/1',
description: 'Iteration 1'
}]
assert.equal mint.compile(template: input, engine: 'eco')(locals), output
it "should render coffeekup", (done) ->
input = fs.readFileSync("test/fixtures/views/kup.coffee", "utf-8")
output = fs.readFileSync("test/fixtures/views/kup.html", "utf-8")
mint.coffeekup input, {}, (error, result) ->
assert.equal result, output
done()
it "should render less", ->
input = fs.readFileSync("./test/fixtures/stylesheets/less.less", "utf-8")
output = fs.readFileSync("./test/fixtures/stylesheets/less.css", "utf-8")
mint.less input, (error, result) ->
assert.equal result, output
it "should render mustache", ->
input = fs.readFileSync("./test/fixtures/views/mustache.mustache", "utf-8")
output = fs.readFileSync("./test/fixtures/views/mustache.html", "utf-8")
locals = {name: "<NAME>", say_hello: -> "Hello" }
mint.mustache input, locals: locals, (error, result) ->
assert.equal result, output
it "should render handlebars", ->
input = fs.readFileSync("./test/fixtures/views/handlebars.hbs", "utf-8")
output = fs.readFileSync("./test/fixtures/views/handlebars.html", "utf-8")
locals = name: '<NAME>'
mint.handlebars input, locals: locals, (error, result) ->
assert.equal result, output
it 'should compile handlebars', ->
input = fs.readFileSync './test/fixtures/views/handlebars.hbs', 'utf-8'
output = fs.readFileSync './test/fixtures/views/handlebars.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'handlebars')(name: '<NAME>'), output
it "should render markdown", ->
input = fs.readFileSync("./test/fixtures/docs/markdown.markdown", "utf-8")
output = fs.readFileSync("./test/fixtures/docs/markdown.html", "utf-8")
mint.markdown input, (error, result) ->
assert.equal result, output
it 'should allow preprocessing stylus', ->
input = '''
div
box-shadow: 0 -2px 2px hsl(220, 20%, 40%),
0 -10px 10px hsl(220, 20%, 20%),
0 0 15px black,
inset 0 5px 1px hsla(220, 80%, 10%, 0.4),
inset 0 0 5px hsla(220, 80%, 10%, 0.1),
inset 0 20px 15px hsla(220, 80%, 100%, 1),
inset 0 1px 0 hsl(219, 20%, 0%),
inset 0 -50px 50px -40px hsla(220, 80%, 10%, .3), /* gradient to inset */
inset 0 -1px 0px hsl(220, 20%, 20%),
inset 0 -2px 0px hsl(220, 20%, 40%),
inset 0 -2px 1px hsl(220, 20%, 65%)
'''
output = '''
div {
box-shadow: 0 -2px 2px #525f7a, 0 -10px 10px #29303d, 0 0 15px #000, inset 0 5px 1px rgba(5,19,46,0.40), inset 0 0 5px rgba(5,19,46,0.10), inset 0 20px 15px #fff, inset 0 1px 0 #000, inset 0 -50px 50px -40px rgba(5,19,46,0.30), inset 0 -1px 0px #29303d, inset 0 -2px 0px #525f7a, inset 0 -2px 1px #94a0b8;
}
'''
options =
preprocessor: (content) ->
content.replace /(\s+)(.*),\s+(?:\/\*.*\*\/)?\s*/mg, (_, indent, attribute) ->
"#{indent}#{attribute.replace(/\s+/g, " ")}, "
mint.stylus input, options, (error, result) ->
assert.equal result, output
mint.stylus.preprocessor = (content) ->
content.replace /(\s+)(.*),\s+(?:\/\*.*\*\/)?\s*/mg, (_, indent, attribute) ->
"#{indent}#{attribute.replace(/\s+/g, " ")}, "
mint.stylus input, (error, result) ->
assert.equal result, output
| true | mint = require('../mint')
fs = require('fs')
global.assert = require("chai").assert
describe "mint", ->
it 'should render sequentially based on filename', (done) ->
output = '''
$(document).ready(function() {
return alert("Hello World");
});
'''
mint.render path: "test/fixtures/javascripts/some.extensions.js.coffee.ejs", locals: word: "Hello World", (error, result) ->
assert.equal result, output
done()
it 'should find engine', ->
assert.equal mint.engine(".styl"), "stylus"
assert.equal mint.engine("styl"), "stylus"
assert.equal mint.engine("eco"), "eco"
it "should render minified with clean-css", (done) ->
input = "body { background: red; }"
output = "body{background:red}"
mint.cleanCSS input, {}, (error, result) ->
assert.equal result, output
done()
it "should render minified css with yui", (done) ->
input = "body { background: red; }"
output = "body{background:red}"
mint.yui input, {}, (error, result) ->
assert.equal result, output
done()
it "should use the UglifyJS compressor", (done) ->
input = '''
$(document).ready(function() {
alert("ready!")
});
'''
output = '$(document).ready(function(){alert("ready!")})'
mint.uglifyjs input, {}, (error, result) ->
assert.equal result, output
done()
it "should render stylus", (done) ->
input = fs.readFileSync("./test/fixtures/stylesheets/stylus.styl", "utf-8")
output = fs.readFileSync("./test/fixtures/stylesheets/stylus.css", "utf-8")
mint.stylus input, {}, (error, result) ->
assert.equal result, output
done()
it "should throw error in stylus", (done) ->
path = "test/fixtures/stylesheets/stylus-error.styl"
input = fs.readFileSync(path, "utf-8")
mint.stylus input, path: path, (error, result) ->
assert.equal error.message, '''
stylus:2
1| body
> 2| background: red@
expected "indent", got "outdent", test/fixtures/stylesheets/stylus-error.styl
'''
done()
it "should render jade", (done) ->
input = fs.readFileSync("./test/fixtures/views/jade.jade", "utf-8")
output = fs.readFileSync("./test/fixtures/views/jade.html", "utf-8")
mint.jade input, {}, (error, result) ->
assert.equal result, output
done()
it 'should compile jade', ->
input = fs.readFileSync './test/fixtures/views/jade.jade', 'utf-8'
output = fs.readFileSync './test/fixtures/views/jade.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'jade')(), output
it 'should render dust', (done) ->
input = fs.readFileSync './test/fixtures/views/dust.dust', 'utf-8'
output = fs.readFileSync './test/fixtures/views/dust.html', 'utf-8'
mint.dust input, {locals: {name: 'World'}}, (error, result) ->
assert.equal result, output
done()
it 'should compile dust', (done) ->
input = fs.readFileSync './test/fixtures/views/dust.dust', 'utf-8'
output = fs.readFileSync './test/fixtures/views/dust.html', 'utf-8'
mint.compile(template: input, engine: 'dust') {name: 'World'}, (error, result) ->
assert.equal result, output
done()
it "should render haml", (done) ->
input = fs.readFileSync("./test/fixtures/views/haml.haml", "utf-8")
output = fs.readFileSync("./test/fixtures/views/haml.html", "utf-8")
mint.haml input, {}, (error, result) ->
assert.equal result, output
done()
it 'should compile haml', ->
input = fs.readFileSync './test/fixtures/views/haml.haml', 'utf-8'
output = fs.readFileSync './test/fixtures/views/haml.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'haml')(), output
it "should render haml-coffee", (done) ->
input = fs.readFileSync("./test/fixtures/views/haml-coffee.hamlc", "utf-8")
output = fs.readFileSync("./test/fixtures/views/haml-coffee.html", "utf-8")
mint.hamlcoffee input, locals: projects: [{ name: "Mobile app", url: "/projects/1", description: "Iteration 1" }], (error, result) ->
assert.equal result, output
done()
# it "should render doT", ->
# engine = new mint.DoT
# input = fs.readFileSync("./test/fixtures/views/doT.js", "utf-8")
# output = fs.readFileSync("./test/fixtures/views/doT.html", "utf-8")
# engine.render input, (error, result) ->
# assert.equal result, output
it "should render ejs", ->
input = fs.readFileSync("./test/fixtures/views/ejs.ejs", "utf-8")
output = fs.readFileSync("./test/fixtures/views/ejs.html", "utf-8")
mint.ejs input, {locals: {name: "PI:NAME:<NAME>END_PI"}}, (error, result) ->
assert.equal result, output
it 'should compile ejs', ->
input = fs.readFileSync './test/fixtures/views/ejs.ejs', 'utf-8'
output = fs.readFileSync './test/fixtures/views/ejs.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'ejs')(name: 'PI:NAME:<NAME>END_PI'), output
it "should render coffee script", ->
input = fs.readFileSync("./test/fixtures/javascripts/coffee.coffee", "utf-8")
output = fs.readFileSync("./test/fixtures/javascripts/coffee.js", "utf-8")
mint.coffee input, {locals: {name: "PI:NAME:<NAME>END_PI"}}, (error, result) ->
assert.equal result, output
it "should throw error with coffee script", ->
path = "test/fixtures/javascripts/coffee-error.coffee"
input = fs.readFileSync(path, "utf-8")
mint.coffee input, path: path, (error, result) ->
assert.equal error.message, 'missing ", starting on line 2, test/fixtures/javascripts/coffee-error.coffee'
it "should render eco", (done) ->
input = fs.readFileSync("test/fixtures/views/eco.coffee", "utf-8")
output = fs.readFileSync("test/fixtures/views/eco.html", "utf-8")
mint.eco input, locals: projects: [{ name: "Mobile app", url: "/projects/1", description: "Iteration 1" }], (error, result) ->
assert.equal result, output
done()
it 'should compile eco', ->
input = fs.readFileSync './test/fixtures/views/eco.coffee', 'utf-8'
output = fs.readFileSync './test/fixtures/views/eco.html', 'utf-8'
locals=
projects: [{
name: 'Mobile app',
url: '/projects/1',
description: 'Iteration 1'
}]
assert.equal mint.compile(template: input, engine: 'eco')(locals), output
it "should render coffeekup", (done) ->
input = fs.readFileSync("test/fixtures/views/kup.coffee", "utf-8")
output = fs.readFileSync("test/fixtures/views/kup.html", "utf-8")
mint.coffeekup input, {}, (error, result) ->
assert.equal result, output
done()
it "should render less", ->
input = fs.readFileSync("./test/fixtures/stylesheets/less.less", "utf-8")
output = fs.readFileSync("./test/fixtures/stylesheets/less.css", "utf-8")
mint.less input, (error, result) ->
assert.equal result, output
it "should render mustache", ->
input = fs.readFileSync("./test/fixtures/views/mustache.mustache", "utf-8")
output = fs.readFileSync("./test/fixtures/views/mustache.html", "utf-8")
locals = {name: "PI:NAME:<NAME>END_PI", say_hello: -> "Hello" }
mint.mustache input, locals: locals, (error, result) ->
assert.equal result, output
it "should render handlebars", ->
input = fs.readFileSync("./test/fixtures/views/handlebars.hbs", "utf-8")
output = fs.readFileSync("./test/fixtures/views/handlebars.html", "utf-8")
locals = name: 'PI:NAME:<NAME>END_PI'
mint.handlebars input, locals: locals, (error, result) ->
assert.equal result, output
it 'should compile handlebars', ->
input = fs.readFileSync './test/fixtures/views/handlebars.hbs', 'utf-8'
output = fs.readFileSync './test/fixtures/views/handlebars.html', 'utf-8'
assert.equal mint.compile(template: input, engine: 'handlebars')(name: 'PI:NAME:<NAME>END_PI'), output
it "should render markdown", ->
input = fs.readFileSync("./test/fixtures/docs/markdown.markdown", "utf-8")
output = fs.readFileSync("./test/fixtures/docs/markdown.html", "utf-8")
mint.markdown input, (error, result) ->
assert.equal result, output
it 'should allow preprocessing stylus', ->
input = '''
div
box-shadow: 0 -2px 2px hsl(220, 20%, 40%),
0 -10px 10px hsl(220, 20%, 20%),
0 0 15px black,
inset 0 5px 1px hsla(220, 80%, 10%, 0.4),
inset 0 0 5px hsla(220, 80%, 10%, 0.1),
inset 0 20px 15px hsla(220, 80%, 100%, 1),
inset 0 1px 0 hsl(219, 20%, 0%),
inset 0 -50px 50px -40px hsla(220, 80%, 10%, .3), /* gradient to inset */
inset 0 -1px 0px hsl(220, 20%, 20%),
inset 0 -2px 0px hsl(220, 20%, 40%),
inset 0 -2px 1px hsl(220, 20%, 65%)
'''
output = '''
div {
box-shadow: 0 -2px 2px #525f7a, 0 -10px 10px #29303d, 0 0 15px #000, inset 0 5px 1px rgba(5,19,46,0.40), inset 0 0 5px rgba(5,19,46,0.10), inset 0 20px 15px #fff, inset 0 1px 0 #000, inset 0 -50px 50px -40px rgba(5,19,46,0.30), inset 0 -1px 0px #29303d, inset 0 -2px 0px #525f7a, inset 0 -2px 1px #94a0b8;
}
'''
options =
preprocessor: (content) ->
content.replace /(\s+)(.*),\s+(?:\/\*.*\*\/)?\s*/mg, (_, indent, attribute) ->
"#{indent}#{attribute.replace(/\s+/g, " ")}, "
mint.stylus input, options, (error, result) ->
assert.equal result, output
mint.stylus.preprocessor = (content) ->
content.replace /(\s+)(.*),\s+(?:\/\*.*\*\/)?\s*/mg, (_, indent, attribute) ->
"#{indent}#{attribute.replace(/\s+/g, " ")}, "
mint.stylus input, (error, result) ->
assert.equal result, output
|
[
{
"context": " = \"http://locohost:5000/root.json\"\nPOLICY_KEY = \"whimsy-rpg-S3Policy\"\n\nmodule.exports = (I={}) ->\n defaults I,\n ba",
"end": 118,
"score": 0.9995279312133789,
"start": 99,
"tag": "KEY",
"value": "whimsy-rpg-S3Policy"
}
] | data.coffee | STRd6/whimsy-rpg | 3 | S3Uploader = require "s3-uploader"
policyEndpoint = "http://locohost:5000/root.json"
POLICY_KEY = "whimsy-rpg-S3Policy"
module.exports = (I={}) ->
defaults I,
base: "http://whimsy.space/whimsy-rpg/"
fetchPolicy = ->
console.log "Fetching policy from #{policyEndpoint}"
Q($.getJSON(policyEndpoint))
getS3Credentials = ->
Q.fcall ->
try
policy = JSON.parse localStorage[POLICY_KEY]
if policy
expiration = JSON.parse(atob(policy.policy)).expiration
if (Date.parse(expiration) - new Date) <= 30
console.log "Policy expired"
fetchPolicy()
else
return policy
else
fetchPolicy()
.then (policy) ->
console.log "Policy loaded"
localStorage[POLICY_KEY] = JSON.stringify(policy)
return policy
.fail (e) ->
throw e
self =
upload: (path, blob) ->
getS3Credentials().then (policy) ->
S3Uploader(policy).upload
key: path
blob: blob
cacheControl: 0
getImage: (path) ->
# Using getBuffer so we can get progress events and
# CORS shiz
self.getBuffer(path)
.then (buffer) ->
deferred = Q.defer()
blob = new Blob([buffer])
img = new Image
img.src = window.URL.createObjectURL(blob)
img.onload = ->
deferred.resolve img
img.onerror = deferred.reject
deferred.promise
getBuffer: (path) ->
deferred = Q.defer()
xhr = new XMLHttpRequest()
xhr.open('GET', "#{I.base}#{path}", true)
xhr.responseType = 'arraybuffer'
xhr.onload = (e) ->
if (200 <= this.status < 300) or this.status is 304
deferred.resolve this.response
else
deferred.reject e
xhr.onprogress = deferred.notify
xhr.onerror = deferred.reject
xhr.send()
deferred.promise
| 163364 | S3Uploader = require "s3-uploader"
policyEndpoint = "http://locohost:5000/root.json"
POLICY_KEY = "<KEY>"
module.exports = (I={}) ->
defaults I,
base: "http://whimsy.space/whimsy-rpg/"
fetchPolicy = ->
console.log "Fetching policy from #{policyEndpoint}"
Q($.getJSON(policyEndpoint))
getS3Credentials = ->
Q.fcall ->
try
policy = JSON.parse localStorage[POLICY_KEY]
if policy
expiration = JSON.parse(atob(policy.policy)).expiration
if (Date.parse(expiration) - new Date) <= 30
console.log "Policy expired"
fetchPolicy()
else
return policy
else
fetchPolicy()
.then (policy) ->
console.log "Policy loaded"
localStorage[POLICY_KEY] = JSON.stringify(policy)
return policy
.fail (e) ->
throw e
self =
upload: (path, blob) ->
getS3Credentials().then (policy) ->
S3Uploader(policy).upload
key: path
blob: blob
cacheControl: 0
getImage: (path) ->
# Using getBuffer so we can get progress events and
# CORS shiz
self.getBuffer(path)
.then (buffer) ->
deferred = Q.defer()
blob = new Blob([buffer])
img = new Image
img.src = window.URL.createObjectURL(blob)
img.onload = ->
deferred.resolve img
img.onerror = deferred.reject
deferred.promise
getBuffer: (path) ->
deferred = Q.defer()
xhr = new XMLHttpRequest()
xhr.open('GET', "#{I.base}#{path}", true)
xhr.responseType = 'arraybuffer'
xhr.onload = (e) ->
if (200 <= this.status < 300) or this.status is 304
deferred.resolve this.response
else
deferred.reject e
xhr.onprogress = deferred.notify
xhr.onerror = deferred.reject
xhr.send()
deferred.promise
| true | S3Uploader = require "s3-uploader"
policyEndpoint = "http://locohost:5000/root.json"
POLICY_KEY = "PI:KEY:<KEY>END_PI"
module.exports = (I={}) ->
defaults I,
base: "http://whimsy.space/whimsy-rpg/"
fetchPolicy = ->
console.log "Fetching policy from #{policyEndpoint}"
Q($.getJSON(policyEndpoint))
getS3Credentials = ->
Q.fcall ->
try
policy = JSON.parse localStorage[POLICY_KEY]
if policy
expiration = JSON.parse(atob(policy.policy)).expiration
if (Date.parse(expiration) - new Date) <= 30
console.log "Policy expired"
fetchPolicy()
else
return policy
else
fetchPolicy()
.then (policy) ->
console.log "Policy loaded"
localStorage[POLICY_KEY] = JSON.stringify(policy)
return policy
.fail (e) ->
throw e
self =
upload: (path, blob) ->
getS3Credentials().then (policy) ->
S3Uploader(policy).upload
key: path
blob: blob
cacheControl: 0
getImage: (path) ->
# Using getBuffer so we can get progress events and
# CORS shiz
self.getBuffer(path)
.then (buffer) ->
deferred = Q.defer()
blob = new Blob([buffer])
img = new Image
img.src = window.URL.createObjectURL(blob)
img.onload = ->
deferred.resolve img
img.onerror = deferred.reject
deferred.promise
getBuffer: (path) ->
deferred = Q.defer()
xhr = new XMLHttpRequest()
xhr.open('GET', "#{I.base}#{path}", true)
xhr.responseType = 'arraybuffer'
xhr.onload = (e) ->
if (200 <= this.status < 300) or this.status is 304
deferred.resolve this.response
else
deferred.reject e
xhr.onprogress = deferred.notify
xhr.onerror = deferred.reject
xhr.send()
deferred.promise
|
[
{
"context": "ll [\n knex('users').insert id: 1, name: 'foo'\n knex('users').insert id: 2, name: 'bar",
"end": 207,
"score": 0.9213083982467651,
"start": 204,
"tag": "NAME",
"value": "foo"
},
{
"context": "foo'\n knex('users').insert id: 2, name: 'bar'\n ... | test/seeder.coffee | douglasduteil/knex-csv-seeder | 10 | describe 'seeder', ->
context 'when 2 lines of csv file', ->
beforeEach ->
@timeout 10000
knex('users').del().then ->
Promise.all [
knex('users').insert id: 1, name: 'foo'
knex('users').insert id: 2, name: 'bar'
]
it 'import the seed', ->
@timeout 10000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 2
insertedRows = res.shift()
assert.ok insertedRows.shift() == 2
context 'when 1 lines of invalid csv file', ->
beforeEach ->
@timeout 10000
knex('users').del().then ->
Promise.all [
knex('users').insert id: 1, name: 'foo'
knex('users').insert id: 2, name: 'bar'
]
afterEach ->
knex('users').del()
it 'import the seed failed', ->
@timeout 10000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/invalid_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
throw new Error('succeeded') # name column is not null
.catch (err) ->
assert.notEqual err.message, 'succeeded'
context 'when 300 lines of csv file', ->
beforeEach ->
knex('users').del()
it 'import the seed', ->
@timeout 60000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/300_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 0
insertedRows = res.shift()
assert.ok insertedRows.shift() == 100
insertedRows = res.shift()
assert.ok insertedRows.shift() == 200
insertedRows = res.shift()
assert.ok insertedRows.shift() == 300
context 'when 100000 lines of csv file', ->
beforeEach ->
knex('users').del()
it 'import the seed', ->
@timeout 60000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/100000_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 0
insertedRows = res.pop()
assert.ok insertedRows.shift() == 100000
| 224347 | describe 'seeder', ->
context 'when 2 lines of csv file', ->
beforeEach ->
@timeout 10000
knex('users').del().then ->
Promise.all [
knex('users').insert id: 1, name: '<NAME>'
knex('users').insert id: 2, name: '<NAME>'
]
it 'import the seed', ->
@timeout 10000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 2
insertedRows = res.shift()
assert.ok insertedRows.shift() == 2
context 'when 1 lines of invalid csv file', ->
beforeEach ->
@timeout 10000
knex('users').del().then ->
Promise.all [
knex('users').insert id: 1, name: '<NAME>'
knex('users').insert id: 2, name: '<NAME>'
]
afterEach ->
knex('users').del()
it 'import the seed failed', ->
@timeout 10000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/invalid_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
throw new Error('succeeded') # name column is not null
.catch (err) ->
assert.notEqual err.message, 'succeeded'
context 'when 300 lines of csv file', ->
beforeEach ->
knex('users').del()
it 'import the seed', ->
@timeout 60000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/300_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 0
insertedRows = res.shift()
assert.ok insertedRows.shift() == 100
insertedRows = res.shift()
assert.ok insertedRows.shift() == 200
insertedRows = res.shift()
assert.ok insertedRows.shift() == 300
context 'when 100000 lines of csv file', ->
beforeEach ->
knex('users').del()
it 'import the seed', ->
@timeout 60000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/100000_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 0
insertedRows = res.pop()
assert.ok insertedRows.shift() == 100000
| true | describe 'seeder', ->
context 'when 2 lines of csv file', ->
beforeEach ->
@timeout 10000
knex('users').del().then ->
Promise.all [
knex('users').insert id: 1, name: 'PI:NAME:<NAME>END_PI'
knex('users').insert id: 2, name: 'PI:NAME:<NAME>END_PI'
]
it 'import the seed', ->
@timeout 10000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 2
insertedRows = res.shift()
assert.ok insertedRows.shift() == 2
context 'when 1 lines of invalid csv file', ->
beforeEach ->
@timeout 10000
knex('users').del().then ->
Promise.all [
knex('users').insert id: 1, name: 'PI:NAME:<NAME>END_PI'
knex('users').insert id: 2, name: 'PI:NAME:<NAME>END_PI'
]
afterEach ->
knex('users').del()
it 'import the seed failed', ->
@timeout 10000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/invalid_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
throw new Error('succeeded') # name column is not null
.catch (err) ->
assert.notEqual err.message, 'succeeded'
context 'when 300 lines of csv file', ->
beforeEach ->
knex('users').del()
it 'import the seed', ->
@timeout 60000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/300_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 0
insertedRows = res.shift()
assert.ok insertedRows.shift() == 100
insertedRows = res.shift()
assert.ok insertedRows.shift() == 200
insertedRows = res.shift()
assert.ok insertedRows.shift() == 300
context 'when 100000 lines of csv file', ->
beforeEach ->
knex('users').del()
it 'import the seed', ->
@timeout 60000
@seeder = seeder(table: 'users', file: __dirname + '/fixtures/100000_users_utf8.csv', encoding: 'utf8')
@seeder(knex, Promise).then (res) ->
deletedCount = res.shift()
assert.ok deletedCount == 0
insertedRows = res.pop()
assert.ok insertedRows.shift() == 100000
|
[
{
"context": "pl(\n id: user.user_id\n name: user.user_name\n mail: user.user_mail\n stat",
"end": 619,
"score": 0.8488527536392212,
"start": 605,
"tag": "NAME",
"value": "user.user_name"
}
] | src/client/users.coffee | mauriciodelrio/lendme | 1 | class User
constructor: () ->
@load_users()
load_users: () ->
$.getJSON('/api/users')
.always (data) ->
if data.status is 'OK' and data?.data
for user in data.data or []
$('#users').tmpl(
id: user.user_id
name: user.user_name
mail: user.user_mail
status: user.user_state
).appendTo ".users-results"
$.getJSON('/api/users/1')
.always (data) ->
if data.status is 'OK' and data?.data
for user in data.data or []
$('#users').tmpl(
id: user.user_id
name: user.user_name
mail: user.user_mail
status: user.user_state
).appendTo ".users-results"
window.User = User | 137936 | class User
constructor: () ->
@load_users()
load_users: () ->
$.getJSON('/api/users')
.always (data) ->
if data.status is 'OK' and data?.data
for user in data.data or []
$('#users').tmpl(
id: user.user_id
name: user.user_name
mail: user.user_mail
status: user.user_state
).appendTo ".users-results"
$.getJSON('/api/users/1')
.always (data) ->
if data.status is 'OK' and data?.data
for user in data.data or []
$('#users').tmpl(
id: user.user_id
name: <NAME>
mail: user.user_mail
status: user.user_state
).appendTo ".users-results"
window.User = User | true | class User
constructor: () ->
@load_users()
load_users: () ->
$.getJSON('/api/users')
.always (data) ->
if data.status is 'OK' and data?.data
for user in data.data or []
$('#users').tmpl(
id: user.user_id
name: user.user_name
mail: user.user_mail
status: user.user_state
).appendTo ".users-results"
$.getJSON('/api/users/1')
.always (data) ->
if data.status is 'OK' and data?.data
for user in data.data or []
$('#users').tmpl(
id: user.user_id
name: PI:NAME:<NAME>END_PI
mail: user.user_mail
status: user.user_state
).appendTo ".users-results"
window.User = User |
[
{
"context": "nmodified expressions in loop conditions\n# @author Toru Nagashima\n###\n\n'use strict'\n\n#-----------------------------",
"end": 111,
"score": 0.9998735785484314,
"start": 97,
"tag": "NAME",
"value": "Toru Nagashima"
}
] | src/rules/no-unmodified-loop-condition.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Rule to disallow use of unmodified expressions in loop conditions
# @author Toru Nagashima
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
Traverser =
try
require 'eslint/lib/util/traverser'
catch
require 'eslint/lib/shared/traverser'
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
SENTINEL_PATTERN =
/(?:(?:Call|Class|Function|Member|New|Yield)Expression|Statement|Declaration)$/
LOOP_PATTERN = /^(?:DoWhile|For|While)Statement$/ # for-in/of statements don't have `test` property.
GROUP_PATTERN = /^(?:BinaryExpression|ConditionalExpression)$/
SKIP_PATTERN = /^(?:ArrowFunction|Class|Function)Expression$/
DYNAMIC_PATTERN = /^(?:Call|Member|New|TaggedTemplate|Yield)Expression$/
###*
# @typedef {Object} LoopConditionInfo
# @property {eslint-scope.Reference} reference - The reference.
# @property {ASTNode} group - BinaryExpression or ConditionalExpression nodes
# that the reference is belonging to.
# @property {Function} isInLoop - The predicate which checks a given reference
# is in this loop.
# @property {boolean} modified - The flag that the reference is modified in
# this loop.
###
###*
# Checks whether or not a given reference is a write reference.
#
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is a write reference.
###
isWriteReference = (reference) ->
if reference.init
def = reference.resolved?.defs[0]
return no if (
not def or
def.type isnt 'Variable' or
def.parent.kind isnt 'var'
)
reference.isWrite()
###*
# Checks whether or not a given loop condition info does not have the modified
# flag.
#
# @param {LoopConditionInfo} condition - A loop condition info to check.
# @returns {boolean} `true` if the loop condition info is "unmodified".
###
isUnmodified = (condition) -> not condition.modified
###*
# Checks whether or not a given loop condition info does not have the modified
# flag and does not have the group this condition belongs to.
#
# @param {LoopConditionInfo} condition - A loop condition info to check.
# @returns {boolean} `true` if the loop condition info is "unmodified".
###
isUnmodifiedAndNotBelongToGroup = (condition) ->
not (condition.modified or condition.group)
###*
# Checks whether or not a given reference is inside of a given node.
#
# @param {ASTNode} node - A node to check.
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is inside of the node.
###
isInRange = (node, reference) ->
nr = node.range
ir = reference.identifier.range
nr[0] <= ir[0] and ir[1] <= nr[1]
###*
# Checks whether or not a given reference is inside of a loop node's condition.
#
# @param {ASTNode} node - A node to check.
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is inside of the loop node's
# condition.
###
isInLoop =
WhileStatement: isInRange
DoWhileStatement: isInRange
ForStatement: (node, reference) ->
isInRange(node, reference) and
not (node.init and isInRange node.init, reference)
###*
# Gets the function which encloses a given reference.
# This supports only FunctionDeclaration.
#
# @param {eslint-scope.Reference} reference - A reference to get.
# @returns {ASTNode|null} The function node or null.
###
getEncloseFunctionName = (reference) ->
node = reference.identifier
while node
if node.type is 'FunctionExpression'
if (
node.parent.type is 'AssignmentExpression' and
node.parent.left.type is 'Identifier'
)
return node.parent.left.name
return null
node = node.parent
null
###*
# Updates the "modified" flags of given loop conditions with given modifiers.
#
# @param {LoopConditionInfo[]} conditions - The loop conditions to be updated.
# @param {eslint-scope.Reference[]} modifiers - The references to update.
# @returns {void}
###
updateModifiedFlag = (conditions, modifiers) ->
###
# Besides checking for the condition being in the loop, we want to
# check the function that this modifier is belonging to is called
# in the loop.
# FIXME: This should probably be extracted to a function.
###
for condition in conditions
for modifier in modifiers when not condition.modified
inLoop =
condition.isInLoop(modifier) or
!!(
(funcName = getEncloseFunctionName modifier) and
(funcVar = astUtils.getVariableByName(
modifier.from.upper
funcName
)) and
funcVar.references.some condition.isInLoop
)
condition.modified = inLoop
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'disallow unmodified loop conditions'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/no-unmodified-loop-condition'
schema: []
create: (context) ->
sourceCode = context.getSourceCode()
groupMap = null
###*
# Reports a given condition info.
#
# @param {LoopConditionInfo} condition - A loop condition info to report.
# @returns {void}
###
report = (condition) ->
node = condition.reference.identifier
context.report {
node
message: "'{{name}}' is not modified in this loop."
data: node
}
###*
# Registers given conditions to the group the condition belongs to.
#
# @param {LoopConditionInfo[]} conditions - A loop condition info to
# register.
# @returns {void}
###
registerConditionsToGroup = (conditions) ->
i = 0
while i < conditions.length
condition = conditions[i]
if condition.group
group = groupMap.get condition.group
unless group
group = []
groupMap.set condition.group, group
group.push condition
++i
###*
# Reports references which are inside of unmodified groups.
#
# @param {LoopConditionInfo[]} conditions - A loop condition info to report.
# @returns {void}
###
checkConditionsInGroup = (conditions) ->
if conditions.every isUnmodified then conditions.forEach report
###*
# Checks whether or not a given group node has any dynamic elements.
#
# @param {ASTNode} root - A node to check.
# This node is one of BinaryExpression or ConditionalExpression.
# @returns {boolean} `true` if the node is dynamic.
###
hasDynamicExpressions = (root) ->
retv = no
Traverser.traverse root,
visitorKeys: sourceCode.visitorKeys
enter: (node) ->
if DYNAMIC_PATTERN.test node.type
retv ###:### = yes
@break()
else if SKIP_PATTERN.test node.type
@skip()
retv
###*
# Creates the loop condition information from a given reference.
#
# @param {eslint-scope.Reference} reference - A reference to create.
# @returns {LoopConditionInfo|null} Created loop condition info, or null.
###
toLoopCondition = (reference) ->
return null if reference.init
group = null
child = reference.identifier
node = child.parent
while node
if SENTINEL_PATTERN.test node.type
# This reference is inside of a loop condition.
return {
reference
group
isInLoop: isInLoop[node.type].bind null, node
modified: no
} if LOOP_PATTERN.test(node.type) and node.test is child
# This reference is outside of a loop condition.
break
###
# If it's inside of a group, OK if either operand is modified.
# So stores the group this reference belongs to.
###
if GROUP_PATTERN.test node.type
# If this expression is dynamic, no need to check.
if hasDynamicExpressions node then break else group = node
child = node
node = node.parent
null
###*
# Finds unmodified references which are inside of a loop condition.
# Then reports the references which are outside of groups.
#
# @param {eslint-scope.Variable} variable - A variable to report.
# @returns {void}
###
checkReferences = (variable) ->
# Gets references that exist in loop conditions.
conditions = variable.references.map(toLoopCondition).filter Boolean
return if conditions.length is 0
# Registers the conditions to belonging groups.
registerConditionsToGroup conditions
# Check the conditions are modified.
modifiers = variable.references.filter isWriteReference
if modifiers.length > 0 then updateModifiedFlag conditions, modifiers
###
# Reports the conditions which are not belonging to groups.
# Others will be reported after all variables are done.
###
conditions.filter(isUnmodifiedAndNotBelongToGroup).forEach report
'Program:exit': ->
queue = [context.getScope()]
groupMap = new Map()
while scope = queue.pop()
queue.push ...scope.childScopes
scope.variables.forEach checkReferences
groupMap.forEach checkConditionsInGroup
groupMap ###:### = null
| 109953 | ###*
# @fileoverview Rule to disallow use of unmodified expressions in loop conditions
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
Traverser =
try
require 'eslint/lib/util/traverser'
catch
require 'eslint/lib/shared/traverser'
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
SENTINEL_PATTERN =
/(?:(?:Call|Class|Function|Member|New|Yield)Expression|Statement|Declaration)$/
LOOP_PATTERN = /^(?:DoWhile|For|While)Statement$/ # for-in/of statements don't have `test` property.
GROUP_PATTERN = /^(?:BinaryExpression|ConditionalExpression)$/
SKIP_PATTERN = /^(?:ArrowFunction|Class|Function)Expression$/
DYNAMIC_PATTERN = /^(?:Call|Member|New|TaggedTemplate|Yield)Expression$/
###*
# @typedef {Object} LoopConditionInfo
# @property {eslint-scope.Reference} reference - The reference.
# @property {ASTNode} group - BinaryExpression or ConditionalExpression nodes
# that the reference is belonging to.
# @property {Function} isInLoop - The predicate which checks a given reference
# is in this loop.
# @property {boolean} modified - The flag that the reference is modified in
# this loop.
###
###*
# Checks whether or not a given reference is a write reference.
#
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is a write reference.
###
isWriteReference = (reference) ->
if reference.init
def = reference.resolved?.defs[0]
return no if (
not def or
def.type isnt 'Variable' or
def.parent.kind isnt 'var'
)
reference.isWrite()
###*
# Checks whether or not a given loop condition info does not have the modified
# flag.
#
# @param {LoopConditionInfo} condition - A loop condition info to check.
# @returns {boolean} `true` if the loop condition info is "unmodified".
###
isUnmodified = (condition) -> not condition.modified
###*
# Checks whether or not a given loop condition info does not have the modified
# flag and does not have the group this condition belongs to.
#
# @param {LoopConditionInfo} condition - A loop condition info to check.
# @returns {boolean} `true` if the loop condition info is "unmodified".
###
isUnmodifiedAndNotBelongToGroup = (condition) ->
not (condition.modified or condition.group)
###*
# Checks whether or not a given reference is inside of a given node.
#
# @param {ASTNode} node - A node to check.
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is inside of the node.
###
isInRange = (node, reference) ->
nr = node.range
ir = reference.identifier.range
nr[0] <= ir[0] and ir[1] <= nr[1]
###*
# Checks whether or not a given reference is inside of a loop node's condition.
#
# @param {ASTNode} node - A node to check.
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is inside of the loop node's
# condition.
###
isInLoop =
WhileStatement: isInRange
DoWhileStatement: isInRange
ForStatement: (node, reference) ->
isInRange(node, reference) and
not (node.init and isInRange node.init, reference)
###*
# Gets the function which encloses a given reference.
# This supports only FunctionDeclaration.
#
# @param {eslint-scope.Reference} reference - A reference to get.
# @returns {ASTNode|null} The function node or null.
###
getEncloseFunctionName = (reference) ->
node = reference.identifier
while node
if node.type is 'FunctionExpression'
if (
node.parent.type is 'AssignmentExpression' and
node.parent.left.type is 'Identifier'
)
return node.parent.left.name
return null
node = node.parent
null
###*
# Updates the "modified" flags of given loop conditions with given modifiers.
#
# @param {LoopConditionInfo[]} conditions - The loop conditions to be updated.
# @param {eslint-scope.Reference[]} modifiers - The references to update.
# @returns {void}
###
updateModifiedFlag = (conditions, modifiers) ->
###
# Besides checking for the condition being in the loop, we want to
# check the function that this modifier is belonging to is called
# in the loop.
# FIXME: This should probably be extracted to a function.
###
for condition in conditions
for modifier in modifiers when not condition.modified
inLoop =
condition.isInLoop(modifier) or
!!(
(funcName = getEncloseFunctionName modifier) and
(funcVar = astUtils.getVariableByName(
modifier.from.upper
funcName
)) and
funcVar.references.some condition.isInLoop
)
condition.modified = inLoop
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'disallow unmodified loop conditions'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/no-unmodified-loop-condition'
schema: []
create: (context) ->
sourceCode = context.getSourceCode()
groupMap = null
###*
# Reports a given condition info.
#
# @param {LoopConditionInfo} condition - A loop condition info to report.
# @returns {void}
###
report = (condition) ->
node = condition.reference.identifier
context.report {
node
message: "'{{name}}' is not modified in this loop."
data: node
}
###*
# Registers given conditions to the group the condition belongs to.
#
# @param {LoopConditionInfo[]} conditions - A loop condition info to
# register.
# @returns {void}
###
registerConditionsToGroup = (conditions) ->
i = 0
while i < conditions.length
condition = conditions[i]
if condition.group
group = groupMap.get condition.group
unless group
group = []
groupMap.set condition.group, group
group.push condition
++i
###*
# Reports references which are inside of unmodified groups.
#
# @param {LoopConditionInfo[]} conditions - A loop condition info to report.
# @returns {void}
###
checkConditionsInGroup = (conditions) ->
if conditions.every isUnmodified then conditions.forEach report
###*
# Checks whether or not a given group node has any dynamic elements.
#
# @param {ASTNode} root - A node to check.
# This node is one of BinaryExpression or ConditionalExpression.
# @returns {boolean} `true` if the node is dynamic.
###
hasDynamicExpressions = (root) ->
retv = no
Traverser.traverse root,
visitorKeys: sourceCode.visitorKeys
enter: (node) ->
if DYNAMIC_PATTERN.test node.type
retv ###:### = yes
@break()
else if SKIP_PATTERN.test node.type
@skip()
retv
###*
# Creates the loop condition information from a given reference.
#
# @param {eslint-scope.Reference} reference - A reference to create.
# @returns {LoopConditionInfo|null} Created loop condition info, or null.
###
toLoopCondition = (reference) ->
return null if reference.init
group = null
child = reference.identifier
node = child.parent
while node
if SENTINEL_PATTERN.test node.type
# This reference is inside of a loop condition.
return {
reference
group
isInLoop: isInLoop[node.type].bind null, node
modified: no
} if LOOP_PATTERN.test(node.type) and node.test is child
# This reference is outside of a loop condition.
break
###
# If it's inside of a group, OK if either operand is modified.
# So stores the group this reference belongs to.
###
if GROUP_PATTERN.test node.type
# If this expression is dynamic, no need to check.
if hasDynamicExpressions node then break else group = node
child = node
node = node.parent
null
###*
# Finds unmodified references which are inside of a loop condition.
# Then reports the references which are outside of groups.
#
# @param {eslint-scope.Variable} variable - A variable to report.
# @returns {void}
###
checkReferences = (variable) ->
# Gets references that exist in loop conditions.
conditions = variable.references.map(toLoopCondition).filter Boolean
return if conditions.length is 0
# Registers the conditions to belonging groups.
registerConditionsToGroup conditions
# Check the conditions are modified.
modifiers = variable.references.filter isWriteReference
if modifiers.length > 0 then updateModifiedFlag conditions, modifiers
###
# Reports the conditions which are not belonging to groups.
# Others will be reported after all variables are done.
###
conditions.filter(isUnmodifiedAndNotBelongToGroup).forEach report
'Program:exit': ->
queue = [context.getScope()]
groupMap = new Map()
while scope = queue.pop()
queue.push ...scope.childScopes
scope.variables.forEach checkReferences
groupMap.forEach checkConditionsInGroup
groupMap ###:### = null
| true | ###*
# @fileoverview Rule to disallow use of unmodified expressions in loop conditions
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
Traverser =
try
require 'eslint/lib/util/traverser'
catch
require 'eslint/lib/shared/traverser'
astUtils = require '../eslint-ast-utils'
#------------------------------------------------------------------------------
# Helpers
#------------------------------------------------------------------------------
SENTINEL_PATTERN =
/(?:(?:Call|Class|Function|Member|New|Yield)Expression|Statement|Declaration)$/
LOOP_PATTERN = /^(?:DoWhile|For|While)Statement$/ # for-in/of statements don't have `test` property.
GROUP_PATTERN = /^(?:BinaryExpression|ConditionalExpression)$/
SKIP_PATTERN = /^(?:ArrowFunction|Class|Function)Expression$/
DYNAMIC_PATTERN = /^(?:Call|Member|New|TaggedTemplate|Yield)Expression$/
###*
# @typedef {Object} LoopConditionInfo
# @property {eslint-scope.Reference} reference - The reference.
# @property {ASTNode} group - BinaryExpression or ConditionalExpression nodes
# that the reference is belonging to.
# @property {Function} isInLoop - The predicate which checks a given reference
# is in this loop.
# @property {boolean} modified - The flag that the reference is modified in
# this loop.
###
###*
# Checks whether or not a given reference is a write reference.
#
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is a write reference.
###
isWriteReference = (reference) ->
if reference.init
def = reference.resolved?.defs[0]
return no if (
not def or
def.type isnt 'Variable' or
def.parent.kind isnt 'var'
)
reference.isWrite()
###*
# Checks whether or not a given loop condition info does not have the modified
# flag.
#
# @param {LoopConditionInfo} condition - A loop condition info to check.
# @returns {boolean} `true` if the loop condition info is "unmodified".
###
isUnmodified = (condition) -> not condition.modified
###*
# Checks whether or not a given loop condition info does not have the modified
# flag and does not have the group this condition belongs to.
#
# @param {LoopConditionInfo} condition - A loop condition info to check.
# @returns {boolean} `true` if the loop condition info is "unmodified".
###
isUnmodifiedAndNotBelongToGroup = (condition) ->
not (condition.modified or condition.group)
###*
# Checks whether or not a given reference is inside of a given node.
#
# @param {ASTNode} node - A node to check.
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is inside of the node.
###
isInRange = (node, reference) ->
nr = node.range
ir = reference.identifier.range
nr[0] <= ir[0] and ir[1] <= nr[1]
###*
# Checks whether or not a given reference is inside of a loop node's condition.
#
# @param {ASTNode} node - A node to check.
# @param {eslint-scope.Reference} reference - A reference to check.
# @returns {boolean} `true` if the reference is inside of the loop node's
# condition.
###
isInLoop =
WhileStatement: isInRange
DoWhileStatement: isInRange
ForStatement: (node, reference) ->
isInRange(node, reference) and
not (node.init and isInRange node.init, reference)
###*
# Gets the function which encloses a given reference.
# This supports only FunctionDeclaration.
#
# @param {eslint-scope.Reference} reference - A reference to get.
# @returns {ASTNode|null} The function node or null.
###
getEncloseFunctionName = (reference) ->
node = reference.identifier
while node
if node.type is 'FunctionExpression'
if (
node.parent.type is 'AssignmentExpression' and
node.parent.left.type is 'Identifier'
)
return node.parent.left.name
return null
node = node.parent
null
###*
# Updates the "modified" flags of given loop conditions with given modifiers.
#
# @param {LoopConditionInfo[]} conditions - The loop conditions to be updated.
# @param {eslint-scope.Reference[]} modifiers - The references to update.
# @returns {void}
###
updateModifiedFlag = (conditions, modifiers) ->
###
# Besides checking for the condition being in the loop, we want to
# check the function that this modifier is belonging to is called
# in the loop.
# FIXME: This should probably be extracted to a function.
###
for condition in conditions
for modifier in modifiers when not condition.modified
inLoop =
condition.isInLoop(modifier) or
!!(
(funcName = getEncloseFunctionName modifier) and
(funcVar = astUtils.getVariableByName(
modifier.from.upper
funcName
)) and
funcVar.references.some condition.isInLoop
)
condition.modified = inLoop
#------------------------------------------------------------------------------
# Rule Definition
#------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'disallow unmodified loop conditions'
category: 'Best Practices'
recommended: no
url: 'https://eslint.org/docs/rules/no-unmodified-loop-condition'
schema: []
create: (context) ->
sourceCode = context.getSourceCode()
groupMap = null
###*
# Reports a given condition info.
#
# @param {LoopConditionInfo} condition - A loop condition info to report.
# @returns {void}
###
report = (condition) ->
node = condition.reference.identifier
context.report {
node
message: "'{{name}}' is not modified in this loop."
data: node
}
###*
# Registers given conditions to the group the condition belongs to.
#
# @param {LoopConditionInfo[]} conditions - A loop condition info to
# register.
# @returns {void}
###
registerConditionsToGroup = (conditions) ->
i = 0
while i < conditions.length
condition = conditions[i]
if condition.group
group = groupMap.get condition.group
unless group
group = []
groupMap.set condition.group, group
group.push condition
++i
###*
# Reports references which are inside of unmodified groups.
#
# @param {LoopConditionInfo[]} conditions - A loop condition info to report.
# @returns {void}
###
checkConditionsInGroup = (conditions) ->
if conditions.every isUnmodified then conditions.forEach report
###*
# Checks whether or not a given group node has any dynamic elements.
#
# @param {ASTNode} root - A node to check.
# This node is one of BinaryExpression or ConditionalExpression.
# @returns {boolean} `true` if the node is dynamic.
###
hasDynamicExpressions = (root) ->
retv = no
Traverser.traverse root,
visitorKeys: sourceCode.visitorKeys
enter: (node) ->
if DYNAMIC_PATTERN.test node.type
retv ###:### = yes
@break()
else if SKIP_PATTERN.test node.type
@skip()
retv
###*
# Creates the loop condition information from a given reference.
#
# @param {eslint-scope.Reference} reference - A reference to create.
# @returns {LoopConditionInfo|null} Created loop condition info, or null.
###
toLoopCondition = (reference) ->
return null if reference.init
group = null
child = reference.identifier
node = child.parent
while node
if SENTINEL_PATTERN.test node.type
# This reference is inside of a loop condition.
return {
reference
group
isInLoop: isInLoop[node.type].bind null, node
modified: no
} if LOOP_PATTERN.test(node.type) and node.test is child
# This reference is outside of a loop condition.
break
###
# If it's inside of a group, OK if either operand is modified.
# So stores the group this reference belongs to.
###
if GROUP_PATTERN.test node.type
# If this expression is dynamic, no need to check.
if hasDynamicExpressions node then break else group = node
child = node
node = node.parent
null
###*
# Finds unmodified references which are inside of a loop condition.
# Then reports the references which are outside of groups.
#
# @param {eslint-scope.Variable} variable - A variable to report.
# @returns {void}
###
checkReferences = (variable) ->
# Gets references that exist in loop conditions.
conditions = variable.references.map(toLoopCondition).filter Boolean
return if conditions.length is 0
# Registers the conditions to belonging groups.
registerConditionsToGroup conditions
# Check the conditions are modified.
modifiers = variable.references.filter isWriteReference
if modifiers.length > 0 then updateModifiedFlag conditions, modifiers
###
# Reports the conditions which are not belonging to groups.
# Others will be reported after all variables are done.
###
conditions.filter(isUnmodifiedAndNotBelongToGroup).forEach report
'Program:exit': ->
queue = [context.getScope()]
groupMap = new Map()
while scope = queue.pop()
queue.push ...scope.childScopes
scope.variables.forEach checkReferences
groupMap.forEach checkConditionsInGroup
groupMap ###:### = null
|
[
{
"context": "# Slovak translation - sk\n# Translators: Martin Bucko (MartinBucko) and Rober Kamenicky (aladinko) (tre",
"end": 53,
"score": 0.9998641014099121,
"start": 41,
"tag": "NAME",
"value": "Martin Bucko"
},
{
"context": "vak translation - sk\n# Translators: Martin Bucko (Mar... | t9n/sk.coffee | coWorkr-InSights/meteor-accounts-t9n | 80 | # Slovak translation - sk
# Translators: Martin Bucko (MartinBucko) and Rober Kamenicky (aladinko) (treecom.sk)
sk =
add: "pridať"
and: "a"
back: "späť"
changePassword: "Zmena hesla"
choosePassword: "Zvoľte si heslo"
clickAgree: "Stlačením tlačidla \"Registrovať\" akceptujete"
configure: "Nastaviť"
createAccount: "Vytvoriť konto"
currentPassword: "Súčasné heslo"
dontHaveAnAccount: "Nemáte účet?"
email: "Email"
emailAddress: "Emailová adresa"
emailResetLink: "Odoslať na email overovací odkaz"
forgotPassword: "Zabudli ste heslo?"
ifYouAlreadyHaveAnAccount: "Ak už máte vytvorený účet prejdite na"
newPassword: "Nové heslo"
newPasswordAgain: "Nové heslo (opakujte)"
optional: "Voliteľné"
OR: "alebo"
password: "Heslo"
passwordAgain: "Heslo (opakujte)"
privacyPolicy: "pravidlá ochrany súkromia"
remove: "odstrániť"
resetYourPassword: "Obnovenie hesla"
setPassword: "Nastaviť heslo"
sign: "Prihlásiť"
signIn: "Prihlásenie" #Prihlásiť ?
signin: "prihlásenie" #prihlásený ?
signOut: "Odhlásiť"
signUp: "Registrovať"
signupCode: "Registračný kód"
signUpWithYourEmailAddress: "Registrácia pomocou emailovej adresy"
terms: "pravidlá požívania"
updateYourPassword: "Aktualizovať heslo"
username: "Užívateľské meno"
usernameOrEmail: "Užívateľské meno alebo email"
with: "s"
info:
emailSent: "Email odoslaný"
emailVerified: "Email bol overený"
passwordChanged: "Heslo bolo zmenené"
passwordReset: "Obnova hesla"
error:
emailRequired: "Email je vyžadovaný." #Je potrebné zadať email?
minChar: "minimálne 7 znakov heslo."
pwdsDontMatch: "Heslá sa nezhodujú"
pwOneDigit: "Heslo musí mať aspoň jeden znak."
pwOneLetter: "Heslo musí mať aspoň 1 znak.."
signInRequired: "Je vyžadované prihlásenie na túto akciu."
signupCodeIncorrect: "Registračný kód je nesprávny."
signupCodeRequired: "Je vyžadovaný registračný kód."
usernameIsEmail: "Užvateľské meno nemôže byť email."
usernameRequired: "Je vyžadované užívateľské meno."
accounts:
#---- accounts-base
#"@" + domain + " email required"
#"A login handler should return a result or undefined"
"Email already exists.": "Email už bol registrovaný."
"Email doesn't match the criteria.": "Email nevyhovuje kritériam."
"Invalid login token": "Neplatný token prihlásenia"
"Login forbidden": "Prihlásenie neuspešné"
#"Service " + options.service + " already configured"
"Service unknown": "Neznáma služba"
"Unrecognized options for login request": "Neroznali sa podmienky pre požiadavku prihlásenia"
"User validation failed": "Overenie užívateľa zlyhalo"
"Username already exists.": "Užívateľ už existuje."
"You are not logged in.": "Vyžaduje sa prihlásenie."
"You've been logged out by the server. Please log in again.": "Boli ste odhlásený/á zo servera. Prosím prihláste sa znova." #znovu/znova?
"Your session has expired. Please log in again.": "Vaše príhlásenie expirovalo. Prosím prihláste sa znova."#znovu/znova?
#---- accounts-oauth
"No matching login attempt found": "Prihlásenie nesúhlasí"
#---- accounts-password-client
"Password is old. Please reset your password.": "Heslo je zastaralé. Prosím obnovte si ho."
#---- accounts-password
"Incorrect password": "Nesprávne heslo"
"Invalid email": "Nesprávaný email"
"Must be logged in": "Je vyžadované prihlásenie"
"Need to set a username or email": "Je potrebné nastaviť užívateľské meno a email"
"old password format": "formát starého hesla"
"Password may not be empty": "Heslo nesmie byť prázdne"
"Signups forbidden": "Prihlásenie je zakázané"
"Token expired": "Token expiroval"
"Token has invalid email address": "Token obsahuje nesprávnu emailovú adresu"
"User has no password set": "Užívateľ nemá nastavené heslo"
"User not found": "Užívateľ sa nenašiel"
"Verify email link expired": "Odkazu pre overenie emailu vypršala platnosť."
"Verify email link is for unknown address": "Overovací odkaz je z nenámej adresy"
#---- match
"Match failed": "Nezhodné"
#---- Misc...
"Unknown error": "Neznáma chyba"
T9n?.map "sk", sk
module?.exports = sk
| 83877 | # Slovak translation - sk
# Translators: <NAME> (MartinBucko) and <NAME> (aladinko) (treecom.sk)
sk =
add: "pridať"
and: "a"
back: "späť"
changePassword: "<PASSWORD>"
choosePassword: "<PASSWORD>"
clickAgree: "Stlačením tlačidla \"Registrovať\" akceptujete"
configure: "Nastaviť"
createAccount: "Vytvoriť konto"
currentPassword: "<PASSWORD>"
dontHaveAnAccount: "Nemáte účet?"
email: "Email"
emailAddress: "Emailová adresa"
emailResetLink: "Odoslať na email overovací odkaz"
forgotPassword: "<PASSWORD>?"
ifYouAlreadyHaveAnAccount: "Ak už máte vytvorený účet prejdite na"
newPassword: "<PASSWORD>"
newPasswordAgain: "<PASSWORD> (opakujte)"
optional: "Voliteľné"
OR: "alebo"
password: "<PASSWORD>"
passwordAgain: "<PASSWORD> (opakujte)"
privacyPolicy: "pravidlá ochrany súkromia"
remove: "odstrániť"
resetYourPassword: "<PASSWORD>"
setPassword: "<PASSWORD>"
sign: "Prihlásiť"
signIn: "Prihlásenie" #Prihlásiť ?
signin: "prihlásenie" #prihlásený ?
signOut: "Odhlásiť"
signUp: "Registrovať"
signupCode: "Registračný kód"
signUpWithYourEmailAddress: "Registrácia pomocou emailovej adresy"
terms: "pravidlá požívania"
updateYourPassword: "<PASSWORD>izovať <PASSWORD>"
username: "Užívateľské meno"
usernameOrEmail: "Užív<EMAIL> <EMAIL>ské meno alebo email"
with: "s"
info:
emailSent: "Email odoslaný"
emailVerified: "Email bol overený"
passwordChanged: "<PASSWORD>"
passwordReset: "<PASSWORD>"
error:
emailRequired: "Email je vyžadovaný." #Je potrebné zadať email?
minChar: "minimálne 7 znakov heslo."
pwdsDontMatch: "Heslá sa nezhodujú"
pwOneDigit: "Heslo musí mať aspoň jeden znak."
pwOneLetter: "Heslo musí mať aspoň 1 znak.."
signInRequired: "Je vyžadované prihlásenie na túto akciu."
signupCodeIncorrect: "Registračný kód je nesprávny."
signupCodeRequired: "Je vyžadovaný registračný kód."
usernameIsEmail: "Užvateľské meno nemôže byť email."
usernameRequired: "Je vyžadované užívateľské meno."
accounts:
#---- accounts-base
#"@" + domain + " email required"
#"A login handler should return a result or undefined"
"Email already exists.": "Email už bol registrovaný."
"Email doesn't match the criteria.": "Email nevyhovuje kritériam."
"Invalid login token": "Neplatný token prihlásenia"
"Login forbidden": "Prihlásenie neuspešné"
#"Service " + options.service + " already configured"
"Service unknown": "Neznáma služba"
"Unrecognized options for login request": "Neroznali sa podmienky pre požiadavku prihlásenia"
"User validation failed": "Overenie užívateľa zlyhalo"
"Username already exists.": "Užívateľ už existuje."
"You are not logged in.": "Vyžaduje sa prihlásenie."
"You've been logged out by the server. Please log in again.": "Boli ste odhlásený/á zo servera. Prosím prihláste sa znova." #znovu/znova?
"Your session has expired. Please log in again.": "Vaše príhlásenie expirovalo. Prosím prihláste sa znova."#znovu/znova?
#---- accounts-oauth
"No matching login attempt found": "Prihlásenie nesúhlasí"
#---- accounts-password-client
"Password is old. Please reset your password.": "Heslo je zastaralé. Prosím obnovte si ho."
#---- accounts-password
"Incorrect password": "<PASSWORD>"
"Invalid email": "Nesprávaný email"
"Must be logged in": "Je vyžadované prihlásenie"
"Need to set a username or email": "Je potrebné nastaviť užívateľské meno a email"
"old password format": "formát starého hesla"
"Password may not be empty": "Heslo nesmie byť <PASSWORD>ne"
"Signups forbidden": "Prihlásenie je zakázané"
"Token expired": "Token expiroval"
"Token has invalid email address": "Token obsahuje nesprávnu emailovú adresu"
"User has no password set": "Užívateľ nemá nastavené heslo"
"User not found": "Užívateľ sa nenašiel"
"Verify email link expired": "Odkazu pre overenie emailu vypršala platnosť."
"Verify email link is for unknown address": "Overovací odkaz je z nenámej adresy"
#---- match
"Match failed": "Nezhodné"
#---- Misc...
"Unknown error": "Neznáma chyba"
T9n?.map "sk", sk
module?.exports = sk
| true | # Slovak translation - sk
# Translators: PI:NAME:<NAME>END_PI (MartinBucko) and PI:NAME:<NAME>END_PI (aladinko) (treecom.sk)
sk =
add: "pridať"
and: "a"
back: "späť"
changePassword: "PI:PASSWORD:<PASSWORD>END_PI"
choosePassword: "PI:PASSWORD:<PASSWORD>END_PI"
clickAgree: "Stlačením tlačidla \"Registrovať\" akceptujete"
configure: "Nastaviť"
createAccount: "Vytvoriť konto"
currentPassword: "PI:PASSWORD:<PASSWORD>END_PI"
dontHaveAnAccount: "Nemáte účet?"
email: "Email"
emailAddress: "Emailová adresa"
emailResetLink: "Odoslať na email overovací odkaz"
forgotPassword: "PI:PASSWORD:<PASSWORD>END_PI?"
ifYouAlreadyHaveAnAccount: "Ak už máte vytvorený účet prejdite na"
newPassword: "PI:PASSWORD:<PASSWORD>END_PI"
newPasswordAgain: "PI:PASSWORD:<PASSWORD>END_PI (opakujte)"
optional: "Voliteľné"
OR: "alebo"
password: "PI:PASSWORD:<PASSWORD>END_PI"
passwordAgain: "PI:PASSWORD:<PASSWORD>END_PI (opakujte)"
privacyPolicy: "pravidlá ochrany súkromia"
remove: "odstrániť"
resetYourPassword: "PI:PASSWORD:<PASSWORD>END_PI"
setPassword: "PI:PASSWORD:<PASSWORD>END_PI"
sign: "Prihlásiť"
signIn: "Prihlásenie" #Prihlásiť ?
signin: "prihlásenie" #prihlásený ?
signOut: "Odhlásiť"
signUp: "Registrovať"
signupCode: "Registračný kód"
signUpWithYourEmailAddress: "Registrácia pomocou emailovej adresy"
terms: "pravidlá požívania"
updateYourPassword: "PI:PASSWORD:<PASSWORD>END_PIizovať PI:PASSWORD:<PASSWORD>END_PI"
username: "Užívateľské meno"
usernameOrEmail: "UžívPI:EMAIL:<EMAIL>END_PI PI:EMAIL:<EMAIL>END_PIské meno alebo email"
with: "s"
info:
emailSent: "Email odoslaný"
emailVerified: "Email bol overený"
passwordChanged: "PI:PASSWORD:<PASSWORD>END_PI"
passwordReset: "PI:PASSWORD:<PASSWORD>END_PI"
error:
emailRequired: "Email je vyžadovaný." #Je potrebné zadať email?
minChar: "minimálne 7 znakov heslo."
pwdsDontMatch: "Heslá sa nezhodujú"
pwOneDigit: "Heslo musí mať aspoň jeden znak."
pwOneLetter: "Heslo musí mať aspoň 1 znak.."
signInRequired: "Je vyžadované prihlásenie na túto akciu."
signupCodeIncorrect: "Registračný kód je nesprávny."
signupCodeRequired: "Je vyžadovaný registračný kód."
usernameIsEmail: "Užvateľské meno nemôže byť email."
usernameRequired: "Je vyžadované užívateľské meno."
accounts:
#---- accounts-base
#"@" + domain + " email required"
#"A login handler should return a result or undefined"
"Email already exists.": "Email už bol registrovaný."
"Email doesn't match the criteria.": "Email nevyhovuje kritériam."
"Invalid login token": "Neplatný token prihlásenia"
"Login forbidden": "Prihlásenie neuspešné"
#"Service " + options.service + " already configured"
"Service unknown": "Neznáma služba"
"Unrecognized options for login request": "Neroznali sa podmienky pre požiadavku prihlásenia"
"User validation failed": "Overenie užívateľa zlyhalo"
"Username already exists.": "Užívateľ už existuje."
"You are not logged in.": "Vyžaduje sa prihlásenie."
"You've been logged out by the server. Please log in again.": "Boli ste odhlásený/á zo servera. Prosím prihláste sa znova." #znovu/znova?
"Your session has expired. Please log in again.": "Vaše príhlásenie expirovalo. Prosím prihláste sa znova."#znovu/znova?
#---- accounts-oauth
"No matching login attempt found": "Prihlásenie nesúhlasí"
#---- accounts-password-client
"Password is old. Please reset your password.": "Heslo je zastaralé. Prosím obnovte si ho."
#---- accounts-password
"Incorrect password": "PI:PASSWORD:<PASSWORD>END_PI"
"Invalid email": "Nesprávaný email"
"Must be logged in": "Je vyžadované prihlásenie"
"Need to set a username or email": "Je potrebné nastaviť užívateľské meno a email"
"old password format": "formát starého hesla"
"Password may not be empty": "Heslo nesmie byť PI:PASSWORD:<PASSWORD>END_PIne"
"Signups forbidden": "Prihlásenie je zakázané"
"Token expired": "Token expiroval"
"Token has invalid email address": "Token obsahuje nesprávnu emailovú adresu"
"User has no password set": "Užívateľ nemá nastavené heslo"
"User not found": "Užívateľ sa nenašiel"
"Verify email link expired": "Odkazu pre overenie emailu vypršala platnosť."
"Verify email link is for unknown address": "Overovací odkaz je z nenámej adresy"
#---- match
"Match failed": "Nezhodné"
#---- Misc...
"Unknown error": "Neznáma chyba"
T9n?.map "sk", sk
module?.exports = sk
|
[
{
"context": "\n\t\t\trarity =\n\t\t\t\tid: Rarity.Mythron\n\t\t\t\tdevName: \"mythron\"\n\t\t\t\tname: \"Mythron\"\n\t\t\t\tspiritCost: 1200\n\t\t\t\tspi",
"end": 2497,
"score": 0.9883525967597961,
"start": 2490,
"tag": "NAME",
"value": "mythron"
},
{
"context": " Rarity.Mythron\n\t... | app/sdk/cards/rarityFactory.coffee | willroberts/duelyst | 5 |
Rarity = require './rarityLookup'
i18next = require 'i18next'
class RarityFactory
@rarityForIdentifier: (identifier) ->
rarity = null
if identifier == Rarity.Fixed
rarity =
id: Rarity.Fixed
name: i18next.t("rarity.rarity_basic")
devName: "basic"
spiritCost: 0
spiritReward: 0
spiritCostPrismatic: 0
spiritRewardPrismatic: 0
bonusRewardCount: 0
color: {r: 255, g: 255, b: 255}
hex: "#CCCCCC"
isHiddenToUI: false
if identifier == Rarity.Common
rarity =
id: Rarity.Common
name: i18next.t("rarity.rarity_common")
devName: "common"
spiritCost: 40
spiritReward: 10
spiritCostPrismatic: 200
spiritRewardPrismatic: 40
cosmeticHardPrice: 100
spiritCostCosmetic: 500
spiritRewardCosmetic: 50
bonusRewardCount: 1
color: {r: 255, g: 255, b: 255}
hex: "#CCCCCC"
isHiddenToUI: false
if identifier == Rarity.Rare
rarity =
id: Rarity.Rare
name: i18next.t("rarity.rarity_rare")
devName: "rare"
spiritCost: 100
spiritReward: 20
spiritCostPrismatic: 350
spiritRewardPrismatic: 100
cosmeticHardPrice: 150
spiritCostCosmetic: 1000
spiritRewardCosmetic: 100
bonusRewardCount: 1
color: {r: 56, g: 93, b: 255}
hex: "#6dcff6"
isHiddenToUI: false
if identifier == Rarity.Epic
rarity =
id: Rarity.Epic
name: i18next.t("rarity.rarity_epic")
devName: "epic"
spiritCost: 350
spiritReward: 100
spiritCostPrismatic: 900
spiritRewardPrismatic: 350
cosmeticHardPrice: 200
spiritCostCosmetic: 1500
spiritRewardCosmetic: 150
bonusRewardCount: 2
color: {r: 144, g: 41, b: 255}
hex: "#f49ac1"
isHiddenToUI: false
if identifier == Rarity.Legendary
rarity =
id: Rarity.Legendary
name: i18next.t("rarity.rarity_legendary")
devName: "legendary"
spiritCost: 900
spiritReward: 350
spiritCostPrismatic: 1800
spiritRewardPrismatic: 900
cosmeticHardPrice: 400
spiritCostCosmetic: 3000
spiritRewardCosmetic: 350
bonusRewardCount: 2
color: {r: 255, g: 120, b: 0}
hex: "#ffac49"
isHiddenToUI: false
if identifier == Rarity.TokenUnit
rarity =
id: Rarity.TokenUnit
devName: "token"
name: "Token"
spiritCost: 0
spiritReward: 0
bonusRewardCount: 0
color: {r: 189, g: 189, b: 189}
hex: "#BDBDBD"
isHiddenToUI: true
if identifier == Rarity.Mythron
rarity =
id: Rarity.Mythron
devName: "mythron"
name: "Mythron"
spiritCost: 1200
spiritReward: 0
spiritCostPrismatic: 2400
spiritRewardPrismatic: 1200
bonusRewardCount: 0
color: {r: 189, g: 189, b: 189}
hex: "#BDBDBD"
isHiddenToUI: false
# no faction found
if rarity
return rarity
else
console.error "RarityFactory.rarityForIdentifier - Unknown rarity identifier: #{identifier}".red
@getIsRarityTypeCraftable: (rarityType) ->
return rarityType != Rarity.Fixed and rarityType != Rarity.TokenUnit
@getAllRarities: () ->
rarities = []
for rarityName of Rarity
identifier = Rarity[rarityName]
rarity = @rarityForIdentifier(identifier)
if rarity?
rarities.push(rarity)
return rarities
module.exports = RarityFactory
| 141466 |
Rarity = require './rarityLookup'
i18next = require 'i18next'
class RarityFactory
@rarityForIdentifier: (identifier) ->
rarity = null
if identifier == Rarity.Fixed
rarity =
id: Rarity.Fixed
name: i18next.t("rarity.rarity_basic")
devName: "basic"
spiritCost: 0
spiritReward: 0
spiritCostPrismatic: 0
spiritRewardPrismatic: 0
bonusRewardCount: 0
color: {r: 255, g: 255, b: 255}
hex: "#CCCCCC"
isHiddenToUI: false
if identifier == Rarity.Common
rarity =
id: Rarity.Common
name: i18next.t("rarity.rarity_common")
devName: "common"
spiritCost: 40
spiritReward: 10
spiritCostPrismatic: 200
spiritRewardPrismatic: 40
cosmeticHardPrice: 100
spiritCostCosmetic: 500
spiritRewardCosmetic: 50
bonusRewardCount: 1
color: {r: 255, g: 255, b: 255}
hex: "#CCCCCC"
isHiddenToUI: false
if identifier == Rarity.Rare
rarity =
id: Rarity.Rare
name: i18next.t("rarity.rarity_rare")
devName: "rare"
spiritCost: 100
spiritReward: 20
spiritCostPrismatic: 350
spiritRewardPrismatic: 100
cosmeticHardPrice: 150
spiritCostCosmetic: 1000
spiritRewardCosmetic: 100
bonusRewardCount: 1
color: {r: 56, g: 93, b: 255}
hex: "#6dcff6"
isHiddenToUI: false
if identifier == Rarity.Epic
rarity =
id: Rarity.Epic
name: i18next.t("rarity.rarity_epic")
devName: "epic"
spiritCost: 350
spiritReward: 100
spiritCostPrismatic: 900
spiritRewardPrismatic: 350
cosmeticHardPrice: 200
spiritCostCosmetic: 1500
spiritRewardCosmetic: 150
bonusRewardCount: 2
color: {r: 144, g: 41, b: 255}
hex: "#f49ac1"
isHiddenToUI: false
if identifier == Rarity.Legendary
rarity =
id: Rarity.Legendary
name: i18next.t("rarity.rarity_legendary")
devName: "legendary"
spiritCost: 900
spiritReward: 350
spiritCostPrismatic: 1800
spiritRewardPrismatic: 900
cosmeticHardPrice: 400
spiritCostCosmetic: 3000
spiritRewardCosmetic: 350
bonusRewardCount: 2
color: {r: 255, g: 120, b: 0}
hex: "#ffac49"
isHiddenToUI: false
if identifier == Rarity.TokenUnit
rarity =
id: Rarity.TokenUnit
devName: "token"
name: "Token"
spiritCost: 0
spiritReward: 0
bonusRewardCount: 0
color: {r: 189, g: 189, b: 189}
hex: "#BDBDBD"
isHiddenToUI: true
if identifier == Rarity.Mythron
rarity =
id: Rarity.Mythron
devName: "<NAME>"
name: "<NAME>"
spiritCost: 1200
spiritReward: 0
spiritCostPrismatic: 2400
spiritRewardPrismatic: 1200
bonusRewardCount: 0
color: {r: 189, g: 189, b: 189}
hex: "#BDBDBD"
isHiddenToUI: false
# no faction found
if rarity
return rarity
else
console.error "RarityFactory.rarityForIdentifier - Unknown rarity identifier: #{identifier}".red
@getIsRarityTypeCraftable: (rarityType) ->
return rarityType != Rarity.Fixed and rarityType != Rarity.TokenUnit
@getAllRarities: () ->
rarities = []
for rarityName of Rarity
identifier = Rarity[rarityName]
rarity = @rarityForIdentifier(identifier)
if rarity?
rarities.push(rarity)
return rarities
module.exports = RarityFactory
| true |
Rarity = require './rarityLookup'
i18next = require 'i18next'
class RarityFactory
@rarityForIdentifier: (identifier) ->
rarity = null
if identifier == Rarity.Fixed
rarity =
id: Rarity.Fixed
name: i18next.t("rarity.rarity_basic")
devName: "basic"
spiritCost: 0
spiritReward: 0
spiritCostPrismatic: 0
spiritRewardPrismatic: 0
bonusRewardCount: 0
color: {r: 255, g: 255, b: 255}
hex: "#CCCCCC"
isHiddenToUI: false
if identifier == Rarity.Common
rarity =
id: Rarity.Common
name: i18next.t("rarity.rarity_common")
devName: "common"
spiritCost: 40
spiritReward: 10
spiritCostPrismatic: 200
spiritRewardPrismatic: 40
cosmeticHardPrice: 100
spiritCostCosmetic: 500
spiritRewardCosmetic: 50
bonusRewardCount: 1
color: {r: 255, g: 255, b: 255}
hex: "#CCCCCC"
isHiddenToUI: false
if identifier == Rarity.Rare
rarity =
id: Rarity.Rare
name: i18next.t("rarity.rarity_rare")
devName: "rare"
spiritCost: 100
spiritReward: 20
spiritCostPrismatic: 350
spiritRewardPrismatic: 100
cosmeticHardPrice: 150
spiritCostCosmetic: 1000
spiritRewardCosmetic: 100
bonusRewardCount: 1
color: {r: 56, g: 93, b: 255}
hex: "#6dcff6"
isHiddenToUI: false
if identifier == Rarity.Epic
rarity =
id: Rarity.Epic
name: i18next.t("rarity.rarity_epic")
devName: "epic"
spiritCost: 350
spiritReward: 100
spiritCostPrismatic: 900
spiritRewardPrismatic: 350
cosmeticHardPrice: 200
spiritCostCosmetic: 1500
spiritRewardCosmetic: 150
bonusRewardCount: 2
color: {r: 144, g: 41, b: 255}
hex: "#f49ac1"
isHiddenToUI: false
if identifier == Rarity.Legendary
rarity =
id: Rarity.Legendary
name: i18next.t("rarity.rarity_legendary")
devName: "legendary"
spiritCost: 900
spiritReward: 350
spiritCostPrismatic: 1800
spiritRewardPrismatic: 900
cosmeticHardPrice: 400
spiritCostCosmetic: 3000
spiritRewardCosmetic: 350
bonusRewardCount: 2
color: {r: 255, g: 120, b: 0}
hex: "#ffac49"
isHiddenToUI: false
if identifier == Rarity.TokenUnit
rarity =
id: Rarity.TokenUnit
devName: "token"
name: "Token"
spiritCost: 0
spiritReward: 0
bonusRewardCount: 0
color: {r: 189, g: 189, b: 189}
hex: "#BDBDBD"
isHiddenToUI: true
if identifier == Rarity.Mythron
rarity =
id: Rarity.Mythron
devName: "PI:NAME:<NAME>END_PI"
name: "PI:NAME:<NAME>END_PI"
spiritCost: 1200
spiritReward: 0
spiritCostPrismatic: 2400
spiritRewardPrismatic: 1200
bonusRewardCount: 0
color: {r: 189, g: 189, b: 189}
hex: "#BDBDBD"
isHiddenToUI: false
# no faction found
if rarity
return rarity
else
console.error "RarityFactory.rarityForIdentifier - Unknown rarity identifier: #{identifier}".red
@getIsRarityTypeCraftable: (rarityType) ->
return rarityType != Rarity.Fixed and rarityType != Rarity.TokenUnit
@getAllRarities: () ->
rarities = []
for rarityName of Rarity
identifier = Rarity[rarityName]
rarity = @rarityForIdentifier(identifier)
if rarity?
rarities.push(rarity)
return rarities
module.exports = RarityFactory
|
[
{
"context": "\n helper.store.admin.setup helper._tenantId,\"martin\",\"password1\",\"martin@wawrusch.com\",null,{}, (err,",
"end": 564,
"score": 0.9192930459976196,
"start": 558,
"tag": "USERNAME",
"value": "martin"
},
{
"context": "lper.store.admin.setup helper._tenantId,\"mar... | test/method-admin-tests.coffee | codedoctor/mongoose-user-store-multi-tenant | 4 | _ = require 'underscore'
should = require 'should'
helper = require './support/helper'
mongoHelper = require './support/mongo-helper'
sampleUsers = null
describe 'testing admin functions', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.roles
describe 'WHEN setting up an account', ->
it 'SHOULD CREATE ONE', (cb) ->
data =
name : "role1"
description: "desc1"
isInternal : false
helper.store.admin.setup helper._tenantId,"martin","password1","martin@wawrusch.com",null,{}, (err,user) ->
return cb err if err
should.exist user
cb()
| 222214 | _ = require 'underscore'
should = require 'should'
helper = require './support/helper'
mongoHelper = require './support/mongo-helper'
sampleUsers = null
describe 'testing admin functions', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.roles
describe 'WHEN setting up an account', ->
it 'SHOULD CREATE ONE', (cb) ->
data =
name : "role1"
description: "desc1"
isInternal : false
helper.store.admin.setup helper._tenantId,"martin","<PASSWORD>1","<EMAIL>",null,{}, (err,user) ->
return cb err if err
should.exist user
cb()
| true | _ = require 'underscore'
should = require 'should'
helper = require './support/helper'
mongoHelper = require './support/mongo-helper'
sampleUsers = null
describe 'testing admin functions', ->
before (cb) ->
helper.start null, cb
after (cb) ->
helper.stop cb
it 'should exist', ->
should.exist helper.store.roles
describe 'WHEN setting up an account', ->
it 'SHOULD CREATE ONE', (cb) ->
data =
name : "role1"
description: "desc1"
isInternal : false
helper.store.admin.setup helper._tenantId,"martin","PI:PASSWORD:<PASSWORD>END_PI1","PI:EMAIL:<EMAIL>END_PI",null,{}, (err,user) ->
return cb err if err
should.exist user
cb()
|
[
{
"context": "###\nRecipe Step Base Class\n\n@author Torstein Thune\n@copyright 2015 Microbrew.it\n###\nclass Step\n\n\tFer",
"end": 50,
"score": 0.9998751878738403,
"start": 36,
"tag": "NAME",
"value": "Torstein Thune"
}
] | src/steps/Step.coffee | Microbrewit/microbrewit-recipe | 1 | ###
Recipe Step Base Class
@author Torstein Thune
@copyright 2015 Microbrew.it
###
class Step
Fermentable: require './ingredients/Fermentable'
Hop: require './ingredients/Hop'
Other: require './ingredients/Other'
Yeast: require './ingredients/Yeast'
_calc:
bitterness: {}
mcu: 0
gravityPoints: 0
# Recalculate step meta
recalc: ->
@_calc.bitterness = {}
@_calc.mcu = 0
@_calc.gravityPoints = 0
for ingredient in ingredients
if ingredient.bitterness?
for formula, val in ingredient.bitterness
unless @_calc.bitterness[formula]
@_calc.bitterness[formula] = val
else
@_calc.bitterness[formula] += val
if ingredient.mcu
@_calc.mcu += ingredient.mcu
if ingredient.gravityPoints
@_calc.gravityPoints += ingredient.gravityPoints
# Add ingredient to step
# @param [String] type Type of ingredient
# @param [Object] config The ingredient metadata
addIngredient: (type, config) ->
old = _.clone @model
config = _.clone config if config
switch type
when 'fermentable'
@model.ingredients.push new @Fermentable config
when 'hop'
@model.ingredients.push new @Hop config
when 'other'
@model.ingredients.push new @Other config
when 'yeast'
@model.ingredients.push new @Yeast config
@calculateMeta()
@updated(old, @model)
# Remove an ingredient from the step
# @param [Object, Integer] mixed Either a Ingredient instance or a index
# @todo
removeIngredient: (mixed) ->
if typeof mixed is 'number'
@ingredients.splice(mixed, 1)
getIngredientIndex: (object) ->
moveIngredient: (from, to) ->
module.exports = Step | 190929 | ###
Recipe Step Base Class
@author <NAME>
@copyright 2015 Microbrew.it
###
class Step
Fermentable: require './ingredients/Fermentable'
Hop: require './ingredients/Hop'
Other: require './ingredients/Other'
Yeast: require './ingredients/Yeast'
_calc:
bitterness: {}
mcu: 0
gravityPoints: 0
# Recalculate step meta
recalc: ->
@_calc.bitterness = {}
@_calc.mcu = 0
@_calc.gravityPoints = 0
for ingredient in ingredients
if ingredient.bitterness?
for formula, val in ingredient.bitterness
unless @_calc.bitterness[formula]
@_calc.bitterness[formula] = val
else
@_calc.bitterness[formula] += val
if ingredient.mcu
@_calc.mcu += ingredient.mcu
if ingredient.gravityPoints
@_calc.gravityPoints += ingredient.gravityPoints
# Add ingredient to step
# @param [String] type Type of ingredient
# @param [Object] config The ingredient metadata
addIngredient: (type, config) ->
old = _.clone @model
config = _.clone config if config
switch type
when 'fermentable'
@model.ingredients.push new @Fermentable config
when 'hop'
@model.ingredients.push new @Hop config
when 'other'
@model.ingredients.push new @Other config
when 'yeast'
@model.ingredients.push new @Yeast config
@calculateMeta()
@updated(old, @model)
# Remove an ingredient from the step
# @param [Object, Integer] mixed Either a Ingredient instance or a index
# @todo
removeIngredient: (mixed) ->
if typeof mixed is 'number'
@ingredients.splice(mixed, 1)
getIngredientIndex: (object) ->
moveIngredient: (from, to) ->
module.exports = Step | true | ###
Recipe Step Base Class
@author PI:NAME:<NAME>END_PI
@copyright 2015 Microbrew.it
###
class Step
Fermentable: require './ingredients/Fermentable'
Hop: require './ingredients/Hop'
Other: require './ingredients/Other'
Yeast: require './ingredients/Yeast'
_calc:
bitterness: {}
mcu: 0
gravityPoints: 0
# Recalculate step meta
recalc: ->
@_calc.bitterness = {}
@_calc.mcu = 0
@_calc.gravityPoints = 0
for ingredient in ingredients
if ingredient.bitterness?
for formula, val in ingredient.bitterness
unless @_calc.bitterness[formula]
@_calc.bitterness[formula] = val
else
@_calc.bitterness[formula] += val
if ingredient.mcu
@_calc.mcu += ingredient.mcu
if ingredient.gravityPoints
@_calc.gravityPoints += ingredient.gravityPoints
# Add ingredient to step
# @param [String] type Type of ingredient
# @param [Object] config The ingredient metadata
addIngredient: (type, config) ->
old = _.clone @model
config = _.clone config if config
switch type
when 'fermentable'
@model.ingredients.push new @Fermentable config
when 'hop'
@model.ingredients.push new @Hop config
when 'other'
@model.ingredients.push new @Other config
when 'yeast'
@model.ingredients.push new @Yeast config
@calculateMeta()
@updated(old, @model)
# Remove an ingredient from the step
# @param [Object, Integer] mixed Either a Ingredient instance or a index
# @todo
removeIngredient: (mixed) ->
if typeof mixed is 'number'
@ingredients.splice(mixed, 1)
getIngredientIndex: (object) ->
moveIngredient: (from, to) ->
module.exports = Step |
[
{
"context": "nsole.log(timeDiff)\n res = JSON.stringify(@userpassdict)\n console.log \"json created\"\n f",
"end": 1897,
"score": 0.9967989325523376,
"start": 1884,
"tag": "USERNAME",
"value": "@userpassdict"
},
{
"context": "eated\"\n fs.writeFil... | perfomance_simulations/idbp/create_json.coffee | lucy7li/compromised-credential-checking | 6 | fs = require 'fs';
zlib = require 'zlib';
readline = require 'readline';
crypto = require 'crypto';
ecc = require 'elliptic';
EC = ecc.ec;
ec = new EC('secp256k1');
class UserpassJson
constructor: (@filename) ->
@userpassdict = {}
@key = ec.genKeyPair();
@lineReader = readline.createInterface({
input: fs.createReadStream(@filename)
});
save_key: () ->
fs.writeFileSync('key.txt', @key.getPrivate('hex'))
trim : (s) ->
( s || '' ).replace( /^\s+|\s+$/g, '' );
get_hash_enc: (username,password) =>
str = username.concat(password)
msg = crypto.createHash("sha256").update(str).digest('hex');
key_user = ec.keyFromPrivate(msg,'hex');
public_key = key_user.getPublic().mul(@key.getPrivate()).encode('hex')
hash_prefix = msg[0..3]
if (! @userpassdict[hash_prefix])
pass_list = [public_key]
@userpassdict[hash_prefix] = pass_list;
else
pass_set = new Set(@userpassdict[hash_prefix])
if ! pass_set.has(public_key)
@userpassdict[hash_prefix].push(public_key);
store_json: =>
i = 0
startTime = new Date();
@lineReader.on 'line', (line) =>
if(i%1000000 == 0)
endTime = new Date();
timeDiff = endTime - startTime;
timeDiff /= 1000;
console.log(timeDiff)
console.log(i)
words = @trim(line.toString()).split('\t')
@get_hash_enc(words[0],words[1])
i = i + 1
@lineReader.on 'close', () =>
endTime = new Date();
timeDiff = endTime - startTime;
timeDiff /= 1000;
console.log(timeDiff)
res = JSON.stringify(@userpassdict)
console.log "json created"
fs.writeFileSync('username_pass_10m.json', res);
console.log "stored"
print_ngram: =>
console.log "here"
console.log @ngrams[123]
console.log "done"
module.exports = UserpassJson | 41305 | fs = require 'fs';
zlib = require 'zlib';
readline = require 'readline';
crypto = require 'crypto';
ecc = require 'elliptic';
EC = ecc.ec;
ec = new EC('secp256k1');
class UserpassJson
constructor: (@filename) ->
@userpassdict = {}
@key = ec.genKeyPair();
@lineReader = readline.createInterface({
input: fs.createReadStream(@filename)
});
save_key: () ->
fs.writeFileSync('key.txt', @key.getPrivate('hex'))
trim : (s) ->
( s || '' ).replace( /^\s+|\s+$/g, '' );
get_hash_enc: (username,password) =>
str = username.concat(password)
msg = crypto.createHash("sha256").update(str).digest('hex');
key_user = ec.keyFromPrivate(msg,'hex');
public_key = key_user.getPublic().mul(@key.getPrivate()).encode('hex')
hash_prefix = msg[0..3]
if (! @userpassdict[hash_prefix])
pass_list = [public_key]
@userpassdict[hash_prefix] = pass_list;
else
pass_set = new Set(@userpassdict[hash_prefix])
if ! pass_set.has(public_key)
@userpassdict[hash_prefix].push(public_key);
store_json: =>
i = 0
startTime = new Date();
@lineReader.on 'line', (line) =>
if(i%1000000 == 0)
endTime = new Date();
timeDiff = endTime - startTime;
timeDiff /= 1000;
console.log(timeDiff)
console.log(i)
words = @trim(line.toString()).split('\t')
@get_hash_enc(words[0],words[1])
i = i + 1
@lineReader.on 'close', () =>
endTime = new Date();
timeDiff = endTime - startTime;
timeDiff /= 1000;
console.log(timeDiff)
res = JSON.stringify(@userpassdict)
console.log "json created"
fs.writeFileSync('username_pass_1<PASSWORD>.json', res);
console.log "stored"
print_ngram: =>
console.log "here"
console.log @ngrams[123]
console.log "done"
module.exports = UserpassJson | true | fs = require 'fs';
zlib = require 'zlib';
readline = require 'readline';
crypto = require 'crypto';
ecc = require 'elliptic';
EC = ecc.ec;
ec = new EC('secp256k1');
class UserpassJson
constructor: (@filename) ->
@userpassdict = {}
@key = ec.genKeyPair();
@lineReader = readline.createInterface({
input: fs.createReadStream(@filename)
});
save_key: () ->
fs.writeFileSync('key.txt', @key.getPrivate('hex'))
trim : (s) ->
( s || '' ).replace( /^\s+|\s+$/g, '' );
get_hash_enc: (username,password) =>
str = username.concat(password)
msg = crypto.createHash("sha256").update(str).digest('hex');
key_user = ec.keyFromPrivate(msg,'hex');
public_key = key_user.getPublic().mul(@key.getPrivate()).encode('hex')
hash_prefix = msg[0..3]
if (! @userpassdict[hash_prefix])
pass_list = [public_key]
@userpassdict[hash_prefix] = pass_list;
else
pass_set = new Set(@userpassdict[hash_prefix])
if ! pass_set.has(public_key)
@userpassdict[hash_prefix].push(public_key);
store_json: =>
i = 0
startTime = new Date();
@lineReader.on 'line', (line) =>
if(i%1000000 == 0)
endTime = new Date();
timeDiff = endTime - startTime;
timeDiff /= 1000;
console.log(timeDiff)
console.log(i)
words = @trim(line.toString()).split('\t')
@get_hash_enc(words[0],words[1])
i = i + 1
@lineReader.on 'close', () =>
endTime = new Date();
timeDiff = endTime - startTime;
timeDiff /= 1000;
console.log(timeDiff)
res = JSON.stringify(@userpassdict)
console.log "json created"
fs.writeFileSync('username_pass_1PI:PASSWORD:<PASSWORD>END_PI.json', res);
console.log "stored"
print_ngram: =>
console.log "here"
console.log @ngrams[123]
console.log "done"
module.exports = UserpassJson |
[
{
"context": "ageRenderer\n\n @PROJECT_URL = \"noahsug.github.com/circ\"\n\n constructor: (@win) ->\n @_resetActivityMar",
"end": 233,
"score": 0.9923600554466248,
"start": 229,
"tag": "USERNAME",
"value": "circ"
},
{
"context": "e about group'\n @message '', \" * UI mocks... | src/chat/window_message_renderer.coffee | nornagon/ircv | 20 | exports = (window.chat ?= {}).window ?= {}
##
# Handles outputing text to the window and provides functions to display
# some specific messages like help and about.
##
class MessageRenderer
@PROJECT_URL = "noahsug.github.com/circ"
constructor: (@win) ->
@_resetActivityMarker = false
@_activityMarkerLocation = undefined
onFocus: ->
@_resetActivityMarker = @win.$messages.children().length > 0
displayWelcome: ->
@message()
@message '', "Welcome to CIRC, a packaged Chrome app.", "system"
@message '', @_getWebsiteBlurb(), 'system'
@message '', "Type /server <server> [port] to connect, then /nick " +
"<my_nick> and /join <#channel>.", "system"
@message '', "Type /help to see a full list of commands.", "system"
@message '', "Switch windows with alt+[0-9] or click in the channel " +
"list on the left.", "system"
displayHelp: (commands) ->
@message()
@message '', "Commands Available:", 'notice help'
@_printCommands commands
@message '', "Type /help <command> to see details about a specific command.",
'notice help'
@message '', @_getWebsiteBlurb(), 'notice help'
displayAbout: ->
@message()
@message '', "CIRC is a packaged Chrome app developed by Google Inc. " +
@_getWebsiteBlurb(), 'notice about'
@message '', "Version: #{irc.VERSION}", 'notice about'
@message '', "Contributors:", 'notice about group'
@message '', " * UI mocks by Fravic Fernando (fravicf@gmail.com)", 'notice about group'
_getWebsiteBlurb: ->
"Documentation, issues and source code live at " +
"#{MessageRenderer.PROJECT_URL}."
_printCommands: (commands) ->
maxWidth = 40
style = 'notice help monospace group'
widthPerCommand = @_getMaxCommandWidth commands
commandsPerLine = maxWidth / Math.floor widthPerCommand
line = []
for command, i in commands
line.push @_fillWithWhiteSpace command, widthPerCommand
if line.length >= commandsPerLine or i >= commands.length - 1
@message '', line.join(' '), style
line = []
_getMaxCommandWidth: (commands) ->
maxWidth = 0
for command in commands
if command.length > maxWidth
maxWidth = command.length
maxWidth
_fillWithWhiteSpace: (command, maxCommandWidth) ->
space = (' ' for i in [0..maxCommandWidth-1]).join ''
return command + space.slice 0, maxCommandWidth - command.length
message: (from='', msg='', style...) ->
wasScrolledDown = @win.isScrolledDown()
from = html.escape from
msg = html.display msg
style = style.join ' '
@_addMessage from, msg, style
if wasScrolledDown
@win.scrollToBottom()
@_displayActivityMarker() if @_shouldDisplayActivityMarker()
_addMessage: (from, msg, style) ->
message = $('#templates .message').clone()
message.addClass style
$('.source', message).html from
$('.content', message).html msg
$('.source', message).addClass('empty') unless from
@win.emit 'message', @win.getContext(), style, message[0].outerHTML
@win.$messages.append message
_shouldDisplayActivityMarker: ->
return not @win.isFocused() and @_resetActivityMarker
_displayActivityMarker: ->
@_resetActivityMarker = false
if @_activityMarkerLocation
@_activityMarkerLocation.removeClass 'activity-marker'
@_activityMarkerLocation = @win.$messages.children().last()
@_activityMarkerLocation.addClass 'activity-marker'
exports.MessageRenderer = MessageRenderer | 223417 | exports = (window.chat ?= {}).window ?= {}
##
# Handles outputing text to the window and provides functions to display
# some specific messages like help and about.
##
class MessageRenderer
@PROJECT_URL = "noahsug.github.com/circ"
constructor: (@win) ->
@_resetActivityMarker = false
@_activityMarkerLocation = undefined
onFocus: ->
@_resetActivityMarker = @win.$messages.children().length > 0
displayWelcome: ->
@message()
@message '', "Welcome to CIRC, a packaged Chrome app.", "system"
@message '', @_getWebsiteBlurb(), 'system'
@message '', "Type /server <server> [port] to connect, then /nick " +
"<my_nick> and /join <#channel>.", "system"
@message '', "Type /help to see a full list of commands.", "system"
@message '', "Switch windows with alt+[0-9] or click in the channel " +
"list on the left.", "system"
displayHelp: (commands) ->
@message()
@message '', "Commands Available:", 'notice help'
@_printCommands commands
@message '', "Type /help <command> to see details about a specific command.",
'notice help'
@message '', @_getWebsiteBlurb(), 'notice help'
displayAbout: ->
@message()
@message '', "CIRC is a packaged Chrome app developed by Google Inc. " +
@_getWebsiteBlurb(), 'notice about'
@message '', "Version: #{irc.VERSION}", 'notice about'
@message '', "Contributors:", 'notice about group'
@message '', " * UI mocks by <NAME> (<EMAIL>)", 'notice about group'
_getWebsiteBlurb: ->
"Documentation, issues and source code live at " +
"#{MessageRenderer.PROJECT_URL}."
_printCommands: (commands) ->
maxWidth = 40
style = 'notice help monospace group'
widthPerCommand = @_getMaxCommandWidth commands
commandsPerLine = maxWidth / Math.floor widthPerCommand
line = []
for command, i in commands
line.push @_fillWithWhiteSpace command, widthPerCommand
if line.length >= commandsPerLine or i >= commands.length - 1
@message '', line.join(' '), style
line = []
_getMaxCommandWidth: (commands) ->
maxWidth = 0
for command in commands
if command.length > maxWidth
maxWidth = command.length
maxWidth
_fillWithWhiteSpace: (command, maxCommandWidth) ->
space = (' ' for i in [0..maxCommandWidth-1]).join ''
return command + space.slice 0, maxCommandWidth - command.length
message: (from='', msg='', style...) ->
wasScrolledDown = @win.isScrolledDown()
from = html.escape from
msg = html.display msg
style = style.join ' '
@_addMessage from, msg, style
if wasScrolledDown
@win.scrollToBottom()
@_displayActivityMarker() if @_shouldDisplayActivityMarker()
_addMessage: (from, msg, style) ->
message = $('#templates .message').clone()
message.addClass style
$('.source', message).html from
$('.content', message).html msg
$('.source', message).addClass('empty') unless from
@win.emit 'message', @win.getContext(), style, message[0].outerHTML
@win.$messages.append message
_shouldDisplayActivityMarker: ->
return not @win.isFocused() and @_resetActivityMarker
_displayActivityMarker: ->
@_resetActivityMarker = false
if @_activityMarkerLocation
@_activityMarkerLocation.removeClass 'activity-marker'
@_activityMarkerLocation = @win.$messages.children().last()
@_activityMarkerLocation.addClass 'activity-marker'
exports.MessageRenderer = MessageRenderer | true | exports = (window.chat ?= {}).window ?= {}
##
# Handles outputing text to the window and provides functions to display
# some specific messages like help and about.
##
class MessageRenderer
@PROJECT_URL = "noahsug.github.com/circ"
constructor: (@win) ->
@_resetActivityMarker = false
@_activityMarkerLocation = undefined
onFocus: ->
@_resetActivityMarker = @win.$messages.children().length > 0
displayWelcome: ->
@message()
@message '', "Welcome to CIRC, a packaged Chrome app.", "system"
@message '', @_getWebsiteBlurb(), 'system'
@message '', "Type /server <server> [port] to connect, then /nick " +
"<my_nick> and /join <#channel>.", "system"
@message '', "Type /help to see a full list of commands.", "system"
@message '', "Switch windows with alt+[0-9] or click in the channel " +
"list on the left.", "system"
displayHelp: (commands) ->
@message()
@message '', "Commands Available:", 'notice help'
@_printCommands commands
@message '', "Type /help <command> to see details about a specific command.",
'notice help'
@message '', @_getWebsiteBlurb(), 'notice help'
displayAbout: ->
@message()
@message '', "CIRC is a packaged Chrome app developed by Google Inc. " +
@_getWebsiteBlurb(), 'notice about'
@message '', "Version: #{irc.VERSION}", 'notice about'
@message '', "Contributors:", 'notice about group'
@message '', " * UI mocks by PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)", 'notice about group'
_getWebsiteBlurb: ->
"Documentation, issues and source code live at " +
"#{MessageRenderer.PROJECT_URL}."
_printCommands: (commands) ->
maxWidth = 40
style = 'notice help monospace group'
widthPerCommand = @_getMaxCommandWidth commands
commandsPerLine = maxWidth / Math.floor widthPerCommand
line = []
for command, i in commands
line.push @_fillWithWhiteSpace command, widthPerCommand
if line.length >= commandsPerLine or i >= commands.length - 1
@message '', line.join(' '), style
line = []
_getMaxCommandWidth: (commands) ->
maxWidth = 0
for command in commands
if command.length > maxWidth
maxWidth = command.length
maxWidth
_fillWithWhiteSpace: (command, maxCommandWidth) ->
space = (' ' for i in [0..maxCommandWidth-1]).join ''
return command + space.slice 0, maxCommandWidth - command.length
message: (from='', msg='', style...) ->
wasScrolledDown = @win.isScrolledDown()
from = html.escape from
msg = html.display msg
style = style.join ' '
@_addMessage from, msg, style
if wasScrolledDown
@win.scrollToBottom()
@_displayActivityMarker() if @_shouldDisplayActivityMarker()
_addMessage: (from, msg, style) ->
message = $('#templates .message').clone()
message.addClass style
$('.source', message).html from
$('.content', message).html msg
$('.source', message).addClass('empty') unless from
@win.emit 'message', @win.getContext(), style, message[0].outerHTML
@win.$messages.append message
_shouldDisplayActivityMarker: ->
return not @win.isFocused() and @_resetActivityMarker
_displayActivityMarker: ->
@_resetActivityMarker = false
if @_activityMarkerLocation
@_activityMarkerLocation.removeClass 'activity-marker'
@_activityMarkerLocation = @win.$messages.children().last()
@_activityMarkerLocation.addClass 'activity-marker'
exports.MessageRenderer = MessageRenderer |
[
{
"context": "localeCompare(a.id)\n @classrooms.fetchByOwner(@teacherID)\n @supermodel.trackCollection(@classrooms)\n ",
"end": 5170,
"score": 0.9687425494194031,
"start": 5161,
"tag": "USERNAME",
"value": "teacherID"
},
{
"context": "ourseInstances()\n @courseInstances.fe... | app/views/courses/TeacherClassesView.coffee | Dakicka/codecombat | 0 | require('app/styles/courses/teacher-classes-view.sass')
RootView = require 'views/core/RootView'
template = require 'templates/courses/teacher-classes-view'
Classroom = require 'models/Classroom'
Classrooms = require 'collections/Classrooms'
Courses = require 'collections/Courses'
Campaign = require 'models/Campaign'
Campaigns = require 'collections/Campaigns'
LevelSessions = require 'collections/LevelSessions'
CourseInstance = require 'models/CourseInstance'
CourseInstances = require 'collections/CourseInstances'
ClassroomSettingsModal = require 'views/courses/ClassroomSettingsModal'
CourseNagSubview = require 'views/teachers/CourseNagSubview'
Prepaids = require 'collections/Prepaids'
Users = require 'collections/Users'
User = require 'models/User'
utils = require 'core/utils'
storage = require 'core/storage'
GoogleClassroomHandler = require('core/social-handlers/GoogleClassroomHandler')
co = require('co')
helper = require 'lib/coursesHelper'
translateWithMarkdown = (label) ->
marked.inlineLexer $.i18n.t(label), []
# TODO: if this proves useful, make a simple admin page with a Treema for editing office hours in db
officeHours = [
{time: moment('2018-02-28 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/307c335ddb1ee6ef7510d14dfea9e911', host: 'David', name: 'CodeCombat for Beginner Teachers'}
{time: moment('2018-03-07 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/a1a6f5f4eb7a0a387c24e00bf0acd2b8', host: 'Nolan', name: 'CodeCombat: Beyond Block-Based Coding'}
{time: moment('2018-03-15 12:30-08').toDate(), link: 'https://zoom.us/meeting/register/16f0a6b4122087667c24e00bf0acd2b8', host: 'Sean', name: 'Building Student Engagement with CodeCombat'}
{time: moment('2018-03-21 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/4e7eb093f8689e21c5b9141539e44ee6', host: 'Liz', name: 'CodeCombat for Beginner Teachers'}
]
module.exports = class TeacherClassesView extends RootView
id: 'teacher-classes-view'
template: template
helper: helper
translateWithMarkdown: translateWithMarkdown
# TODO: where to track/save this data?
teacherQuestData:
'create_classroom':
title: translateWithMarkdown('teacher.teacher_quest_create_classroom')
'add_students':
title: translateWithMarkdown('teacher.teacher_quest_add_students')
'teach_methods':
title: translateWithMarkdown('teacher.teacher_quest_teach_methods')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_methods_step1')
translateWithMarkdown('teacher.teacher_quest_teach_methods_step2')
]
'teach_strings':
title: translateWithMarkdown('teacher.teacher_quest_teach_strings')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_strings_step1')
translateWithMarkdown('teacher.teacher_quest_teach_strings_step2')
]
'teach_loops':
title: translateWithMarkdown('teacher.teacher_quest_teach_loops')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_loops_step1')
translateWithMarkdown('teacher.teacher_quest_teach_loops_step2')
]
'teach_variables':
title: translateWithMarkdown('teacher.teacher_quest_teach_variables')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_variables_step1')
translateWithMarkdown('teacher.teacher_quest_teach_variables_step2')
]
'kithgard_gates_100':
title: translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100')
steps: [
translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100_step1')
translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100_step2')
]
'wakka_maul_100':
title: translateWithMarkdown('teacher.teacher_quest_wakka_maul_100')
steps: [
translateWithMarkdown('teacher.teacher_quest_wakka_maul_100_step1')
translateWithMarkdown('teacher.teacher_quest_wakka_maul_100_step2')
]
'reach_gamedev':
title: translateWithMarkdown('teacher.teacher_quest_reach_gamedev')
steps: [
translateWithMarkdown('teacher.teacher_quest_reach_gamedev_step1')
]
events:
'click .edit-classroom': 'onClickEditClassroom'
'click .archive-classroom': 'onClickArchiveClassroom'
'click .unarchive-classroom': 'onClickUnarchiveClassroom'
'click .create-classroom-btn': 'openNewClassroomModal'
'click .create-teacher-btn': 'onClickCreateTeacherButton'
'click .update-teacher-btn': 'onClickUpdateTeacherButton'
'click .view-class-btn': 'onClickViewClassButton'
'click .see-all-quests': 'onClickSeeAllQuests'
'click .see-less-quests': 'onClickSeeLessQuests'
'click .see-all-office-hours': 'onClickSeeAllOfficeHours'
'click .see-less-office-hours': 'onClickSeeLessOfficeHours'
'click .see-no-office-hours': 'onClickSeeNoOfficeHours'
getTitle: -> $.i18n.t 'teacher.my_classes'
initialize: (options) ->
super(options)
@teacherID = (me.isAdmin() and utils.getQueryVariable('teacherID')) or me.id
@classrooms = new Classrooms()
@classrooms.comparator = (a, b) -> b.id.localeCompare(a.id)
@classrooms.fetchByOwner(@teacherID)
@supermodel.trackCollection(@classrooms)
@listenTo @classrooms, 'sync', ->
for classroom in @classrooms.models
continue if classroom.get('archived')
classroom.sessions = new LevelSessions()
Promise.all(classroom.sessions.fetchForAllClassroomMembers(
classroom,
{
data: {
project: 'state.complete,level,creator,changed,created,dateFirstCompleted,submitted,codeConcepts'
}
}
))
.then (results) =>
return if @destroyed
helper.calculateDots(@classrooms, @courses, @courseInstances)
@calculateQuestCompletion()
@render()
window.tracker?.trackEvent 'Teachers Classes Loaded', category: 'Teachers', ['Mixpanel']
@courses = new Courses()
@courses.fetch()
@supermodel.trackCollection(@courses)
@courseInstances = new CourseInstances()
@courseInstances.fetchByOwner(@teacherID)
@supermodel.trackCollection(@courseInstances)
@progressDotTemplate = require 'templates/teachers/hovers/progress-dot-whole-course'
@prepaids = new Prepaids()
@supermodel.trackRequest @prepaids.fetchByCreator(me.id)
earliestHourTime = new Date() - 60 * 60 * 1000
latestHourTime = new Date() - -21 * 24 * 60 * 60 * 1000
@upcomingOfficeHours = _.sortBy (oh for oh in officeHours when earliestHourTime < oh.time < latestHourTime), 'time'
@howManyOfficeHours = if storage.load('hide-office-hours') then 'none' else 'some'
me.getClientCreatorPermissions()?.then(() =>
@calculateQuestCompletion()
@render?()
)
administratingTeacherIds = me.get('administratingTeachers') || []
@administratingTeachers = new Users()
if administratingTeacherIds.length > 0
req = @administratingTeachers.fetchByIds(administratingTeacherIds)
@supermodel.trackRequest req
# TODO: Any reference to paidTeacher can be cleaned up post Teacher Appreciation week (after 2019-05-03)
@paidTeacher = me.isAdmin() or me.isTeacher() and /@codeninjas.com$/i.test me.get('email')
# Level Sessions loaded after onLoaded to prevent race condition in calculateDots
afterRender: ->
super()
unless @courseNagSubview
@courseNagSubview = new CourseNagSubview()
@insertSubView(@courseNagSubview)
$('.progress-dot').each (i, el) ->
dot = $(el)
dot.tooltip({
html: true
container: dot
})
calculateQuestCompletion: ->
@teacherQuestData['create_classroom'].complete = @classrooms.length > 0
for classroom in @classrooms.models
continue unless classroom.get('members')?.length > 0 and classroom.sessions
classCompletion = {}
classCompletion[key] = 0 for key in Object.keys(@teacherQuestData)
students = classroom.get('members')?.length
kithgardGatesCompletes = 0
wakkaMaulCompletes = 0
for session in classroom.sessions.models
if session.get('level')?.original is '541c9a30c6362edfb0f34479' # kithgard-gates
++classCompletion['kithgard_gates_100']
if session.get('level')?.original is '5630eab0c0fcbd86057cc2f8' # wakka-maul
++classCompletion['wakka_maul_100']
continue unless session.get('state')?.complete
if session.get('level')?.original is '5411cb3769152f1707be029c' # dungeons-of-kithgard
++classCompletion['teach_methods']
if session.get('level')?.original is '541875da4c16460000ab990f' # true-names
++classCompletion['teach_strings']
if session.get('level')?.original is '55ca293b9bc1892c835b0136' # fire-dancing
++classCompletion['teach_loops']
if session.get('level')?.original is '5452adea57e83800009730ee' # known-enemy
++classCompletion['teach_variables']
classCompletion[k] /= students for k of classCompletion
classCompletion['add_students'] = if students > 0 then 1.0 else 0.0
if @prepaids.length > 0 or !me.canManageLicensesViaUI()
classCompletion['reach_gamedev'] = 1.0
else
classCompletion['reach_gamedev'] = 0.0
@teacherQuestData[k].complete ||= v > 0.74 for k,v of classCompletion
@teacherQuestData[k].best = Math.max(@teacherQuestData[k].best||0,v) for k,v of classCompletion
onLoaded: ->
helper.calculateDots(@classrooms, @courses, @courseInstances)
@calculateQuestCompletion()
@paidTeacher = @paidTeacher or @prepaids.find((p) => p.get('type') in ['course', 'starter_license'] and p.get('maxRedeemers') > 0)?
if me.isTeacher() and not @classrooms.length
@openNewClassroomModal()
super()
onClickEditClassroom: (e) ->
classroomID = $(e.target).data('classroom-id')
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', classroomID: classroomID, ['Mixpanel']
classroom = @classrooms.get(classroomID)
modal = new ClassroomSettingsModal({ classroom: classroom })
@openModalView(modal)
@listenToOnce modal, 'hide', ->
@calculateQuestCompletion()
@render()
openNewClassroomModal: ->
return unless me.id is @teacherID # Viewing page as admin
window.tracker?.trackEvent 'Teachers Classes Create New Class Started', category: 'Teachers', ['Mixpanel']
classroom = new Classroom({ ownerID: me.id })
modal = new ClassroomSettingsModal({ classroom: classroom })
@openModalView(modal)
@listenToOnce modal.classroom, 'sync', ->
window.tracker?.trackEvent 'Teachers Classes Create New Class Finished', category: 'Teachers', ['Mixpanel']
@classrooms.add(modal.classroom)
if modal.classroom.isGoogleClassroom()
GoogleClassroomHandler.markAsImported(classroom.get("googleClassroomId")).then(() => @render()).catch((e) => console.error(e))
classroom = modal.classroom
@addFreeCourseInstances()
.then(() =>
if classroom.isGoogleClassroom()
@importStudents(classroom)
.then (importedStudents) =>
@addImportedStudents(classroom, importedStudents)
, (_e) => {}
, (err) =>
if classroom.isGoogleClassroom()
noty text: 'Could not import students', layout: 'topCenter', timeout: 3000, type: 'error'
)
.then () =>
@calculateQuestCompletion()
@render()
importStudents: (classroom) ->
GoogleClassroomHandler.importStudentsToClassroom(classroom)
.then (importedStudents) =>
if importedStudents.length > 0
console.debug("Students imported to classroom:", importedStudents)
return Promise.resolve(importedStudents)
else
noty text: 'No new students imported', layout: 'topCenter', timeout: 3000, type: 'error'
return Promise.reject()
.catch (err) =>
noty text: err or 'Error in importing students', layout: 'topCenter', timeout: 3000, type: 'error'
return Promise.reject()
# Add imported students to @classrooms and @courseInstances so that they are rendered on the screen
addImportedStudents: (classroom, importedStudents) ->
cl = @classrooms.models.find((c) => c.get("_id") == classroom.get("_id"))
importedStudents.forEach((i) => cl.get("members").push(i._id))
for course in @courses.models
continue if not course.get('free')
courseInstance = @courseInstances.findWhere({classroomID: classroom.id, courseID: course.id})
if courseInstance
importedStudents.forEach((i) => courseInstance.get("members").push(i._id))
onClickCreateTeacherButton: (e) ->
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', ['Mixpanel']
application.router.navigate("/teachers/signup", { trigger: true })
onClickUpdateTeacherButton: (e) ->
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', ['Mixpanel']
application.router.navigate("/teachers/update-account", { trigger: true })
onClickArchiveClassroom: (e) ->
return unless me.id is @teacherID # Viewing page as admin
classroomID = $(e.currentTarget).data('classroom-id')
classroom = @classrooms.get(classroomID)
classroom.set('archived', true)
classroom.save {}, {
success: =>
window.tracker?.trackEvent 'Teachers Classes Archived Class', category: 'Teachers', ['Mixpanel']
@render()
}
onClickUnarchiveClassroom: (e) ->
return unless me.id is @teacherID # Viewing page as admin
classroomID = $(e.currentTarget).data('classroom-id')
classroom = @classrooms.get(classroomID)
classroom.set('archived', false)
classroom.save {}, {
success: =>
window.tracker?.trackEvent 'Teachers Classes Unarchived Class', category: 'Teachers', ['Mixpanel']
@render()
}
onClickViewClassButton: (e) ->
classroomID = $(e.target).data('classroom-id')
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', classroomID: classroomID, ['Mixpanel']
application.router.navigate("/teachers/classes/#{classroomID}", { trigger: true })
addFreeCourseInstances: co.wrap ->
# so that when students join the classroom, they can automatically get free courses
# non-free courses are generated when the teacher first adds a student to them
try
promises = []
for classroom in @classrooms.models
for course in @courses.models
continue if not course.get('free')
courseInstance = @courseInstances.findWhere({classroomID: classroom.id, courseID: course.id})
if not courseInstance
courseInstance = new CourseInstance({
classroomID: classroom.id
courseID: course.id
})
# TODO: figure out a better way to get around triggering validation errors for properties
# that the server will end up filling in, like an empty members array, ownerID
promises.push(new Promise(courseInstance.save(null, {validate: false}).then))
if (promises.length > 0)
courseInstances = yield Promise.all(promises)
@courseInstances.add(courseInstances) if courseInstances.length > 0
return
catch e
console.error("Error in adding free course instances")
return Promise.reject()
onClickSeeAllQuests: (e) =>
$(e.target).hide()
@$el.find('.see-less-quests').show()
@$el.find('.quest.hide').addClass('hide-revealed').removeClass('hide')
onClickSeeLessQuests: (e) =>
$(e.target).hide()
@$el.find('.see-all-quests').show()
@$el.find('.quest.hide-revealed').addClass('hide').removeClass('hide-revealed')
onClickSeeAllOfficeHours: (e) ->
@howManyOfficeHours = 'all'
@renderSelectors '#office-hours'
onClickSeeLessOfficeHours: (e) ->
@howManyOfficeHours = 'some'
@renderSelectors '#office-hours'
onClickSeeNoOfficeHours: (e) ->
@howManyOfficeHours = 'none'
@renderSelectors '#office-hours'
storage.save 'hide-office-hours', true
| 12508 | require('app/styles/courses/teacher-classes-view.sass')
RootView = require 'views/core/RootView'
template = require 'templates/courses/teacher-classes-view'
Classroom = require 'models/Classroom'
Classrooms = require 'collections/Classrooms'
Courses = require 'collections/Courses'
Campaign = require 'models/Campaign'
Campaigns = require 'collections/Campaigns'
LevelSessions = require 'collections/LevelSessions'
CourseInstance = require 'models/CourseInstance'
CourseInstances = require 'collections/CourseInstances'
ClassroomSettingsModal = require 'views/courses/ClassroomSettingsModal'
CourseNagSubview = require 'views/teachers/CourseNagSubview'
Prepaids = require 'collections/Prepaids'
Users = require 'collections/Users'
User = require 'models/User'
utils = require 'core/utils'
storage = require 'core/storage'
GoogleClassroomHandler = require('core/social-handlers/GoogleClassroomHandler')
co = require('co')
helper = require 'lib/coursesHelper'
translateWithMarkdown = (label) ->
marked.inlineLexer $.i18n.t(label), []
# TODO: if this proves useful, make a simple admin page with a Treema for editing office hours in db
officeHours = [
{time: moment('2018-02-28 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/307c335ddb1ee6ef7510d14dfea9e911', host: 'David', name: 'CodeCombat for Beginner Teachers'}
{time: moment('2018-03-07 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/a1a6f5f4eb7a0a387c24e00bf0acd2b8', host: 'Nolan', name: 'CodeCombat: Beyond Block-Based Coding'}
{time: moment('2018-03-15 12:30-08').toDate(), link: 'https://zoom.us/meeting/register/16f0a6b4122087667c24e00bf0acd2b8', host: 'Sean', name: 'Building Student Engagement with CodeCombat'}
{time: moment('2018-03-21 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/4e7eb093f8689e21c5b9141539e44ee6', host: 'Liz', name: 'CodeCombat for Beginner Teachers'}
]
module.exports = class TeacherClassesView extends RootView
id: 'teacher-classes-view'
template: template
helper: helper
translateWithMarkdown: translateWithMarkdown
# TODO: where to track/save this data?
teacherQuestData:
'create_classroom':
title: translateWithMarkdown('teacher.teacher_quest_create_classroom')
'add_students':
title: translateWithMarkdown('teacher.teacher_quest_add_students')
'teach_methods':
title: translateWithMarkdown('teacher.teacher_quest_teach_methods')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_methods_step1')
translateWithMarkdown('teacher.teacher_quest_teach_methods_step2')
]
'teach_strings':
title: translateWithMarkdown('teacher.teacher_quest_teach_strings')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_strings_step1')
translateWithMarkdown('teacher.teacher_quest_teach_strings_step2')
]
'teach_loops':
title: translateWithMarkdown('teacher.teacher_quest_teach_loops')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_loops_step1')
translateWithMarkdown('teacher.teacher_quest_teach_loops_step2')
]
'teach_variables':
title: translateWithMarkdown('teacher.teacher_quest_teach_variables')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_variables_step1')
translateWithMarkdown('teacher.teacher_quest_teach_variables_step2')
]
'kithgard_gates_100':
title: translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100')
steps: [
translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100_step1')
translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100_step2')
]
'wakka_maul_100':
title: translateWithMarkdown('teacher.teacher_quest_wakka_maul_100')
steps: [
translateWithMarkdown('teacher.teacher_quest_wakka_maul_100_step1')
translateWithMarkdown('teacher.teacher_quest_wakka_maul_100_step2')
]
'reach_gamedev':
title: translateWithMarkdown('teacher.teacher_quest_reach_gamedev')
steps: [
translateWithMarkdown('teacher.teacher_quest_reach_gamedev_step1')
]
events:
'click .edit-classroom': 'onClickEditClassroom'
'click .archive-classroom': 'onClickArchiveClassroom'
'click .unarchive-classroom': 'onClickUnarchiveClassroom'
'click .create-classroom-btn': 'openNewClassroomModal'
'click .create-teacher-btn': 'onClickCreateTeacherButton'
'click .update-teacher-btn': 'onClickUpdateTeacherButton'
'click .view-class-btn': 'onClickViewClassButton'
'click .see-all-quests': 'onClickSeeAllQuests'
'click .see-less-quests': 'onClickSeeLessQuests'
'click .see-all-office-hours': 'onClickSeeAllOfficeHours'
'click .see-less-office-hours': 'onClickSeeLessOfficeHours'
'click .see-no-office-hours': 'onClickSeeNoOfficeHours'
getTitle: -> $.i18n.t 'teacher.my_classes'
initialize: (options) ->
super(options)
@teacherID = (me.isAdmin() and utils.getQueryVariable('teacherID')) or me.id
@classrooms = new Classrooms()
@classrooms.comparator = (a, b) -> b.id.localeCompare(a.id)
@classrooms.fetchByOwner(@teacherID)
@supermodel.trackCollection(@classrooms)
@listenTo @classrooms, 'sync', ->
for classroom in @classrooms.models
continue if classroom.get('archived')
classroom.sessions = new LevelSessions()
Promise.all(classroom.sessions.fetchForAllClassroomMembers(
classroom,
{
data: {
project: 'state.complete,level,creator,changed,created,dateFirstCompleted,submitted,codeConcepts'
}
}
))
.then (results) =>
return if @destroyed
helper.calculateDots(@classrooms, @courses, @courseInstances)
@calculateQuestCompletion()
@render()
window.tracker?.trackEvent 'Teachers Classes Loaded', category: 'Teachers', ['Mixpanel']
@courses = new Courses()
@courses.fetch()
@supermodel.trackCollection(@courses)
@courseInstances = new CourseInstances()
@courseInstances.fetchByOwner(@teacherID)
@supermodel.trackCollection(@courseInstances)
@progressDotTemplate = require 'templates/teachers/hovers/progress-dot-whole-course'
@prepaids = new Prepaids()
@supermodel.trackRequest @prepaids.fetchByCreator(me.id)
earliestHourTime = new Date() - 60 * 60 * 1000
latestHourTime = new Date() - -21 * 24 * 60 * 60 * 1000
@upcomingOfficeHours = _.sortBy (oh for oh in officeHours when earliestHourTime < oh.time < latestHourTime), 'time'
@howManyOfficeHours = if storage.load('hide-office-hours') then 'none' else 'some'
me.getClientCreatorPermissions()?.then(() =>
@calculateQuestCompletion()
@render?()
)
administratingTeacherIds = me.get('administratingTeachers') || []
@administratingTeachers = new Users()
if administratingTeacherIds.length > 0
req = @administratingTeachers.fetchByIds(administratingTeacherIds)
@supermodel.trackRequest req
# TODO: Any reference to paidTeacher can be cleaned up post Teacher Appreciation week (after 2019-05-03)
@paidTeacher = me.isAdmin() or me.isTeacher() and /<EMAIL>$/i.test me.get('email')
# Level Sessions loaded after onLoaded to prevent race condition in calculateDots
afterRender: ->
super()
unless @courseNagSubview
@courseNagSubview = new CourseNagSubview()
@insertSubView(@courseNagSubview)
$('.progress-dot').each (i, el) ->
dot = $(el)
dot.tooltip({
html: true
container: dot
})
calculateQuestCompletion: ->
@teacherQuestData['create_classroom'].complete = @classrooms.length > 0
for classroom in @classrooms.models
continue unless classroom.get('members')?.length > 0 and classroom.sessions
classCompletion = {}
classCompletion[key] = 0 for key in Object.keys(@teacherQuestData)
students = classroom.get('members')?.length
kithgardGatesCompletes = 0
wakkaMaulCompletes = 0
for session in classroom.sessions.models
if session.get('level')?.original is '541c9a30c6362edfb0f34479' # kithgard-gates
++classCompletion['kithgard_gates_100']
if session.get('level')?.original is '5630eab0c0fcbd86057cc2f8' # wakka-maul
++classCompletion['wakka_maul_100']
continue unless session.get('state')?.complete
if session.get('level')?.original is '5411cb3769152f1707be029c' # dungeons-of-kithgard
++classCompletion['teach_methods']
if session.get('level')?.original is '541875da4c16460000ab990f' # true-names
++classCompletion['teach_strings']
if session.get('level')?.original is '55ca293b9bc1892c835b0136' # fire-dancing
++classCompletion['teach_loops']
if session.get('level')?.original is '5452adea57e83800009730ee' # known-enemy
++classCompletion['teach_variables']
classCompletion[k] /= students for k of classCompletion
classCompletion['add_students'] = if students > 0 then 1.0 else 0.0
if @prepaids.length > 0 or !me.canManageLicensesViaUI()
classCompletion['reach_gamedev'] = 1.0
else
classCompletion['reach_gamedev'] = 0.0
@teacherQuestData[k].complete ||= v > 0.74 for k,v of classCompletion
@teacherQuestData[k].best = Math.max(@teacherQuestData[k].best||0,v) for k,v of classCompletion
onLoaded: ->
helper.calculateDots(@classrooms, @courses, @courseInstances)
@calculateQuestCompletion()
@paidTeacher = @paidTeacher or @prepaids.find((p) => p.get('type') in ['course', 'starter_license'] and p.get('maxRedeemers') > 0)?
if me.isTeacher() and not @classrooms.length
@openNewClassroomModal()
super()
onClickEditClassroom: (e) ->
classroomID = $(e.target).data('classroom-id')
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', classroomID: classroomID, ['Mixpanel']
classroom = @classrooms.get(classroomID)
modal = new ClassroomSettingsModal({ classroom: classroom })
@openModalView(modal)
@listenToOnce modal, 'hide', ->
@calculateQuestCompletion()
@render()
openNewClassroomModal: ->
return unless me.id is @teacherID # Viewing page as admin
window.tracker?.trackEvent 'Teachers Classes Create New Class Started', category: 'Teachers', ['Mixpanel']
classroom = new Classroom({ ownerID: me.id })
modal = new ClassroomSettingsModal({ classroom: classroom })
@openModalView(modal)
@listenToOnce modal.classroom, 'sync', ->
window.tracker?.trackEvent 'Teachers Classes Create New Class Finished', category: 'Teachers', ['Mixpanel']
@classrooms.add(modal.classroom)
if modal.classroom.isGoogleClassroom()
GoogleClassroomHandler.markAsImported(classroom.get("googleClassroomId")).then(() => @render()).catch((e) => console.error(e))
classroom = modal.classroom
@addFreeCourseInstances()
.then(() =>
if classroom.isGoogleClassroom()
@importStudents(classroom)
.then (importedStudents) =>
@addImportedStudents(classroom, importedStudents)
, (_e) => {}
, (err) =>
if classroom.isGoogleClassroom()
noty text: 'Could not import students', layout: 'topCenter', timeout: 3000, type: 'error'
)
.then () =>
@calculateQuestCompletion()
@render()
importStudents: (classroom) ->
GoogleClassroomHandler.importStudentsToClassroom(classroom)
.then (importedStudents) =>
if importedStudents.length > 0
console.debug("Students imported to classroom:", importedStudents)
return Promise.resolve(importedStudents)
else
noty text: 'No new students imported', layout: 'topCenter', timeout: 3000, type: 'error'
return Promise.reject()
.catch (err) =>
noty text: err or 'Error in importing students', layout: 'topCenter', timeout: 3000, type: 'error'
return Promise.reject()
# Add imported students to @classrooms and @courseInstances so that they are rendered on the screen
addImportedStudents: (classroom, importedStudents) ->
cl = @classrooms.models.find((c) => c.get("_id") == classroom.get("_id"))
importedStudents.forEach((i) => cl.get("members").push(i._id))
for course in @courses.models
continue if not course.get('free')
courseInstance = @courseInstances.findWhere({classroomID: classroom.id, courseID: course.id})
if courseInstance
importedStudents.forEach((i) => courseInstance.get("members").push(i._id))
onClickCreateTeacherButton: (e) ->
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', ['Mixpanel']
application.router.navigate("/teachers/signup", { trigger: true })
onClickUpdateTeacherButton: (e) ->
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', ['Mixpanel']
application.router.navigate("/teachers/update-account", { trigger: true })
onClickArchiveClassroom: (e) ->
return unless me.id is @teacherID # Viewing page as admin
classroomID = $(e.currentTarget).data('classroom-id')
classroom = @classrooms.get(classroomID)
classroom.set('archived', true)
classroom.save {}, {
success: =>
window.tracker?.trackEvent 'Teachers Classes Archived Class', category: 'Teachers', ['Mixpanel']
@render()
}
onClickUnarchiveClassroom: (e) ->
return unless me.id is @teacherID # Viewing page as admin
classroomID = $(e.currentTarget).data('classroom-id')
classroom = @classrooms.get(classroomID)
classroom.set('archived', false)
classroom.save {}, {
success: =>
window.tracker?.trackEvent 'Teachers Classes Unarchived Class', category: 'Teachers', ['Mixpanel']
@render()
}
onClickViewClassButton: (e) ->
classroomID = $(e.target).data('classroom-id')
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', classroomID: classroomID, ['Mixpanel']
application.router.navigate("/teachers/classes/#{classroomID}", { trigger: true })
addFreeCourseInstances: co.wrap ->
# so that when students join the classroom, they can automatically get free courses
# non-free courses are generated when the teacher first adds a student to them
try
promises = []
for classroom in @classrooms.models
for course in @courses.models
continue if not course.get('free')
courseInstance = @courseInstances.findWhere({classroomID: classroom.id, courseID: course.id})
if not courseInstance
courseInstance = new CourseInstance({
classroomID: classroom.id
courseID: course.id
})
# TODO: figure out a better way to get around triggering validation errors for properties
# that the server will end up filling in, like an empty members array, ownerID
promises.push(new Promise(courseInstance.save(null, {validate: false}).then))
if (promises.length > 0)
courseInstances = yield Promise.all(promises)
@courseInstances.add(courseInstances) if courseInstances.length > 0
return
catch e
console.error("Error in adding free course instances")
return Promise.reject()
onClickSeeAllQuests: (e) =>
$(e.target).hide()
@$el.find('.see-less-quests').show()
@$el.find('.quest.hide').addClass('hide-revealed').removeClass('hide')
onClickSeeLessQuests: (e) =>
$(e.target).hide()
@$el.find('.see-all-quests').show()
@$el.find('.quest.hide-revealed').addClass('hide').removeClass('hide-revealed')
onClickSeeAllOfficeHours: (e) ->
@howManyOfficeHours = 'all'
@renderSelectors '#office-hours'
onClickSeeLessOfficeHours: (e) ->
@howManyOfficeHours = 'some'
@renderSelectors '#office-hours'
onClickSeeNoOfficeHours: (e) ->
@howManyOfficeHours = 'none'
@renderSelectors '#office-hours'
storage.save 'hide-office-hours', true
| true | require('app/styles/courses/teacher-classes-view.sass')
RootView = require 'views/core/RootView'
template = require 'templates/courses/teacher-classes-view'
Classroom = require 'models/Classroom'
Classrooms = require 'collections/Classrooms'
Courses = require 'collections/Courses'
Campaign = require 'models/Campaign'
Campaigns = require 'collections/Campaigns'
LevelSessions = require 'collections/LevelSessions'
CourseInstance = require 'models/CourseInstance'
CourseInstances = require 'collections/CourseInstances'
ClassroomSettingsModal = require 'views/courses/ClassroomSettingsModal'
CourseNagSubview = require 'views/teachers/CourseNagSubview'
Prepaids = require 'collections/Prepaids'
Users = require 'collections/Users'
User = require 'models/User'
utils = require 'core/utils'
storage = require 'core/storage'
GoogleClassroomHandler = require('core/social-handlers/GoogleClassroomHandler')
co = require('co')
helper = require 'lib/coursesHelper'
translateWithMarkdown = (label) ->
marked.inlineLexer $.i18n.t(label), []
# TODO: if this proves useful, make a simple admin page with a Treema for editing office hours in db
officeHours = [
{time: moment('2018-02-28 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/307c335ddb1ee6ef7510d14dfea9e911', host: 'David', name: 'CodeCombat for Beginner Teachers'}
{time: moment('2018-03-07 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/a1a6f5f4eb7a0a387c24e00bf0acd2b8', host: 'Nolan', name: 'CodeCombat: Beyond Block-Based Coding'}
{time: moment('2018-03-15 12:30-08').toDate(), link: 'https://zoom.us/meeting/register/16f0a6b4122087667c24e00bf0acd2b8', host: 'Sean', name: 'Building Student Engagement with CodeCombat'}
{time: moment('2018-03-21 12:00-08').toDate(), link: 'https://zoom.us/meeting/register/4e7eb093f8689e21c5b9141539e44ee6', host: 'Liz', name: 'CodeCombat for Beginner Teachers'}
]
module.exports = class TeacherClassesView extends RootView
id: 'teacher-classes-view'
template: template
helper: helper
translateWithMarkdown: translateWithMarkdown
# TODO: where to track/save this data?
teacherQuestData:
'create_classroom':
title: translateWithMarkdown('teacher.teacher_quest_create_classroom')
'add_students':
title: translateWithMarkdown('teacher.teacher_quest_add_students')
'teach_methods':
title: translateWithMarkdown('teacher.teacher_quest_teach_methods')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_methods_step1')
translateWithMarkdown('teacher.teacher_quest_teach_methods_step2')
]
'teach_strings':
title: translateWithMarkdown('teacher.teacher_quest_teach_strings')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_strings_step1')
translateWithMarkdown('teacher.teacher_quest_teach_strings_step2')
]
'teach_loops':
title: translateWithMarkdown('teacher.teacher_quest_teach_loops')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_loops_step1')
translateWithMarkdown('teacher.teacher_quest_teach_loops_step2')
]
'teach_variables':
title: translateWithMarkdown('teacher.teacher_quest_teach_variables')
steps: [
translateWithMarkdown('teacher.teacher_quest_teach_variables_step1')
translateWithMarkdown('teacher.teacher_quest_teach_variables_step2')
]
'kithgard_gates_100':
title: translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100')
steps: [
translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100_step1')
translateWithMarkdown('teacher.teacher_quest_kithgard_gates_100_step2')
]
'wakka_maul_100':
title: translateWithMarkdown('teacher.teacher_quest_wakka_maul_100')
steps: [
translateWithMarkdown('teacher.teacher_quest_wakka_maul_100_step1')
translateWithMarkdown('teacher.teacher_quest_wakka_maul_100_step2')
]
'reach_gamedev':
title: translateWithMarkdown('teacher.teacher_quest_reach_gamedev')
steps: [
translateWithMarkdown('teacher.teacher_quest_reach_gamedev_step1')
]
events:
'click .edit-classroom': 'onClickEditClassroom'
'click .archive-classroom': 'onClickArchiveClassroom'
'click .unarchive-classroom': 'onClickUnarchiveClassroom'
'click .create-classroom-btn': 'openNewClassroomModal'
'click .create-teacher-btn': 'onClickCreateTeacherButton'
'click .update-teacher-btn': 'onClickUpdateTeacherButton'
'click .view-class-btn': 'onClickViewClassButton'
'click .see-all-quests': 'onClickSeeAllQuests'
'click .see-less-quests': 'onClickSeeLessQuests'
'click .see-all-office-hours': 'onClickSeeAllOfficeHours'
'click .see-less-office-hours': 'onClickSeeLessOfficeHours'
'click .see-no-office-hours': 'onClickSeeNoOfficeHours'
getTitle: -> $.i18n.t 'teacher.my_classes'
initialize: (options) ->
super(options)
@teacherID = (me.isAdmin() and utils.getQueryVariable('teacherID')) or me.id
@classrooms = new Classrooms()
@classrooms.comparator = (a, b) -> b.id.localeCompare(a.id)
@classrooms.fetchByOwner(@teacherID)
@supermodel.trackCollection(@classrooms)
@listenTo @classrooms, 'sync', ->
for classroom in @classrooms.models
continue if classroom.get('archived')
classroom.sessions = new LevelSessions()
Promise.all(classroom.sessions.fetchForAllClassroomMembers(
classroom,
{
data: {
project: 'state.complete,level,creator,changed,created,dateFirstCompleted,submitted,codeConcepts'
}
}
))
.then (results) =>
return if @destroyed
helper.calculateDots(@classrooms, @courses, @courseInstances)
@calculateQuestCompletion()
@render()
window.tracker?.trackEvent 'Teachers Classes Loaded', category: 'Teachers', ['Mixpanel']
@courses = new Courses()
@courses.fetch()
@supermodel.trackCollection(@courses)
@courseInstances = new CourseInstances()
@courseInstances.fetchByOwner(@teacherID)
@supermodel.trackCollection(@courseInstances)
@progressDotTemplate = require 'templates/teachers/hovers/progress-dot-whole-course'
@prepaids = new Prepaids()
@supermodel.trackRequest @prepaids.fetchByCreator(me.id)
earliestHourTime = new Date() - 60 * 60 * 1000
latestHourTime = new Date() - -21 * 24 * 60 * 60 * 1000
@upcomingOfficeHours = _.sortBy (oh for oh in officeHours when earliestHourTime < oh.time < latestHourTime), 'time'
@howManyOfficeHours = if storage.load('hide-office-hours') then 'none' else 'some'
me.getClientCreatorPermissions()?.then(() =>
@calculateQuestCompletion()
@render?()
)
administratingTeacherIds = me.get('administratingTeachers') || []
@administratingTeachers = new Users()
if administratingTeacherIds.length > 0
req = @administratingTeachers.fetchByIds(administratingTeacherIds)
@supermodel.trackRequest req
# TODO: Any reference to paidTeacher can be cleaned up post Teacher Appreciation week (after 2019-05-03)
@paidTeacher = me.isAdmin() or me.isTeacher() and /PI:EMAIL:<EMAIL>END_PI$/i.test me.get('email')
# Level Sessions loaded after onLoaded to prevent race condition in calculateDots
afterRender: ->
super()
unless @courseNagSubview
@courseNagSubview = new CourseNagSubview()
@insertSubView(@courseNagSubview)
$('.progress-dot').each (i, el) ->
dot = $(el)
dot.tooltip({
html: true
container: dot
})
calculateQuestCompletion: ->
@teacherQuestData['create_classroom'].complete = @classrooms.length > 0
for classroom in @classrooms.models
continue unless classroom.get('members')?.length > 0 and classroom.sessions
classCompletion = {}
classCompletion[key] = 0 for key in Object.keys(@teacherQuestData)
students = classroom.get('members')?.length
kithgardGatesCompletes = 0
wakkaMaulCompletes = 0
for session in classroom.sessions.models
if session.get('level')?.original is '541c9a30c6362edfb0f34479' # kithgard-gates
++classCompletion['kithgard_gates_100']
if session.get('level')?.original is '5630eab0c0fcbd86057cc2f8' # wakka-maul
++classCompletion['wakka_maul_100']
continue unless session.get('state')?.complete
if session.get('level')?.original is '5411cb3769152f1707be029c' # dungeons-of-kithgard
++classCompletion['teach_methods']
if session.get('level')?.original is '541875da4c16460000ab990f' # true-names
++classCompletion['teach_strings']
if session.get('level')?.original is '55ca293b9bc1892c835b0136' # fire-dancing
++classCompletion['teach_loops']
if session.get('level')?.original is '5452adea57e83800009730ee' # known-enemy
++classCompletion['teach_variables']
classCompletion[k] /= students for k of classCompletion
classCompletion['add_students'] = if students > 0 then 1.0 else 0.0
if @prepaids.length > 0 or !me.canManageLicensesViaUI()
classCompletion['reach_gamedev'] = 1.0
else
classCompletion['reach_gamedev'] = 0.0
@teacherQuestData[k].complete ||= v > 0.74 for k,v of classCompletion
@teacherQuestData[k].best = Math.max(@teacherQuestData[k].best||0,v) for k,v of classCompletion
onLoaded: ->
helper.calculateDots(@classrooms, @courses, @courseInstances)
@calculateQuestCompletion()
@paidTeacher = @paidTeacher or @prepaids.find((p) => p.get('type') in ['course', 'starter_license'] and p.get('maxRedeemers') > 0)?
if me.isTeacher() and not @classrooms.length
@openNewClassroomModal()
super()
onClickEditClassroom: (e) ->
classroomID = $(e.target).data('classroom-id')
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', classroomID: classroomID, ['Mixpanel']
classroom = @classrooms.get(classroomID)
modal = new ClassroomSettingsModal({ classroom: classroom })
@openModalView(modal)
@listenToOnce modal, 'hide', ->
@calculateQuestCompletion()
@render()
openNewClassroomModal: ->
return unless me.id is @teacherID # Viewing page as admin
window.tracker?.trackEvent 'Teachers Classes Create New Class Started', category: 'Teachers', ['Mixpanel']
classroom = new Classroom({ ownerID: me.id })
modal = new ClassroomSettingsModal({ classroom: classroom })
@openModalView(modal)
@listenToOnce modal.classroom, 'sync', ->
window.tracker?.trackEvent 'Teachers Classes Create New Class Finished', category: 'Teachers', ['Mixpanel']
@classrooms.add(modal.classroom)
if modal.classroom.isGoogleClassroom()
GoogleClassroomHandler.markAsImported(classroom.get("googleClassroomId")).then(() => @render()).catch((e) => console.error(e))
classroom = modal.classroom
@addFreeCourseInstances()
.then(() =>
if classroom.isGoogleClassroom()
@importStudents(classroom)
.then (importedStudents) =>
@addImportedStudents(classroom, importedStudents)
, (_e) => {}
, (err) =>
if classroom.isGoogleClassroom()
noty text: 'Could not import students', layout: 'topCenter', timeout: 3000, type: 'error'
)
.then () =>
@calculateQuestCompletion()
@render()
importStudents: (classroom) ->
GoogleClassroomHandler.importStudentsToClassroom(classroom)
.then (importedStudents) =>
if importedStudents.length > 0
console.debug("Students imported to classroom:", importedStudents)
return Promise.resolve(importedStudents)
else
noty text: 'No new students imported', layout: 'topCenter', timeout: 3000, type: 'error'
return Promise.reject()
.catch (err) =>
noty text: err or 'Error in importing students', layout: 'topCenter', timeout: 3000, type: 'error'
return Promise.reject()
# Add imported students to @classrooms and @courseInstances so that they are rendered on the screen
addImportedStudents: (classroom, importedStudents) ->
cl = @classrooms.models.find((c) => c.get("_id") == classroom.get("_id"))
importedStudents.forEach((i) => cl.get("members").push(i._id))
for course in @courses.models
continue if not course.get('free')
courseInstance = @courseInstances.findWhere({classroomID: classroom.id, courseID: course.id})
if courseInstance
importedStudents.forEach((i) => courseInstance.get("members").push(i._id))
onClickCreateTeacherButton: (e) ->
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', ['Mixpanel']
application.router.navigate("/teachers/signup", { trigger: true })
onClickUpdateTeacherButton: (e) ->
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', ['Mixpanel']
application.router.navigate("/teachers/update-account", { trigger: true })
onClickArchiveClassroom: (e) ->
return unless me.id is @teacherID # Viewing page as admin
classroomID = $(e.currentTarget).data('classroom-id')
classroom = @classrooms.get(classroomID)
classroom.set('archived', true)
classroom.save {}, {
success: =>
window.tracker?.trackEvent 'Teachers Classes Archived Class', category: 'Teachers', ['Mixpanel']
@render()
}
onClickUnarchiveClassroom: (e) ->
return unless me.id is @teacherID # Viewing page as admin
classroomID = $(e.currentTarget).data('classroom-id')
classroom = @classrooms.get(classroomID)
classroom.set('archived', false)
classroom.save {}, {
success: =>
window.tracker?.trackEvent 'Teachers Classes Unarchived Class', category: 'Teachers', ['Mixpanel']
@render()
}
onClickViewClassButton: (e) ->
classroomID = $(e.target).data('classroom-id')
window.tracker?.trackEvent $(e.target).data('event-action'), category: 'Teachers', classroomID: classroomID, ['Mixpanel']
application.router.navigate("/teachers/classes/#{classroomID}", { trigger: true })
addFreeCourseInstances: co.wrap ->
# so that when students join the classroom, they can automatically get free courses
# non-free courses are generated when the teacher first adds a student to them
try
promises = []
for classroom in @classrooms.models
for course in @courses.models
continue if not course.get('free')
courseInstance = @courseInstances.findWhere({classroomID: classroom.id, courseID: course.id})
if not courseInstance
courseInstance = new CourseInstance({
classroomID: classroom.id
courseID: course.id
})
# TODO: figure out a better way to get around triggering validation errors for properties
# that the server will end up filling in, like an empty members array, ownerID
promises.push(new Promise(courseInstance.save(null, {validate: false}).then))
if (promises.length > 0)
courseInstances = yield Promise.all(promises)
@courseInstances.add(courseInstances) if courseInstances.length > 0
return
catch e
console.error("Error in adding free course instances")
return Promise.reject()
onClickSeeAllQuests: (e) =>
$(e.target).hide()
@$el.find('.see-less-quests').show()
@$el.find('.quest.hide').addClass('hide-revealed').removeClass('hide')
onClickSeeLessQuests: (e) =>
$(e.target).hide()
@$el.find('.see-all-quests').show()
@$el.find('.quest.hide-revealed').addClass('hide').removeClass('hide-revealed')
onClickSeeAllOfficeHours: (e) ->
@howManyOfficeHours = 'all'
@renderSelectors '#office-hours'
onClickSeeLessOfficeHours: (e) ->
@howManyOfficeHours = 'some'
@renderSelectors '#office-hours'
onClickSeeNoOfficeHours: (e) ->
@howManyOfficeHours = 'none'
@renderSelectors '#office-hours'
storage.save 'hide-office-hours', true
|
[
{
"context": "pell\"\n\nclass DrunkenFrenzy extends Spell\n name: \"drunken frenzy\"\n @element = DrunkenFrenzy::element = Spell::Ele",
"end": 97,
"score": 0.9138669371604919,
"start": 83,
"tag": "NAME",
"value": "drunken frenzy"
}
] | src/character/spells/combat-skills/pirate/DrunkenFrenzy.coffee | jawsome/IdleLands | 3 |
Spell = require "../../../base/Spell"
class DrunkenFrenzy extends Spell
name: "drunken frenzy"
@element = DrunkenFrenzy::element = Spell::Element.physical
determineTargets: ->
@targetSomeEnemies size: 1
calcDamage: ->
baseDamage = (@caster.calc.stat 'str')*(1+(99-@caster.special.getValue())/100)
minStat = baseDamage*0.8
maxStat = baseDamage*1.25
super() + @minMax minStat, maxStat
cast: (player) ->
return if @suppressed
damage = @calcDamage()
message = "%casterName assaults %targetName in a %spellName for %damage damage!"
@doDamageTo player, damage, message
constructor: (@game, @caster) ->
super @game, @caster
@bindings =
doSpellCast: @cast
module.exports = exports = DrunkenFrenzy | 56017 |
Spell = require "../../../base/Spell"
class DrunkenFrenzy extends Spell
name: "<NAME>"
@element = DrunkenFrenzy::element = Spell::Element.physical
determineTargets: ->
@targetSomeEnemies size: 1
calcDamage: ->
baseDamage = (@caster.calc.stat 'str')*(1+(99-@caster.special.getValue())/100)
minStat = baseDamage*0.8
maxStat = baseDamage*1.25
super() + @minMax minStat, maxStat
cast: (player) ->
return if @suppressed
damage = @calcDamage()
message = "%casterName assaults %targetName in a %spellName for %damage damage!"
@doDamageTo player, damage, message
constructor: (@game, @caster) ->
super @game, @caster
@bindings =
doSpellCast: @cast
module.exports = exports = DrunkenFrenzy | true |
Spell = require "../../../base/Spell"
class DrunkenFrenzy extends Spell
name: "PI:NAME:<NAME>END_PI"
@element = DrunkenFrenzy::element = Spell::Element.physical
determineTargets: ->
@targetSomeEnemies size: 1
calcDamage: ->
baseDamage = (@caster.calc.stat 'str')*(1+(99-@caster.special.getValue())/100)
minStat = baseDamage*0.8
maxStat = baseDamage*1.25
super() + @minMax minStat, maxStat
cast: (player) ->
return if @suppressed
damage = @calcDamage()
message = "%casterName assaults %targetName in a %spellName for %damage damage!"
@doDamageTo player, damage, message
constructor: (@game, @caster) ->
super @game, @caster
@bindings =
doSpellCast: @cast
module.exports = exports = DrunkenFrenzy |
[
{
"context": "umper | responsive | image\n# * https://github.com/brewster1134/bumper\n# *\n# * @author Ryan Brewster\n# * Copyrigh",
"end": 71,
"score": 0.9994359612464905,
"start": 59,
"tag": "USERNAME",
"value": "brewster1134"
},
{
"context": "s://github.com/brewster1134/bumper\n# ... | src/bumper-responsive-image.coffee | brewster1134/bumper | 0 | ###
# * bumper | responsive | image
# * https://github.com/brewster1134/bumper
# *
# * @author Ryan Brewster
# * Copyright (c) 2014
# * Licensed under the MIT license.
###
((factory) ->
if define?.amd
define [
'bumper-core'
'bumper-responsive-breakpoint'
], (Core, ResponsiveBreakpoint) ->
factory Core, ResponsiveBreakpoint
else
factory window.Bumper.Core, window.Bumper.Responsive.Breakpoint
) (Core, ResponsiveBreakpoint) ->
class BumperResponsiveImage extends Core.Module
events: ->
# resize all images on page load
window.addEventListener 'load', => @resizeAll()
# resize all images on breakpoint change increase
window.addEventListener 'bumper-responsive-breakpoint-change', => @resizeAll()
# Creates a mutation observor when new elements are added to the dom
# http://caniuse.com/#feat=mutationobserver
responsiveObserver = new MutationObserver => @resizeAll()
responsiveObserver.observe document,
childList: true
subtree: true
return @
# calls resize on all matching elements
#
resizeAll: ->
images = document.querySelectorAll '.bumper-responsive-image'
for image in images
try
@resize image
return images
# set the repsonsive image and fire events
# @param el [HTML Element] html img or div element that has responsive image data attributes
# @param breakpoint [String] an optional name of a breakpoint (as defined from setBreakpoints)
#
resize: (el, breakpoint, force = false) ->
el = el[0] if el.jquery # convert from a jquery object
breakpoint ||= window.Bumper.Responsive.Breakpoint?.getCurrent()
fullUrl = @getUrl el, breakpoint
# return if no url, or url is the same as the existing url
return false unless fullUrl
if force == false && (el.getAttribute('src')?.indexOf(fullUrl) >= 0 || el.style.backgroundImage.indexOf(fullUrl) >= 0)
return fullUrl
# handle images
#
if el.tagName == 'IMG'
img = el
# trigger event
img.addEventListener 'load', ->
event = new CustomEvent 'bumper-responsive-image-loaded',
detail:
img: img
img.dispatchEvent event
img.setAttribute 'data-bumper-breakpoint', breakpoint
img.setAttribute 'src', fullUrl
# handle background images
#
else
# create a temp image tag so we can fire an event when the image is loaded
img = document.createElement 'img'
img.addEventListener 'load', ->
src = @getAttribute 'src'
el.setAttribute 'data-bumper-breakpoint', breakpoint
el.style.backgroundImage = "url(#{src})"
el.style.width = "#{img.width}px"
el.style.height = "#{img.height}px"
event = new CustomEvent 'bumper-responsive-image-loaded',
detail:
img: img
el.dispatchEvent event
img.setAttribute 'src', fullUrl
return fullUrl
# Gets the full url based on bumper data attributes
# @param el [HTML Element] html img or div element that has responsive image data attributes
# @param breakpoint [String] an optional name of a breakpoint (as defined from setBreakpoints)
#
getUrl: (el, breakpoint = 'default') ->
url = el.getAttribute("data-bumper-responsive-image-url-#{breakpoint}") ||
el.getAttribute('data-bumper-responsive-image-url')
params = el.getAttribute("data-bumper-responsive-image-url-params-#{breakpoint}") ||
el.getAttribute('data-bumper-responsive-image-url-params')
# Log warning if no url is defined
throw new Error "data-bumper-responsive-image-url[-#{breakpoint}] is not set." unless url
# preserve any params in the url value
# best practice is to keep all url parameters out of the url attribute, but this provides fallback support for special cases
urlParams = url.split('?')
if urlParams.length > 1
url = urlParams[0]
params = if params
"#{urlParams[1]}&#{params}"
else
urlParams[1]
# combine params if they are found
fullUrl = if params then "#{url}?#{params}" else url
# process any possible string inteprolation
fullUrl = window.Bumper.Dom?.getElementData(fullUrl, el) || fullUrl
return fullUrl
window.Bumper ||= {}
window.Bumper.Responsive ||= {}
window.Bumper.Responsive.Image ||= new BumperResponsiveImage
| 205862 | ###
# * bumper | responsive | image
# * https://github.com/brewster1134/bumper
# *
# * @author <NAME>
# * Copyright (c) 2014
# * Licensed under the MIT license.
###
((factory) ->
if define?.amd
define [
'bumper-core'
'bumper-responsive-breakpoint'
], (Core, ResponsiveBreakpoint) ->
factory Core, ResponsiveBreakpoint
else
factory window.Bumper.Core, window.Bumper.Responsive.Breakpoint
) (Core, ResponsiveBreakpoint) ->
class BumperResponsiveImage extends Core.Module
events: ->
# resize all images on page load
window.addEventListener 'load', => @resizeAll()
# resize all images on breakpoint change increase
window.addEventListener 'bumper-responsive-breakpoint-change', => @resizeAll()
# Creates a mutation observor when new elements are added to the dom
# http://caniuse.com/#feat=mutationobserver
responsiveObserver = new MutationObserver => @resizeAll()
responsiveObserver.observe document,
childList: true
subtree: true
return @
# calls resize on all matching elements
#
resizeAll: ->
images = document.querySelectorAll '.bumper-responsive-image'
for image in images
try
@resize image
return images
# set the repsonsive image and fire events
# @param el [HTML Element] html img or div element that has responsive image data attributes
# @param breakpoint [String] an optional name of a breakpoint (as defined from setBreakpoints)
#
resize: (el, breakpoint, force = false) ->
el = el[0] if el.jquery # convert from a jquery object
breakpoint ||= window.Bumper.Responsive.Breakpoint?.getCurrent()
fullUrl = @getUrl el, breakpoint
# return if no url, or url is the same as the existing url
return false unless fullUrl
if force == false && (el.getAttribute('src')?.indexOf(fullUrl) >= 0 || el.style.backgroundImage.indexOf(fullUrl) >= 0)
return fullUrl
# handle images
#
if el.tagName == 'IMG'
img = el
# trigger event
img.addEventListener 'load', ->
event = new CustomEvent 'bumper-responsive-image-loaded',
detail:
img: img
img.dispatchEvent event
img.setAttribute 'data-bumper-breakpoint', breakpoint
img.setAttribute 'src', fullUrl
# handle background images
#
else
# create a temp image tag so we can fire an event when the image is loaded
img = document.createElement 'img'
img.addEventListener 'load', ->
src = @getAttribute 'src'
el.setAttribute 'data-bumper-breakpoint', breakpoint
el.style.backgroundImage = "url(#{src})"
el.style.width = "#{img.width}px"
el.style.height = "#{img.height}px"
event = new CustomEvent 'bumper-responsive-image-loaded',
detail:
img: img
el.dispatchEvent event
img.setAttribute 'src', fullUrl
return fullUrl
# Gets the full url based on bumper data attributes
# @param el [HTML Element] html img or div element that has responsive image data attributes
# @param breakpoint [String] an optional name of a breakpoint (as defined from setBreakpoints)
#
getUrl: (el, breakpoint = 'default') ->
url = el.getAttribute("data-bumper-responsive-image-url-#{breakpoint}") ||
el.getAttribute('data-bumper-responsive-image-url')
params = el.getAttribute("data-bumper-responsive-image-url-params-#{breakpoint}") ||
el.getAttribute('data-bumper-responsive-image-url-params')
# Log warning if no url is defined
throw new Error "data-bumper-responsive-image-url[-#{breakpoint}] is not set." unless url
# preserve any params in the url value
# best practice is to keep all url parameters out of the url attribute, but this provides fallback support for special cases
urlParams = url.split('?')
if urlParams.length > 1
url = urlParams[0]
params = if params
"#{urlParams[1]}&#{params}"
else
urlParams[1]
# combine params if they are found
fullUrl = if params then "#{url}?#{params}" else url
# process any possible string inteprolation
fullUrl = window.Bumper.Dom?.getElementData(fullUrl, el) || fullUrl
return fullUrl
window.Bumper ||= {}
window.Bumper.Responsive ||= {}
window.Bumper.Responsive.Image ||= new BumperResponsiveImage
| true | ###
# * bumper | responsive | image
# * https://github.com/brewster1134/bumper
# *
# * @author PI:NAME:<NAME>END_PI
# * Copyright (c) 2014
# * Licensed under the MIT license.
###
((factory) ->
if define?.amd
define [
'bumper-core'
'bumper-responsive-breakpoint'
], (Core, ResponsiveBreakpoint) ->
factory Core, ResponsiveBreakpoint
else
factory window.Bumper.Core, window.Bumper.Responsive.Breakpoint
) (Core, ResponsiveBreakpoint) ->
class BumperResponsiveImage extends Core.Module
events: ->
# resize all images on page load
window.addEventListener 'load', => @resizeAll()
# resize all images on breakpoint change increase
window.addEventListener 'bumper-responsive-breakpoint-change', => @resizeAll()
# Creates a mutation observor when new elements are added to the dom
# http://caniuse.com/#feat=mutationobserver
responsiveObserver = new MutationObserver => @resizeAll()
responsiveObserver.observe document,
childList: true
subtree: true
return @
# calls resize on all matching elements
#
resizeAll: ->
images = document.querySelectorAll '.bumper-responsive-image'
for image in images
try
@resize image
return images
# set the repsonsive image and fire events
# @param el [HTML Element] html img or div element that has responsive image data attributes
# @param breakpoint [String] an optional name of a breakpoint (as defined from setBreakpoints)
#
resize: (el, breakpoint, force = false) ->
el = el[0] if el.jquery # convert from a jquery object
breakpoint ||= window.Bumper.Responsive.Breakpoint?.getCurrent()
fullUrl = @getUrl el, breakpoint
# return if no url, or url is the same as the existing url
return false unless fullUrl
if force == false && (el.getAttribute('src')?.indexOf(fullUrl) >= 0 || el.style.backgroundImage.indexOf(fullUrl) >= 0)
return fullUrl
# handle images
#
if el.tagName == 'IMG'
img = el
# trigger event
img.addEventListener 'load', ->
event = new CustomEvent 'bumper-responsive-image-loaded',
detail:
img: img
img.dispatchEvent event
img.setAttribute 'data-bumper-breakpoint', breakpoint
img.setAttribute 'src', fullUrl
# handle background images
#
else
# create a temp image tag so we can fire an event when the image is loaded
img = document.createElement 'img'
img.addEventListener 'load', ->
src = @getAttribute 'src'
el.setAttribute 'data-bumper-breakpoint', breakpoint
el.style.backgroundImage = "url(#{src})"
el.style.width = "#{img.width}px"
el.style.height = "#{img.height}px"
event = new CustomEvent 'bumper-responsive-image-loaded',
detail:
img: img
el.dispatchEvent event
img.setAttribute 'src', fullUrl
return fullUrl
# Gets the full url based on bumper data attributes
# @param el [HTML Element] html img or div element that has responsive image data attributes
# @param breakpoint [String] an optional name of a breakpoint (as defined from setBreakpoints)
#
getUrl: (el, breakpoint = 'default') ->
url = el.getAttribute("data-bumper-responsive-image-url-#{breakpoint}") ||
el.getAttribute('data-bumper-responsive-image-url')
params = el.getAttribute("data-bumper-responsive-image-url-params-#{breakpoint}") ||
el.getAttribute('data-bumper-responsive-image-url-params')
# Log warning if no url is defined
throw new Error "data-bumper-responsive-image-url[-#{breakpoint}] is not set." unless url
# preserve any params in the url value
# best practice is to keep all url parameters out of the url attribute, but this provides fallback support for special cases
urlParams = url.split('?')
if urlParams.length > 1
url = urlParams[0]
params = if params
"#{urlParams[1]}&#{params}"
else
urlParams[1]
# combine params if they are found
fullUrl = if params then "#{url}?#{params}" else url
# process any possible string inteprolation
fullUrl = window.Bumper.Dom?.getElementData(fullUrl, el) || fullUrl
return fullUrl
window.Bumper ||= {}
window.Bumper.Responsive ||= {}
window.Bumper.Responsive.Image ||= new BumperResponsiveImage
|
[
{
"context": "mpest',\r\n title : \"The Tempest\",\r\n name : \"William\"\r\n surname : \"Shakespeare\"\r\n length : 123\r\n\r\no",
"end": 507,
"score": 0.9998340606689453,
"start": 500,
"tag": "NAME",
"value": "William"
},
{
"context": "he Tempest\",\r\n name : \"Willi... | archive/backbone/bower_components/backbone/test/model.coffee | Morgantheplant/website | 0 | # Quick Backbone/CoffeeScript tests to make sure that inheritance
# works correctly.
{ok, equal, deepEqual} = require 'assert'
{Model, Collection, Events} = require '../backbone'
# Patch `ok` to store a count of passed tests...
count = 0
oldOk = ok
ok = ->
oldOk arguments...
count++
class Document extends Model
fullName: ->
@get('name') + ' ' + @get('surname')
tempest = new Document
id : '1-the-tempest',
title : "The Tempest",
name : "William"
surname : "Shakespeare"
length : 123
ok tempest.fullName() is "William Shakespeare"
ok tempest.get('length') is 123
class ProperDocument extends Document
fullName: ->
"Mr. " + super
properTempest = new ProperDocument tempest.attributes
ok properTempest.fullName() is "Mr. William Shakespeare"
ok properTempest.get('length') is 123
console.log "passed #{count} tests"
| 7794 | # Quick Backbone/CoffeeScript tests to make sure that inheritance
# works correctly.
{ok, equal, deepEqual} = require 'assert'
{Model, Collection, Events} = require '../backbone'
# Patch `ok` to store a count of passed tests...
count = 0
oldOk = ok
ok = ->
oldOk arguments...
count++
class Document extends Model
fullName: ->
@get('name') + ' ' + @get('surname')
tempest = new Document
id : '1-the-tempest',
title : "The Tempest",
name : "<NAME>"
surname : "<NAME>"
length : 123
ok tempest.fullName() is "<NAME>"
ok tempest.get('length') is 123
class ProperDocument extends Document
fullName: ->
"Mr. " + super
properTempest = new ProperDocument tempest.attributes
ok properTempest.fullName() is "Mr. <NAME>"
ok properTempest.get('length') is 123
console.log "passed #{count} tests"
| true | # Quick Backbone/CoffeeScript tests to make sure that inheritance
# works correctly.
{ok, equal, deepEqual} = require 'assert'
{Model, Collection, Events} = require '../backbone'
# Patch `ok` to store a count of passed tests...
count = 0
oldOk = ok
ok = ->
oldOk arguments...
count++
class Document extends Model
fullName: ->
@get('name') + ' ' + @get('surname')
tempest = new Document
id : '1-the-tempest',
title : "The Tempest",
name : "PI:NAME:<NAME>END_PI"
surname : "PI:NAME:<NAME>END_PI"
length : 123
ok tempest.fullName() is "PI:NAME:<NAME>END_PI"
ok tempest.get('length') is 123
class ProperDocument extends Document
fullName: ->
"Mr. " + super
properTempest = new ProperDocument tempest.attributes
ok properTempest.fullName() is "Mr. PI:NAME:<NAME>END_PI"
ok properTempest.get('length') is 123
console.log "passed #{count} tests"
|
[
{
"context": " routes.submitForgotPassword { body: { email: 'foo@bar.com' } }, @res\n Backbone.sync.args[0][2].url.shoul",
"end": 827,
"score": 0.9999151229858398,
"start": 816,
"tag": "EMAIL",
"value": "foo@bar.com"
},
{
"context": "ontainEql 'send_reset_password_instructions?... | src/mobile/apps/auth/test/routes.coffee | kierangillen/force | 0 | { fabricate } = require 'antigravity'
_ = require 'underscore'
sinon = require 'sinon'
rewire = require 'rewire'
routes = rewire '../routes'
Backbone = require 'backbone'
describe '#forgotPassword', ->
it 'renders the reset form', ->
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
routes.forgotPassword {}, @res
@render.args[0][0].should.equal 'forgot_password'
describe '#submitForgotPassword', ->
beforeEach ->
sinon.stub Backbone, 'sync'
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
afterEach ->
Backbone.sync.restore()
it 'articles to the reset password API and renders the confirmation page', ->
routes.submitForgotPassword { body: { email: 'foo@bar.com' } }, @res
Backbone.sync.args[0][2].url.should.containEql 'send_reset_password_instructions?email=foo@bar.com'
Backbone.sync.args[0][2].success { success: true }
@render.args[0][0].should.equal 'forgot_password_confirmation'
it 'bubbles error', ->
routes.submitForgotPassword(
{ body: email: 'foo@bar.com' }
@res
)
Backbone.sync.args[0][2].error response: body: error: 'Fail whale'
@render.args[0][1].error.should.equal 'Fail whale'
it 'doesnt choke on errors without bodies', ->
routes.submitForgotPassword(
{ body: email: 'foo@bar.com' }
@res
)
Backbone.sync.args[0][2].error response: text: 'Whomp'
@render.args[0][1].error.should.containEql 'try again'
describe '#resetPassword', ->
it 'renders the reset form', ->
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
routes.resetPassword {}, @res
@render.args[0][0].should.equal 'reset_password'
describe '#login', ->
beforeEach ->
@redirectURL = '/artwork/matthew-abbott-lobby-and-supercomputer'
@req =
session: null
get: (-> @redirectURL)
query: {}
body: {}
params: {}
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
it 'renders the login page', ->
routes.login @req, @res
@render.args[0][0].should.equal 'login'
it 'renders the call_to_action page', ->
req =
session: null
get: (=> @redirectURL)
query: {
action: 'artwork-save',
'redirect-to': @redirectURL
}
body: {}
params: {}
routes.login req, @res
@render.args[0][0].should.equal 'call_to_action'
it 'passes the redirect param to the template', ->
req =
query: { 'redirect-to': '%2Ffollowing%2Fprofiles' }
body: {}
params: {}
get: (-> false)
routes.login req, @res
@render.args[0][1].redirectTo.should.equal '%2Ffollowing%2Fprofiles'
it 'passes the redirect query param to the template', ->
@req.query = { 'redirect_uri': '%2Ffollowing%2Fprofiles' }
@req.get = (-> false)
routes.login @req, @res
@render.args[0][1].redirectTo.should.equal '%2Ffollowing%2Fprofiles'
it 'redirects to new login page if env variable is set', ->
req =
query: { 'redirect-to': '/'}
body: {}
params: {}
get: (-> false)
res =
redirect: @redirect = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: true
routes.login req, res
@redirect.args[0][0].should.equal '/login?redirect-to=%2F&redirectTo=%2F'
describe '#signUp', ->
beforeEach ->
@req = { session: {}, get: (-> '/auctions/two-x-two'), query: {}, body: {}, params: {}}
@res =
render: @render = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
sinon.stub Backbone, 'sync'
afterEach ->
@render.restore?()
Backbone.sync.restore()
it 'renders the call_to_action if coming from an action', ->
@req.query.action = 'register-for-auction'
routes.signUp @req, @res
@render.args[0][1].action.should.equal 'register-for-auction'
@render.args[0][0].should.equal 'call_to_action'
it 'renders the create account page if ?email=1', ->
@req.query['email'] = '1'
routes.signUp @req, @res
@render.args[0][0].should.equal 'signup_email'
it 'renders the create account page if ?email=1', ->
@req.query['email'] = '1'
routes.signUp @req, @res
@render.args[0][0].should.equal 'signup_email'
it 'sets redirectTo if redirect-to was passed', ->
@req.query['email'] = '1'
@req.query['redirect-to'] = '/auction-registration'
routes.signUp @req, @res
@render.args[0][1].redirectTo.should.equal '/auction-registration'
it 'ignores malicious redirects', ->
req = query: { 'redirect-to': 'http://www.iamveryverysorry.com/' }, body: {}, session: {}, params: {}
routes.signUp req, @res
@render.args[0][1].redirectTo.should.equal '/'
it 'sets the prefill var', ->
@req.query.prefill = 'foo@bar.com'
routes.signUp @req, @res
@render.args[0][1].prefill.should.equal 'foo@bar.com'
it 'redirects to new signup page if env variable is set', ->
req =
query: { 'redirect-to': '/'}
body: {}
params: {}
get: (-> false)
res =
redirect: @redirect = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: true
routes.signUp req, res
@redirect.args[0][0].should.containEql '/signup?redirect-to=%2F&redirectTo=%2F'
| 22930 | { fabricate } = require 'antigravity'
_ = require 'underscore'
sinon = require 'sinon'
rewire = require 'rewire'
routes = rewire '../routes'
Backbone = require 'backbone'
describe '#forgotPassword', ->
it 'renders the reset form', ->
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
routes.forgotPassword {}, @res
@render.args[0][0].should.equal 'forgot_password'
describe '#submitForgotPassword', ->
beforeEach ->
sinon.stub Backbone, 'sync'
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
afterEach ->
Backbone.sync.restore()
it 'articles to the reset password API and renders the confirmation page', ->
routes.submitForgotPassword { body: { email: '<EMAIL>' } }, @res
Backbone.sync.args[0][2].url.should.containEql 'send_reset_password_instructions?email=<EMAIL>'
Backbone.sync.args[0][2].success { success: true }
@render.args[0][0].should.equal 'forgot_password_confirmation'
it 'bubbles error', ->
routes.submitForgotPassword(
{ body: email: '<EMAIL>' }
@res
)
Backbone.sync.args[0][2].error response: body: error: 'Fail whale'
@render.args[0][1].error.should.equal 'Fail whale'
it 'doesnt choke on errors without bodies', ->
routes.submitForgotPassword(
{ body: email: '<EMAIL>' }
@res
)
Backbone.sync.args[0][2].error response: text: 'Whomp'
@render.args[0][1].error.should.containEql 'try again'
describe '#resetPassword', ->
it 'renders the reset form', ->
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
routes.resetPassword {}, @res
@render.args[0][0].should.equal 'reset_password'
describe '#login', ->
beforeEach ->
@redirectURL = '/artwork/matthew-abbott-lobby-and-supercomputer'
@req =
session: null
get: (-> @redirectURL)
query: {}
body: {}
params: {}
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
it 'renders the login page', ->
routes.login @req, @res
@render.args[0][0].should.equal 'login'
it 'renders the call_to_action page', ->
req =
session: null
get: (=> @redirectURL)
query: {
action: 'artwork-save',
'redirect-to': @redirectURL
}
body: {}
params: {}
routes.login req, @res
@render.args[0][0].should.equal 'call_to_action'
it 'passes the redirect param to the template', ->
req =
query: { 'redirect-to': '%2Ffollowing%2Fprofiles' }
body: {}
params: {}
get: (-> false)
routes.login req, @res
@render.args[0][1].redirectTo.should.equal '%2Ffollowing%2Fprofiles'
it 'passes the redirect query param to the template', ->
@req.query = { 'redirect_uri': '%2Ffollowing%2Fprofiles' }
@req.get = (-> false)
routes.login @req, @res
@render.args[0][1].redirectTo.should.equal '%2Ffollowing%2Fprofiles'
it 'redirects to new login page if env variable is set', ->
req =
query: { 'redirect-to': '/'}
body: {}
params: {}
get: (-> false)
res =
redirect: @redirect = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: true
routes.login req, res
@redirect.args[0][0].should.equal '/login?redirect-to=%2F&redirectTo=%2F'
describe '#signUp', ->
beforeEach ->
@req = { session: {}, get: (-> '/auctions/two-x-two'), query: {}, body: {}, params: {}}
@res =
render: @render = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
sinon.stub Backbone, 'sync'
afterEach ->
@render.restore?()
Backbone.sync.restore()
it 'renders the call_to_action if coming from an action', ->
@req.query.action = 'register-for-auction'
routes.signUp @req, @res
@render.args[0][1].action.should.equal 'register-for-auction'
@render.args[0][0].should.equal 'call_to_action'
it 'renders the create account page if ?email=1', ->
@req.query['email'] = '1'
routes.signUp @req, @res
@render.args[0][0].should.equal 'signup_email'
it 'renders the create account page if ?email=1', ->
@req.query['email'] = '1'
routes.signUp @req, @res
@render.args[0][0].should.equal 'signup_email'
it 'sets redirectTo if redirect-to was passed', ->
@req.query['email'] = '1'
@req.query['redirect-to'] = '/auction-registration'
routes.signUp @req, @res
@render.args[0][1].redirectTo.should.equal '/auction-registration'
it 'ignores malicious redirects', ->
req = query: { 'redirect-to': 'http://www.iamveryverysorry.com/' }, body: {}, session: {}, params: {}
routes.signUp req, @res
@render.args[0][1].redirectTo.should.equal '/'
it 'sets the prefill var', ->
@req.query.prefill = '<EMAIL>'
routes.signUp @req, @res
@render.args[0][1].prefill.should.equal '<EMAIL>'
it 'redirects to new signup page if env variable is set', ->
req =
query: { 'redirect-to': '/'}
body: {}
params: {}
get: (-> false)
res =
redirect: @redirect = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: true
routes.signUp req, res
@redirect.args[0][0].should.containEql '/signup?redirect-to=%2F&redirectTo=%2F'
| true | { fabricate } = require 'antigravity'
_ = require 'underscore'
sinon = require 'sinon'
rewire = require 'rewire'
routes = rewire '../routes'
Backbone = require 'backbone'
describe '#forgotPassword', ->
it 'renders the reset form', ->
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
routes.forgotPassword {}, @res
@render.args[0][0].should.equal 'forgot_password'
describe '#submitForgotPassword', ->
beforeEach ->
sinon.stub Backbone, 'sync'
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
afterEach ->
Backbone.sync.restore()
it 'articles to the reset password API and renders the confirmation page', ->
routes.submitForgotPassword { body: { email: 'PI:EMAIL:<EMAIL>END_PI' } }, @res
Backbone.sync.args[0][2].url.should.containEql 'send_reset_password_instructions?email=PI:EMAIL:<EMAIL>END_PI'
Backbone.sync.args[0][2].success { success: true }
@render.args[0][0].should.equal 'forgot_password_confirmation'
it 'bubbles error', ->
routes.submitForgotPassword(
{ body: email: 'PI:EMAIL:<EMAIL>END_PI' }
@res
)
Backbone.sync.args[0][2].error response: body: error: 'Fail whale'
@render.args[0][1].error.should.equal 'Fail whale'
it 'doesnt choke on errors without bodies', ->
routes.submitForgotPassword(
{ body: email: 'PI:EMAIL:<EMAIL>END_PI' }
@res
)
Backbone.sync.args[0][2].error response: text: 'Whomp'
@render.args[0][1].error.should.containEql 'try again'
describe '#resetPassword', ->
it 'renders the reset form', ->
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
routes.resetPassword {}, @res
@render.args[0][0].should.equal 'reset_password'
describe '#login', ->
beforeEach ->
@redirectURL = '/artwork/matthew-abbott-lobby-and-supercomputer'
@req =
session: null
get: (-> @redirectURL)
query: {}
body: {}
params: {}
@res =
render: @render = sinon.stub(),
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
it 'renders the login page', ->
routes.login @req, @res
@render.args[0][0].should.equal 'login'
it 'renders the call_to_action page', ->
req =
session: null
get: (=> @redirectURL)
query: {
action: 'artwork-save',
'redirect-to': @redirectURL
}
body: {}
params: {}
routes.login req, @res
@render.args[0][0].should.equal 'call_to_action'
it 'passes the redirect param to the template', ->
req =
query: { 'redirect-to': '%2Ffollowing%2Fprofiles' }
body: {}
params: {}
get: (-> false)
routes.login req, @res
@render.args[0][1].redirectTo.should.equal '%2Ffollowing%2Fprofiles'
it 'passes the redirect query param to the template', ->
@req.query = { 'redirect_uri': '%2Ffollowing%2Fprofiles' }
@req.get = (-> false)
routes.login @req, @res
@render.args[0][1].redirectTo.should.equal '%2Ffollowing%2Fprofiles'
it 'redirects to new login page if env variable is set', ->
req =
query: { 'redirect-to': '/'}
body: {}
params: {}
get: (-> false)
res =
redirect: @redirect = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: true
routes.login req, res
@redirect.args[0][0].should.equal '/login?redirect-to=%2F&redirectTo=%2F'
describe '#signUp', ->
beforeEach ->
@req = { session: {}, get: (-> '/auctions/two-x-two'), query: {}, body: {}, params: {}}
@res =
render: @render = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: false
sinon.stub Backbone, 'sync'
afterEach ->
@render.restore?()
Backbone.sync.restore()
it 'renders the call_to_action if coming from an action', ->
@req.query.action = 'register-for-auction'
routes.signUp @req, @res
@render.args[0][1].action.should.equal 'register-for-auction'
@render.args[0][0].should.equal 'call_to_action'
it 'renders the create account page if ?email=1', ->
@req.query['email'] = '1'
routes.signUp @req, @res
@render.args[0][0].should.equal 'signup_email'
it 'renders the create account page if ?email=1', ->
@req.query['email'] = '1'
routes.signUp @req, @res
@render.args[0][0].should.equal 'signup_email'
it 'sets redirectTo if redirect-to was passed', ->
@req.query['email'] = '1'
@req.query['redirect-to'] = '/auction-registration'
routes.signUp @req, @res
@render.args[0][1].redirectTo.should.equal '/auction-registration'
it 'ignores malicious redirects', ->
req = query: { 'redirect-to': 'http://www.iamveryverysorry.com/' }, body: {}, session: {}, params: {}
routes.signUp req, @res
@render.args[0][1].redirectTo.should.equal '/'
it 'sets the prefill var', ->
@req.query.prefill = 'PI:EMAIL:<EMAIL>END_PI'
routes.signUp @req, @res
@render.args[0][1].prefill.should.equal 'PI:EMAIL:<EMAIL>END_PI'
it 'redirects to new signup page if env variable is set', ->
req =
query: { 'redirect-to': '/'}
body: {}
params: {}
get: (-> false)
res =
redirect: @redirect = sinon.stub()
locals:
sd:
MOBILE_NEW_AUTH_MODAL: true
routes.signUp req, res
@redirect.args[0][0].should.containEql '/signup?redirect-to=%2F&redirectTo=%2F'
|
[
{
"context": "d.equal true\n @client.rtm._token.should.equal 'xoxb-faketoken'\n\n it 'Should initialize with a Web client', ",
"end": 331,
"score": 0.8455631136894226,
"start": 320,
"tag": "KEY",
"value": "xoxb-faketo"
},
{
"context": "d.equal true\n @client.web._token.should.... | test/client.coffee | mtoedtem/jarvis | 0 | {RtmClient, WebClient, MemoryDataStore} = require '@slack/client'
SlackFormatter = require '../src/formatter'
should = require 'should'
_ = require 'lodash'
describe 'Init', ->
it 'Should initialize with an RTM client', ->
(@client.rtm instanceof RtmClient).should.equal true
@client.rtm._token.should.equal 'xoxb-faketoken'
it 'Should initialize with a Web client', ->
(@client.web instanceof WebClient).should.equal true
@client.web._token.should.equal 'xoxb-faketoken'
it 'Should initialize with a SlackFormatter - DEPRECATED', ->
(@client.format instanceof SlackFormatter).should.equal true
describe 'connect()', ->
it 'Should be able to connect', ->
@client.connect();
@stubs._connected.should.be.true
describe 'onEvent()', ->
it 'should not need to be set', ->
@client.rtm.emit('message', { fake: 'message' })
(true).should.be.ok
it 'should emit pre-processed messages to the callback', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.user.real_name.should.equal @stubs.user.real_name
message.channel.name.should.equal @stubs.channel.name
done()
# the shape of the following object is a raw RTM message event: https://api.slack.com/events/message
@client.rtm.emit('message', {
type: 'message',
user: @stubs.user.id,
channel: @stubs.channel.id,
text: 'blah',
ts: '1355517523.000005'
})
# NOTE: the following check does not appear to work as expected
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should successfully convert bot users', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.user.id.should.equal @stubs.user.id
message.channel.name.should.equal @stubs.channel.name
done()
# the shape of the following object is a raw RTM message event: https://api.slack.com/events/message
@client.rtm.emit('message', {
type: 'message',
bot_id: 'B123'
channel: @stubs.channel.id,
text: 'blah'
})
# NOTE: the following check does not appear to work as expected
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should handle undefined bot users', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.channel.name.should.equal @stubs.channel.name
done()
@client.rtm.emit('message', {
type: 'message',
bot_id: 'B789'
channel: @stubs.channel.id,
text: 'blah'
})
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should log an error when expanded info cannot be fetched using the Web API', (done) ->
# NOTE: to be certain nothing goes wrong in the rejection handling, the "unhandledRejection" / "rejectionHandled"
# global events need to be instrumented
@client.onEvent (message) ->
done(new Error('A message was emitted'))
@client.rtm.emit('message', {
type: 'message',
user: 'NOT A USER',
channel: @stubs.channel.id,
text: 'blah',
ts: '1355517523.000005'
})
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
describe 'on() - DEPRECATED', ->
it 'Should register events on the RTM stream', ->
event = undefined
@client.on 'some_event', (e) -> event = e
@client.rtm.emit('some_event', {})
event.should.be.ok
describe 'disconnect()', ->
it 'Should disconnect RTM', ->
@client.disconnect()
@stubs._connected.should.be.false
it 'should remove all RTM listeners - LEGACY', ->
@client.on 'some_event', _.noop
@client.disconnect()
@client.rtm.listeners('some_event', true).should.not.be.ok
describe 'setTopic()', ->
it "Should set the topic in a channel", (done) ->
@client.setTopic @stubs.channel.id, 'iAmTopic'
setImmediate(() =>
@stubs._topic.should.equal 'iAmTopic'
done()
, 0)
it "should not set the topic in a DM", (done) ->
@client.setTopic @stubs.DM.id, 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
# NOTE: no good way to assert that debug log was output
done()
, 0)
it "should not set the topic in a MPIM", (done) ->
@client.setTopic @stubs.group.id, 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
# NOTE: no good way to assert that debug log was output
done()
, 0)
it "should log an error if the setTopic web API method fails", (done) ->
@client.setTopic 'NOT A CONVERSATION', 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
, 0)
describe 'send()', ->
it 'Should send a plain string message to room', ->
@client.send {room: 'room1'}, 'Message'
@stubs._msg.should.equal 'Message'
@stubs._room.should.equal 'room1'
it 'Should send an object message to room', ->
@client.send {room: 'room2'}, {text: 'textMessage'}
@stubs._msg.should.equal 'textMessage'
@stubs._room.should.equal 'room2'
it 'Should be able to send a DM to a user object', ->
@client.send @stubs.user, 'DM Message'
@stubs._dmmsg.should.equal 'DM Message'
@stubs._room.should.equal @stubs.user.id
it 'should not send a message to a user without an ID', ->
@client.send { name: "my_crufty_username" }, "don't program with usernames"
@stubs._sendCount.should.equal 0
it 'should log an error when chat.postMessage fails (plain string)', ->
@client.send { room: @stubs.channelWillFailChatPost }, "Message"
@stubs._sendCount.should.equal 0
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
it 'should log an error when chat.postMessage fails (object)', ->
@client.send { room: @stubs.channelWillFailChatPost }, { text: "textMessage" }
@stubs._sendCount.should.equal 0
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
describe 'loadUsers()', ->
it 'should make successive calls to users.list', ->
@client.loadUsers (err, result) =>
@stubs?._listCount.should.equal 2
result.members.length.should.equal 4
it 'should handle errors', ->
@stubs._listError = true
@client.loadUsers (err, result) =>
err.should.be.an.Error
| 47867 | {RtmClient, WebClient, MemoryDataStore} = require '@slack/client'
SlackFormatter = require '../src/formatter'
should = require 'should'
_ = require 'lodash'
describe 'Init', ->
it 'Should initialize with an RTM client', ->
(@client.rtm instanceof RtmClient).should.equal true
@client.rtm._token.should.equal '<KEY>ken'
it 'Should initialize with a Web client', ->
(@client.web instanceof WebClient).should.equal true
@client.web._token.should.equal '<KEY>ken'
it 'Should initialize with a SlackFormatter - DEPRECATED', ->
(@client.format instanceof SlackFormatter).should.equal true
describe 'connect()', ->
it 'Should be able to connect', ->
@client.connect();
@stubs._connected.should.be.true
describe 'onEvent()', ->
it 'should not need to be set', ->
@client.rtm.emit('message', { fake: 'message' })
(true).should.be.ok
it 'should emit pre-processed messages to the callback', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.user.real_name.should.equal @stubs.user.real_name
message.channel.name.should.equal @stubs.channel.name
done()
# the shape of the following object is a raw RTM message event: https://api.slack.com/events/message
@client.rtm.emit('message', {
type: 'message',
user: @stubs.user.id,
channel: @stubs.channel.id,
text: 'blah',
ts: '1355517523.000005'
})
# NOTE: the following check does not appear to work as expected
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should successfully convert bot users', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.user.id.should.equal @stubs.user.id
message.channel.name.should.equal @stubs.channel.name
done()
# the shape of the following object is a raw RTM message event: https://api.slack.com/events/message
@client.rtm.emit('message', {
type: 'message',
bot_id: 'B123'
channel: @stubs.channel.id,
text: 'blah'
})
# NOTE: the following check does not appear to work as expected
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should handle undefined bot users', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.channel.name.should.equal @stubs.channel.name
done()
@client.rtm.emit('message', {
type: 'message',
bot_id: 'B789'
channel: @stubs.channel.id,
text: 'blah'
})
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should log an error when expanded info cannot be fetched using the Web API', (done) ->
# NOTE: to be certain nothing goes wrong in the rejection handling, the "unhandledRejection" / "rejectionHandled"
# global events need to be instrumented
@client.onEvent (message) ->
done(new Error('A message was emitted'))
@client.rtm.emit('message', {
type: 'message',
user: 'NOT A USER',
channel: @stubs.channel.id,
text: 'blah',
ts: '1355517523.000005'
})
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
describe 'on() - DEPRECATED', ->
it 'Should register events on the RTM stream', ->
event = undefined
@client.on 'some_event', (e) -> event = e
@client.rtm.emit('some_event', {})
event.should.be.ok
describe 'disconnect()', ->
it 'Should disconnect RTM', ->
@client.disconnect()
@stubs._connected.should.be.false
it 'should remove all RTM listeners - LEGACY', ->
@client.on 'some_event', _.noop
@client.disconnect()
@client.rtm.listeners('some_event', true).should.not.be.ok
describe 'setTopic()', ->
it "Should set the topic in a channel", (done) ->
@client.setTopic @stubs.channel.id, 'iAmTopic'
setImmediate(() =>
@stubs._topic.should.equal 'iAmTopic'
done()
, 0)
it "should not set the topic in a DM", (done) ->
@client.setTopic @stubs.DM.id, 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
# NOTE: no good way to assert that debug log was output
done()
, 0)
it "should not set the topic in a MPIM", (done) ->
@client.setTopic @stubs.group.id, 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
# NOTE: no good way to assert that debug log was output
done()
, 0)
it "should log an error if the setTopic web API method fails", (done) ->
@client.setTopic 'NOT A CONVERSATION', 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
, 0)
describe 'send()', ->
it 'Should send a plain string message to room', ->
@client.send {room: 'room1'}, 'Message'
@stubs._msg.should.equal 'Message'
@stubs._room.should.equal 'room1'
it 'Should send an object message to room', ->
@client.send {room: 'room2'}, {text: 'textMessage'}
@stubs._msg.should.equal 'textMessage'
@stubs._room.should.equal 'room2'
it 'Should be able to send a DM to a user object', ->
@client.send @stubs.user, 'DM Message'
@stubs._dmmsg.should.equal 'DM Message'
@stubs._room.should.equal @stubs.user.id
it 'should not send a message to a user without an ID', ->
@client.send { name: "my_crufty_username" }, "don't program with usernames"
@stubs._sendCount.should.equal 0
it 'should log an error when chat.postMessage fails (plain string)', ->
@client.send { room: @stubs.channelWillFailChatPost }, "Message"
@stubs._sendCount.should.equal 0
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
it 'should log an error when chat.postMessage fails (object)', ->
@client.send { room: @stubs.channelWillFailChatPost }, { text: "textMessage" }
@stubs._sendCount.should.equal 0
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
describe 'loadUsers()', ->
it 'should make successive calls to users.list', ->
@client.loadUsers (err, result) =>
@stubs?._listCount.should.equal 2
result.members.length.should.equal 4
it 'should handle errors', ->
@stubs._listError = true
@client.loadUsers (err, result) =>
err.should.be.an.Error
| true | {RtmClient, WebClient, MemoryDataStore} = require '@slack/client'
SlackFormatter = require '../src/formatter'
should = require 'should'
_ = require 'lodash'
describe 'Init', ->
it 'Should initialize with an RTM client', ->
(@client.rtm instanceof RtmClient).should.equal true
@client.rtm._token.should.equal 'PI:KEY:<KEY>END_PIken'
it 'Should initialize with a Web client', ->
(@client.web instanceof WebClient).should.equal true
@client.web._token.should.equal 'PI:KEY:<KEY>END_PIken'
it 'Should initialize with a SlackFormatter - DEPRECATED', ->
(@client.format instanceof SlackFormatter).should.equal true
describe 'connect()', ->
it 'Should be able to connect', ->
@client.connect();
@stubs._connected.should.be.true
describe 'onEvent()', ->
it 'should not need to be set', ->
@client.rtm.emit('message', { fake: 'message' })
(true).should.be.ok
it 'should emit pre-processed messages to the callback', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.user.real_name.should.equal @stubs.user.real_name
message.channel.name.should.equal @stubs.channel.name
done()
# the shape of the following object is a raw RTM message event: https://api.slack.com/events/message
@client.rtm.emit('message', {
type: 'message',
user: @stubs.user.id,
channel: @stubs.channel.id,
text: 'blah',
ts: '1355517523.000005'
})
# NOTE: the following check does not appear to work as expected
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should successfully convert bot users', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.user.id.should.equal @stubs.user.id
message.channel.name.should.equal @stubs.channel.name
done()
# the shape of the following object is a raw RTM message event: https://api.slack.com/events/message
@client.rtm.emit('message', {
type: 'message',
bot_id: 'B123'
channel: @stubs.channel.id,
text: 'blah'
})
# NOTE: the following check does not appear to work as expected
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should handle undefined bot users', (done) ->
@client.onEvent (message) =>
message.should.be.ok
message.channel.name.should.equal @stubs.channel.name
done()
@client.rtm.emit('message', {
type: 'message',
bot_id: 'B789'
channel: @stubs.channel.id,
text: 'blah'
})
setTimeout(( =>
@stubs.robot.logger.logs.should.not.have.property('error')
), 0);
it 'should log an error when expanded info cannot be fetched using the Web API', (done) ->
# NOTE: to be certain nothing goes wrong in the rejection handling, the "unhandledRejection" / "rejectionHandled"
# global events need to be instrumented
@client.onEvent (message) ->
done(new Error('A message was emitted'))
@client.rtm.emit('message', {
type: 'message',
user: 'NOT A USER',
channel: @stubs.channel.id,
text: 'blah',
ts: '1355517523.000005'
})
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
describe 'on() - DEPRECATED', ->
it 'Should register events on the RTM stream', ->
event = undefined
@client.on 'some_event', (e) -> event = e
@client.rtm.emit('some_event', {})
event.should.be.ok
describe 'disconnect()', ->
it 'Should disconnect RTM', ->
@client.disconnect()
@stubs._connected.should.be.false
it 'should remove all RTM listeners - LEGACY', ->
@client.on 'some_event', _.noop
@client.disconnect()
@client.rtm.listeners('some_event', true).should.not.be.ok
describe 'setTopic()', ->
it "Should set the topic in a channel", (done) ->
@client.setTopic @stubs.channel.id, 'iAmTopic'
setImmediate(() =>
@stubs._topic.should.equal 'iAmTopic'
done()
, 0)
it "should not set the topic in a DM", (done) ->
@client.setTopic @stubs.DM.id, 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
# NOTE: no good way to assert that debug log was output
done()
, 0)
it "should not set the topic in a MPIM", (done) ->
@client.setTopic @stubs.group.id, 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
# NOTE: no good way to assert that debug log was output
done()
, 0)
it "should log an error if the setTopic web API method fails", (done) ->
@client.setTopic 'NOT A CONVERSATION', 'iAmTopic'
setTimeout(() =>
@stubs.should.not.have.property('_topic')
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
, 0)
describe 'send()', ->
it 'Should send a plain string message to room', ->
@client.send {room: 'room1'}, 'Message'
@stubs._msg.should.equal 'Message'
@stubs._room.should.equal 'room1'
it 'Should send an object message to room', ->
@client.send {room: 'room2'}, {text: 'textMessage'}
@stubs._msg.should.equal 'textMessage'
@stubs._room.should.equal 'room2'
it 'Should be able to send a DM to a user object', ->
@client.send @stubs.user, 'DM Message'
@stubs._dmmsg.should.equal 'DM Message'
@stubs._room.should.equal @stubs.user.id
it 'should not send a message to a user without an ID', ->
@client.send { name: "my_crufty_username" }, "don't program with usernames"
@stubs._sendCount.should.equal 0
it 'should log an error when chat.postMessage fails (plain string)', ->
@client.send { room: @stubs.channelWillFailChatPost }, "Message"
@stubs._sendCount.should.equal 0
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
it 'should log an error when chat.postMessage fails (object)', ->
@client.send { room: @stubs.channelWillFailChatPost }, { text: "textMessage" }
@stubs._sendCount.should.equal 0
setImmediate(( =>
@stubs.robot.logger.logs?.error.length.should.equal 1
done()
), 0);
describe 'loadUsers()', ->
it 'should make successive calls to users.list', ->
@client.loadUsers (err, result) =>
@stubs?._listCount.should.equal 2
result.members.length.should.equal 4
it 'should handle errors', ->
@stubs._listError = true
@client.loadUsers (err, result) =>
err.should.be.an.Error
|
[
{
"context": "\n\n a = \n name: \"name\"\n\n $a = $(a)\n\n $a.w",
"end": 4120,
"score": 0.6794300079345703,
"start": 4116,
"tag": "NAME",
"value": "name"
}
] | test/$Spec.coffee | zhongxingdou/mayjs | 0 | # encoding: utf-8
sinon = require 'sinon'
describe '$.js', ->
M = require("../may.js")
M ($, $$) ->
describe "$()", ->
it "包装值类型", ->
m =
wrap: ->
__option__:
supports: [Number]
$.reg m
$8 = $(8)
$8.should.have.property "wrap", m.wrap
it "包装引用类型", ->
m =
wrap1: ->
__option__:
supports: [Array]
$.reg m
a = []
$a = $(a)
$a.should.have.property "wrap1", m.wrap1
it "should wrap a object if its prototype wrapper registed", ->
m =
wrap2: ->
__option__:
supports: [Array.prototype]
$.reg m
$a = $([])
$a.should.have.property "wrap2", m.wrap2
it "should wrap a object if its __interfaces__ wrapper registed", ->
IA = {}
IB = {}
a =
__interfaces__: [IA]
b =
__interfaces__: [IB]
m =
wrap3: ->
__option__:
supports: [IA,IB]
$.reg m
$a = $(a)
$b = $(b)
$a.should.have.property "wrap3", m.wrap3
$b.should.have.property "wrap3", m.wrap3
it "should not wrap the object which wrapper module not registed", ->
m =
wrap4: ->
__option__:
supports: [Array]
$.reg m
$a = $({})
$a.should.not.have.property "wrap4"
it "should wrap all of objects if that wrapper module registed", ->
m =
wrap5: ->
__option__:
supports: [Array, Number]
$.reg m
$a = $([])
$n = $(8)
$a.should.have.property "wrap5", m.wrap5
$n.should.have.property "wrap5", m.wrap5
it "每个M.$wrapper()都将产生新的实例", ->
$1 = M.$wrapper()
$2 = M.$wrapper()
m1 =
wrap6: ->
__option__:
supports: [Object]
m2 =
wrap6: ->
__option__:
supports: [Object]
$1.$.reg m1
$2.$.reg m2
obj = {}
$o1 = $1.$(obj)
$o2 = $2.$(obj)
$o1.should.have.property "wrap6", m1.wrap6
$o2.should.have.property "wrap6", m2.wrap6
describe "$$()", ->
it "should wrap value type object", ->
m =
wrap7: ->
__option__:
supports: [Number]
$.reg m
$8 = $$(8)
$8.hasOwnProperty("wrap7").should.be.ture
$8.wrap7.should.equal(m.wrap7)
it "should wrap reference type object", ->
m =
wrap8: ->
__option__:
supports: [Array]
$.reg m
a = []
$$(a)
a.should.have.property "wrap8", m.wrap8
describe "$() with methodize", ->
it "should wrap method by methodize", ->
spy = sinon.spy()
m =
wrap9: (self) ->
self
spy: spy
wrapA: ->
this.spy()
__option__:
supports: [Object]
methodize: true
$.reg m
a =
name: "name"
$a = $(a)
$a.wrap9().should.eql a
$a.wrapA()
spy.called.should.be.true
describe "global wrap M.$() M.$$() M.$.reg()", ->
afterEach ->
M.$.clear()
it "$() will using global Wrapper with registed by M.$.reg()", ->
spy = sinon.spy()
StringEx =
twice: spy
M.$.reg StringEx, String, {methodize: true}
$('string').twice()
spy.called.should.be.true
spy.reset()
M.$('string').twice()
spy.called.should.be.true
it "M.$() can't using local Wrapper which registed by $.reg()", ->
StringEx =
twice: ->
$.reg StringEx, String, {methodize: true}
M.$("string").should.not.have.property("twice")
| 76941 | # encoding: utf-8
sinon = require 'sinon'
describe '$.js', ->
M = require("../may.js")
M ($, $$) ->
describe "$()", ->
it "包装值类型", ->
m =
wrap: ->
__option__:
supports: [Number]
$.reg m
$8 = $(8)
$8.should.have.property "wrap", m.wrap
it "包装引用类型", ->
m =
wrap1: ->
__option__:
supports: [Array]
$.reg m
a = []
$a = $(a)
$a.should.have.property "wrap1", m.wrap1
it "should wrap a object if its prototype wrapper registed", ->
m =
wrap2: ->
__option__:
supports: [Array.prototype]
$.reg m
$a = $([])
$a.should.have.property "wrap2", m.wrap2
it "should wrap a object if its __interfaces__ wrapper registed", ->
IA = {}
IB = {}
a =
__interfaces__: [IA]
b =
__interfaces__: [IB]
m =
wrap3: ->
__option__:
supports: [IA,IB]
$.reg m
$a = $(a)
$b = $(b)
$a.should.have.property "wrap3", m.wrap3
$b.should.have.property "wrap3", m.wrap3
it "should not wrap the object which wrapper module not registed", ->
m =
wrap4: ->
__option__:
supports: [Array]
$.reg m
$a = $({})
$a.should.not.have.property "wrap4"
it "should wrap all of objects if that wrapper module registed", ->
m =
wrap5: ->
__option__:
supports: [Array, Number]
$.reg m
$a = $([])
$n = $(8)
$a.should.have.property "wrap5", m.wrap5
$n.should.have.property "wrap5", m.wrap5
it "每个M.$wrapper()都将产生新的实例", ->
$1 = M.$wrapper()
$2 = M.$wrapper()
m1 =
wrap6: ->
__option__:
supports: [Object]
m2 =
wrap6: ->
__option__:
supports: [Object]
$1.$.reg m1
$2.$.reg m2
obj = {}
$o1 = $1.$(obj)
$o2 = $2.$(obj)
$o1.should.have.property "wrap6", m1.wrap6
$o2.should.have.property "wrap6", m2.wrap6
describe "$$()", ->
it "should wrap value type object", ->
m =
wrap7: ->
__option__:
supports: [Number]
$.reg m
$8 = $$(8)
$8.hasOwnProperty("wrap7").should.be.ture
$8.wrap7.should.equal(m.wrap7)
it "should wrap reference type object", ->
m =
wrap8: ->
__option__:
supports: [Array]
$.reg m
a = []
$$(a)
a.should.have.property "wrap8", m.wrap8
describe "$() with methodize", ->
it "should wrap method by methodize", ->
spy = sinon.spy()
m =
wrap9: (self) ->
self
spy: spy
wrapA: ->
this.spy()
__option__:
supports: [Object]
methodize: true
$.reg m
a =
name: "<NAME>"
$a = $(a)
$a.wrap9().should.eql a
$a.wrapA()
spy.called.should.be.true
describe "global wrap M.$() M.$$() M.$.reg()", ->
afterEach ->
M.$.clear()
it "$() will using global Wrapper with registed by M.$.reg()", ->
spy = sinon.spy()
StringEx =
twice: spy
M.$.reg StringEx, String, {methodize: true}
$('string').twice()
spy.called.should.be.true
spy.reset()
M.$('string').twice()
spy.called.should.be.true
it "M.$() can't using local Wrapper which registed by $.reg()", ->
StringEx =
twice: ->
$.reg StringEx, String, {methodize: true}
M.$("string").should.not.have.property("twice")
| true | # encoding: utf-8
sinon = require 'sinon'
describe '$.js', ->
M = require("../may.js")
M ($, $$) ->
describe "$()", ->
it "包装值类型", ->
m =
wrap: ->
__option__:
supports: [Number]
$.reg m
$8 = $(8)
$8.should.have.property "wrap", m.wrap
it "包装引用类型", ->
m =
wrap1: ->
__option__:
supports: [Array]
$.reg m
a = []
$a = $(a)
$a.should.have.property "wrap1", m.wrap1
it "should wrap a object if its prototype wrapper registed", ->
m =
wrap2: ->
__option__:
supports: [Array.prototype]
$.reg m
$a = $([])
$a.should.have.property "wrap2", m.wrap2
it "should wrap a object if its __interfaces__ wrapper registed", ->
IA = {}
IB = {}
a =
__interfaces__: [IA]
b =
__interfaces__: [IB]
m =
wrap3: ->
__option__:
supports: [IA,IB]
$.reg m
$a = $(a)
$b = $(b)
$a.should.have.property "wrap3", m.wrap3
$b.should.have.property "wrap3", m.wrap3
it "should not wrap the object which wrapper module not registed", ->
m =
wrap4: ->
__option__:
supports: [Array]
$.reg m
$a = $({})
$a.should.not.have.property "wrap4"
it "should wrap all of objects if that wrapper module registed", ->
m =
wrap5: ->
__option__:
supports: [Array, Number]
$.reg m
$a = $([])
$n = $(8)
$a.should.have.property "wrap5", m.wrap5
$n.should.have.property "wrap5", m.wrap5
it "每个M.$wrapper()都将产生新的实例", ->
$1 = M.$wrapper()
$2 = M.$wrapper()
m1 =
wrap6: ->
__option__:
supports: [Object]
m2 =
wrap6: ->
__option__:
supports: [Object]
$1.$.reg m1
$2.$.reg m2
obj = {}
$o1 = $1.$(obj)
$o2 = $2.$(obj)
$o1.should.have.property "wrap6", m1.wrap6
$o2.should.have.property "wrap6", m2.wrap6
describe "$$()", ->
it "should wrap value type object", ->
m =
wrap7: ->
__option__:
supports: [Number]
$.reg m
$8 = $$(8)
$8.hasOwnProperty("wrap7").should.be.ture
$8.wrap7.should.equal(m.wrap7)
it "should wrap reference type object", ->
m =
wrap8: ->
__option__:
supports: [Array]
$.reg m
a = []
$$(a)
a.should.have.property "wrap8", m.wrap8
describe "$() with methodize", ->
it "should wrap method by methodize", ->
spy = sinon.spy()
m =
wrap9: (self) ->
self
spy: spy
wrapA: ->
this.spy()
__option__:
supports: [Object]
methodize: true
$.reg m
a =
name: "PI:NAME:<NAME>END_PI"
$a = $(a)
$a.wrap9().should.eql a
$a.wrapA()
spy.called.should.be.true
describe "global wrap M.$() M.$$() M.$.reg()", ->
afterEach ->
M.$.clear()
it "$() will using global Wrapper with registed by M.$.reg()", ->
spy = sinon.spy()
StringEx =
twice: spy
M.$.reg StringEx, String, {methodize: true}
$('string').twice()
spy.called.should.be.true
spy.reset()
M.$('string').twice()
spy.called.should.be.true
it "M.$() can't using local Wrapper which registed by $.reg()", ->
StringEx =
twice: ->
$.reg StringEx, String, {methodize: true}
M.$("string").should.not.have.property("twice")
|
[
{
"context": "lectorAll('.js-reply'), {\n csrfToken: \"foo csrf_token\",\n target: \"/foo/\",\n ",
"end": 1949,
"score": 0.5448184013366699,
"start": 1946,
"tag": "PASSWORD",
"value": "foo"
},
{
"context": "torAll('.js-reply'), {\n csrfToken: \"... | spirit/core/static/spirit/scripts/test/suites/editor_file_upload-spec.coffee | Ke-xueting/Spirit | 974 | describe "editor file upload plugin tests", ->
textarea = null
post = null
editor = null
dialog = null
responseData = null
triggerFakeUpload = (name='foo.doc') ->
inputFileOrg = editor.inputFile
try
editor.inputFile = {files: [{name: name}]}
evt = document.createEvent("HTMLEvents")
evt.initEvent("change", false, true)
inputFileOrg.dispatchEvent(evt)
finally
editor.inputFile = inputFileOrg
beforeEach ->
document.body.innerHTML = """
<form class="js-reply" action=".">
<textarea id="id_comment"></textarea>
<div class="js-box-preview-content" style="display:none;"></div>
<ul>
<li><a class="js-box-bold" href="#" title="Bold"></a></li>
<li><a class="js-box-italic" href="#" title="Italic"></a></li>
<li><a class="js-box-list" href="#" title="List"></a></li>
<li><a class="js-box-url" href="#" title="URL"></a></li>
<li><a class="js-box-image" href="#" title="Image"></a></li>
<li><a class="js-box-file" href="#" title="File"></a></li>
<li><a class="js-box-poll" href="#" title="Poll"></a></li>
<li><a class="js-box-preview" href="#" title="Preview"></a></li>
</ul>
</form>
"""
responseData = {url: '/path/foo'}
post = spyOn(global, 'fetch')
post.and.callFake( -> {
then: (func) ->
data = func({ok: true, json: -> responseData})
return {
then: (func) ->
func(data)
return {
catch: -> return
}
}
})
editor = stModules.editorFileUpload(document.querySelectorAll('.js-reply'), {
csrfToken: "foo csrf_token",
target: "/foo/",
placeholderText: "foo uploading {name}",
allowedFileMedia: ".doc,.docx,.pdf"
})[0]
textarea = document.querySelector('textarea')
# Prevent popup
dialog = spyOn(editor.inputFile, 'click')
dialog.and.callFake( -> return)
it "opens the file choose dialog", ->
dialog.calls.reset()
document.querySelector('.js-box-file').click()
expect(dialog).toHaveBeenCalled()
it "uploads the file", ->
post.calls.reset()
formDataMock = jasmine.createSpyObj('formDataMock', ['append', ])
spyOn(global, "FormData").and.returnValue(formDataMock)
triggerFakeUpload('foo.doc')
expect(post.calls.any()).toEqual(true)
expect(post.calls.argsFor(0)[0]).toEqual('/foo/')
expect(post.calls.argsFor(0)[1].body).toEqual(formDataMock)
expect(formDataMock.append).toHaveBeenCalledWith('csrfmiddlewaretoken', 'foo csrf_token')
expect(formDataMock.append).toHaveBeenCalledWith('file', {name : 'foo.doc'})
it "changes the placeholder on upload success", ->
textarea.value = "foobar"
triggerFakeUpload('foo.doc')
expect(textarea.value).toEqual("foobar[foo.doc](/path/foo)")
it "changes the placeholder on upload error", ->
textarea.value = "foobar"
responseData = {error: {foo: 'foo error'}}
triggerFakeUpload('foo.doc')
expect(textarea.value).toEqual('foobar[{"foo":"foo error"}]()')
it "changes the placeholder on upload failure", ->
post.calls.reset()
textarea.value = "foobar"
post.and.callFake( -> {
then: (func) ->
try
func({ok: false, status: 500, statusText: 'foo error'})
catch err
return {
then: -> {
catch: (func) -> func(err)
}
}
})
log = spyOn(console, 'log')
log.and.callFake( -> )
triggerFakeUpload('foo.doc')
expect(post.calls.any()).toEqual(true)
expect(textarea.value).toEqual("foobar[error: 500 foo error]()")
expect(log.calls.argsFor(0)[0]).toEqual('error: 500 foo error')
it "checks for default media file extensions if none are provided", ->
expect(editor.inputFile.accept).toEqual(".doc,.docx,.pdf")
it "checks for custom media file extensions if they are provided", ->
editor = stModules.editorFileUpload(document.querySelectorAll('.js-reply'), {
allowedFileMedia: ".superdoc"
})[0]
expect(editor.inputFile.accept).toEqual(".superdoc")
it "has correct meta data", ->
expect(editor.meta).toEqual({
fieldName: "file",
tag: "[{text}]({url})",
elm: ".js-box-file"
})
| 48422 | describe "editor file upload plugin tests", ->
textarea = null
post = null
editor = null
dialog = null
responseData = null
triggerFakeUpload = (name='foo.doc') ->
inputFileOrg = editor.inputFile
try
editor.inputFile = {files: [{name: name}]}
evt = document.createEvent("HTMLEvents")
evt.initEvent("change", false, true)
inputFileOrg.dispatchEvent(evt)
finally
editor.inputFile = inputFileOrg
beforeEach ->
document.body.innerHTML = """
<form class="js-reply" action=".">
<textarea id="id_comment"></textarea>
<div class="js-box-preview-content" style="display:none;"></div>
<ul>
<li><a class="js-box-bold" href="#" title="Bold"></a></li>
<li><a class="js-box-italic" href="#" title="Italic"></a></li>
<li><a class="js-box-list" href="#" title="List"></a></li>
<li><a class="js-box-url" href="#" title="URL"></a></li>
<li><a class="js-box-image" href="#" title="Image"></a></li>
<li><a class="js-box-file" href="#" title="File"></a></li>
<li><a class="js-box-poll" href="#" title="Poll"></a></li>
<li><a class="js-box-preview" href="#" title="Preview"></a></li>
</ul>
</form>
"""
responseData = {url: '/path/foo'}
post = spyOn(global, 'fetch')
post.and.callFake( -> {
then: (func) ->
data = func({ok: true, json: -> responseData})
return {
then: (func) ->
func(data)
return {
catch: -> return
}
}
})
editor = stModules.editorFileUpload(document.querySelectorAll('.js-reply'), {
csrfToken: "<PASSWORD> <KEY> <PASSWORD>_<KEY>",
target: "/foo/",
placeholderText: "foo uploading {name}",
allowedFileMedia: ".doc,.docx,.pdf"
})[0]
textarea = document.querySelector('textarea')
# Prevent popup
dialog = spyOn(editor.inputFile, 'click')
dialog.and.callFake( -> return)
it "opens the file choose dialog", ->
dialog.calls.reset()
document.querySelector('.js-box-file').click()
expect(dialog).toHaveBeenCalled()
it "uploads the file", ->
post.calls.reset()
formDataMock = jasmine.createSpyObj('formDataMock', ['append', ])
spyOn(global, "FormData").and.returnValue(formDataMock)
triggerFakeUpload('foo.doc')
expect(post.calls.any()).toEqual(true)
expect(post.calls.argsFor(0)[0]).toEqual('/foo/')
expect(post.calls.argsFor(0)[1].body).toEqual(formDataMock)
expect(formDataMock.append).toHaveBeenCalledWith('csrfmiddlewaretoken', 'foo csrf_token')
expect(formDataMock.append).toHaveBeenCalledWith('file', {name : 'foo.doc'})
it "changes the placeholder on upload success", ->
textarea.value = "foobar"
triggerFakeUpload('foo.doc')
expect(textarea.value).toEqual("foobar[foo.doc](/path/foo)")
it "changes the placeholder on upload error", ->
textarea.value = "foobar"
responseData = {error: {foo: 'foo error'}}
triggerFakeUpload('foo.doc')
expect(textarea.value).toEqual('foobar[{"foo":"foo error"}]()')
it "changes the placeholder on upload failure", ->
post.calls.reset()
textarea.value = "foobar"
post.and.callFake( -> {
then: (func) ->
try
func({ok: false, status: 500, statusText: 'foo error'})
catch err
return {
then: -> {
catch: (func) -> func(err)
}
}
})
log = spyOn(console, 'log')
log.and.callFake( -> )
triggerFakeUpload('foo.doc')
expect(post.calls.any()).toEqual(true)
expect(textarea.value).toEqual("foobar[error: 500 foo error]()")
expect(log.calls.argsFor(0)[0]).toEqual('error: 500 foo error')
it "checks for default media file extensions if none are provided", ->
expect(editor.inputFile.accept).toEqual(".doc,.docx,.pdf")
it "checks for custom media file extensions if they are provided", ->
editor = stModules.editorFileUpload(document.querySelectorAll('.js-reply'), {
allowedFileMedia: ".superdoc"
})[0]
expect(editor.inputFile.accept).toEqual(".superdoc")
it "has correct meta data", ->
expect(editor.meta).toEqual({
fieldName: "file",
tag: "[{text}]({url})",
elm: ".js-box-file"
})
| true | describe "editor file upload plugin tests", ->
textarea = null
post = null
editor = null
dialog = null
responseData = null
triggerFakeUpload = (name='foo.doc') ->
inputFileOrg = editor.inputFile
try
editor.inputFile = {files: [{name: name}]}
evt = document.createEvent("HTMLEvents")
evt.initEvent("change", false, true)
inputFileOrg.dispatchEvent(evt)
finally
editor.inputFile = inputFileOrg
beforeEach ->
document.body.innerHTML = """
<form class="js-reply" action=".">
<textarea id="id_comment"></textarea>
<div class="js-box-preview-content" style="display:none;"></div>
<ul>
<li><a class="js-box-bold" href="#" title="Bold"></a></li>
<li><a class="js-box-italic" href="#" title="Italic"></a></li>
<li><a class="js-box-list" href="#" title="List"></a></li>
<li><a class="js-box-url" href="#" title="URL"></a></li>
<li><a class="js-box-image" href="#" title="Image"></a></li>
<li><a class="js-box-file" href="#" title="File"></a></li>
<li><a class="js-box-poll" href="#" title="Poll"></a></li>
<li><a class="js-box-preview" href="#" title="Preview"></a></li>
</ul>
</form>
"""
responseData = {url: '/path/foo'}
post = spyOn(global, 'fetch')
post.and.callFake( -> {
then: (func) ->
data = func({ok: true, json: -> responseData})
return {
then: (func) ->
func(data)
return {
catch: -> return
}
}
})
editor = stModules.editorFileUpload(document.querySelectorAll('.js-reply'), {
csrfToken: "PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI PI:PASSWORD:<PASSWORD>END_PI_PI:KEY:<KEY>END_PI",
target: "/foo/",
placeholderText: "foo uploading {name}",
allowedFileMedia: ".doc,.docx,.pdf"
})[0]
textarea = document.querySelector('textarea')
# Prevent popup
dialog = spyOn(editor.inputFile, 'click')
dialog.and.callFake( -> return)
it "opens the file choose dialog", ->
dialog.calls.reset()
document.querySelector('.js-box-file').click()
expect(dialog).toHaveBeenCalled()
it "uploads the file", ->
post.calls.reset()
formDataMock = jasmine.createSpyObj('formDataMock', ['append', ])
spyOn(global, "FormData").and.returnValue(formDataMock)
triggerFakeUpload('foo.doc')
expect(post.calls.any()).toEqual(true)
expect(post.calls.argsFor(0)[0]).toEqual('/foo/')
expect(post.calls.argsFor(0)[1].body).toEqual(formDataMock)
expect(formDataMock.append).toHaveBeenCalledWith('csrfmiddlewaretoken', 'foo csrf_token')
expect(formDataMock.append).toHaveBeenCalledWith('file', {name : 'foo.doc'})
it "changes the placeholder on upload success", ->
textarea.value = "foobar"
triggerFakeUpload('foo.doc')
expect(textarea.value).toEqual("foobar[foo.doc](/path/foo)")
it "changes the placeholder on upload error", ->
textarea.value = "foobar"
responseData = {error: {foo: 'foo error'}}
triggerFakeUpload('foo.doc')
expect(textarea.value).toEqual('foobar[{"foo":"foo error"}]()')
it "changes the placeholder on upload failure", ->
post.calls.reset()
textarea.value = "foobar"
post.and.callFake( -> {
then: (func) ->
try
func({ok: false, status: 500, statusText: 'foo error'})
catch err
return {
then: -> {
catch: (func) -> func(err)
}
}
})
log = spyOn(console, 'log')
log.and.callFake( -> )
triggerFakeUpload('foo.doc')
expect(post.calls.any()).toEqual(true)
expect(textarea.value).toEqual("foobar[error: 500 foo error]()")
expect(log.calls.argsFor(0)[0]).toEqual('error: 500 foo error')
it "checks for default media file extensions if none are provided", ->
expect(editor.inputFile.accept).toEqual(".doc,.docx,.pdf")
it "checks for custom media file extensions if they are provided", ->
editor = stModules.editorFileUpload(document.querySelectorAll('.js-reply'), {
allowedFileMedia: ".superdoc"
})[0]
expect(editor.inputFile.accept).toEqual(".superdoc")
it "has correct meta data", ->
expect(editor.meta).toEqual({
fieldName: "file",
tag: "[{text}]({url})",
elm: ".js-box-file"
})
|
[
{
"context": "or name and message', ->\n message = 'random message'\n error = new TypeError message\n ob",
"end": 8092,
"score": 0.6664804220199585,
"start": 8085,
"tag": "PASSWORD",
"value": "message"
}
] | packages/core/neft-core/src/util/index.test.coffee | Neft-io/neftio | 23 | 'use strict'
util = require './'
describe 'is()', ->
it 'returns true if the given values are the same', ->
assert.is util.is('a', 'a'), true
assert.is util.is(1, 1), true
assert.is util.is(undefined, undefined), true
assert.is util.is(null, null), true
assert.is util.is(true, true), true
assert.is util.is(obj = {}, obj), true
assert.is util.is((func = ->), func), true
it 'returns false if the given values are different', ->
assert.is util.is('a', 'b'), false
assert.is util.is(1, 2), false
assert.is util.is(null, undefined), false
assert.is util.is(false, true), false
assert.is util.is({}, {}), false
assert.is util.is((->), ->), false
it 'returns true if two NaNs have been given', ->
assert.is util.is(NaN, NaN), true
it 'returns false for negative zero and positive zero comparison', ->
assert.is util.is(-0, 0), false
return
describe 'isFloat()', ->
it 'returns true for finite numbers', ->
assert.is util.isFloat(10), true
assert.is util.isFloat(10.5), true
assert.is util.isFloat(-23.12), true
assert.is util.isFloat(0), true
it 'returns false for not infinite numbers', ->
assert.is util.isFloat(NaN), false
assert.is util.isFloat(Infinity), false
assert.is util.isFloat(-Infinity), false
it 'returns false for not numbers type', ->
assert.is util.isFloat(null), false
assert.is util.isFloat(true), false
assert.is util.isFloat(undefined), false
assert.is util.isFloat(->), false
assert.is util.isFloat({}), false
assert.is util.isFloat(Object(4)), false
assert.is util.isFloat('4'), false
return
describe 'isInteger()', ->
it 'returns true for finite numbers with no exponent', ->
assert.is util.isInteger(3), true
assert.is util.isInteger(-5), true
assert.is util.isInteger(0), true
it 'returns false for not finite numbers', ->
assert.is util.isInteger(NaN), false
assert.is util.isInteger(Infinity), false
assert.is util.isInteger(-Infinity), false
it 'returns false for numbers with exponent', ->
assert.is util.isInteger(3.2), false
assert.is util.isInteger(-5.1), false
assert.is util.isInteger(0.3), false
it 'returns false for not numbers type', ->
assert.is util.isInteger(null), false
assert.is util.isInteger(true), false
assert.is util.isInteger(undefined), false
assert.is util.isInteger(->), false
assert.is util.isInteger({}), false
assert.is util.isInteger(Object(4)), false
assert.is util.isInteger('4'), false
return
describe 'isPrimitive()', ->
it 'returns true for a null, string, number boolean or an undefined', ->
assert.is util.isPrimitive(null), true
assert.is util.isPrimitive('a'), true
assert.is util.isPrimitive(12.2), true
assert.is util.isPrimitive(true), true
assert.is util.isPrimitive(undefined), true
it 'returns false for other types', ->
assert.is util.isPrimitive({}), false
assert.is util.isPrimitive([]), false
assert.is util.isPrimitive(->), false
assert.is util.isPrimitive(Object(4)), false
return
describe 'isObject()', ->
it 'returns true if the given value is an array of an object', ->
assert.is util.isObject([]), true
assert.is util.isObject({}), true
assert.is util.isObject(Object.create(null)), true
assert.is util.isObject(Object.create({a: 1})), true
it 'returns false for the given null', ->
assert.is util.isObject(null), false
it 'returns false for types different than object', ->
assert.is util.isObject(->), false
assert.is util.isObject(false), false
assert.is util.isObject(undefined), false
assert.is util.isObject('a'), false
return
describe 'isPlainObject()', ->
it 'returns true for the given object with no prototype', ->
assert.is util.isPlainObject(Object.create(null)), true
it 'returns true for the given object with standard prototype', ->
assert.is util.isPlainObject({}), true
assert.is util.isPlainObject(Object.create(Object.prototype)), true
it 'returns false for object with custom prototype', ->
class A
assert.is util.isPlainObject(Object.create({a: 1})), false
assert.is util.isPlainObject(new A), false
it 'returns false for the given null', ->
assert.is util.isPlainObject(null), false
it 'returns false for types different than object', ->
assert.is util.isPlainObject(->), false
assert.is util.isPlainObject(false), false
assert.is util.isPlainObject(undefined), false
assert.is util.isPlainObject('a'), false
return
describe 'isArguments()', ->
# TODO
describe 'merge()', ->
# TODO
describe 'mergeAll()', ->
# TODO
describe 'mergeDeep()', ->
it 'merges objects deeply', ->
source = {a: {aa: '', ac: 0}, c: 4}
obj = {a: {aa: 1, ab: 2}, b: 3}
expected = {a: {aa: 1, ab: 2, ac: 0}, b: 3, c: 4}
assert.isEqual util.mergeDeep(source, obj), expected
it 'merges lists', ->
source = [1, 2]
obj = [3]
expected = [1, 2, 3]
assert.isEqual util.mergeDeep(source, obj), expected
it 'merges lists deeply', ->
source = {a: [1, 2]}
obj = {a: [3]}
expected = {a: [1, 2, 3]}
assert.isEqual util.mergeDeep(source, obj), expected
return
describe 'fill()', ->
# TODO
describe 'remove()', ->
# TODO
describe 'removeFromUnorderedArray()', ->
# TODO
describe 'getPropertyDescriptor()', ->
# TODO
describe 'lookupGetter()', ->
# TODO
describe 'lookupSetter()', ->
# TODO
describe 'defineProperty()', ->
# TODO
describe 'overrideProperty()', ->
# TODO
describe 'clone()', ->
# TODO
describe 'cloneDeep()', ->
# TODO
describe 'isEmpty()', ->
# TODO
describe 'last()', ->
# TODO
describe 'clear()', ->
# TODO
describe 'setPrototypeOf()', ->
# TODO
describe 'has()', ->
# TODO
describe 'objectToArray()', ->
# TODO
describe 'arrayToObject()', ->
# TODO
describe 'capitalize()', ->
it 'uppercases first character', ->
assert.is util.capitalize('abc'), 'Abc'
assert.is util.capitalize('abc_def'), 'Abc_def'
assert.is util.capitalize('Abc_def'), 'Abc_def'
assert.is util.capitalize('1abc'), '1abc'
assert.is util.capitalize(''), ''
return
describe 'addSlashes()', ->
# TODO
describe 'uid()', ->
# TODO
describe 'tryFunction()', ->
# TODO
describe 'catchError()', ->
# TODO
describe 'bindFunctionContext()', ->
it 'returns bound function', ->
ctx = null
args = null
handleFunc = (localCtx, localArgs...) ->
ctx = localCtx
args = localArgs
boundCtx = a: 2
wrongCtx = wrong: 1
test = (func) ->
funcArgs = []
for i in [0...func.length] by 1
funcArgs.push i
util.bindFunctionContext(func, boundCtx).apply wrongCtx, funcArgs
assert.is ctx, boundCtx
assert.isEqual args, funcArgs
test ((a) -> handleFunc(@, arguments...))
test ((a, b) -> handleFunc(@, arguments...))
test ((a, b, c) -> handleFunc(@, arguments...))
test ((a, b, c, d) -> handleFunc(@, arguments...))
test ((a, b, c, d, e) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g, h) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g, h, i) -> handleFunc(@, arguments...))
return
describe 'errorToObject()', ->
it 'returns error name and message', ->
message = 'random message'
error = new TypeError message
object = util.errorToObject error
assert.is object.name, 'TypeError'
assert.is object.message, message
it 'merges custom error properties', ->
error = new Error
error.prop = 'a'
object = util.errorToObject error
assert.is object.prop, 'a'
it 'returned object can be stringified', ->
error = new TypeError 'error message'
error.custom = 2
object = util.errorToObject error
json = JSON.parse JSON.stringify object
# line and column are optional
delete json.line
delete json.column
assert.isEqual json,
name: 'TypeError'
message: 'error message'
custom: 2
return
describe 'getOwnProperties()', ->
# TODO
describe 'isEqual()', ->
it 'returns proper value of two objects given', ->
assert.is util.isEqual({a: 1}, {a: 1}), true
assert.is util.isEqual({a: 1}, {b: 1}), false
assert.is util.isEqual({a: 1}, {a: 2}), false
it 'returns proper value of two arrays given', ->
assert.is util.isEqual([1, 2], [1, 2]), true
assert.is util.isEqual([1, 2], [1]), false
assert.is util.isEqual([2, 1], [1, 2]), false
it 'test objects deeply', ->
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 1}]}), true
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}), false
it 'compareFunction is used to test primitive values', ->
funcArgs = []
compareFunction = (args...) -> funcArgs.push args
util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, compareFunction)
expected = [[1, 2]]
assert.is JSON.stringify(funcArgs), JSON.stringify(expected)
it 'maxDeep specifies how deep objects should be tested', ->
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, 2), true
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, 3), false
return
describe 'snakeToCamel()', ->
it 'returns given string as camel case', ->
assert.is util.snakeToCamel('ab_cd_23_efg'), 'abCd23Efg'
return
describe 'kebabToCamel()', ->
it 'returns given string as camel case', ->
assert.is util.kebabToCamel('ab-cd-23-efg'), 'abCd23Efg'
return
describe 'camelToKebab()', ->
it 'returns given string as kebab case', ->
assert.is util.camelToKebab('abCd23EfgAbc'), 'ab-cd23efg-abc'
return
| 214455 | 'use strict'
util = require './'
describe 'is()', ->
it 'returns true if the given values are the same', ->
assert.is util.is('a', 'a'), true
assert.is util.is(1, 1), true
assert.is util.is(undefined, undefined), true
assert.is util.is(null, null), true
assert.is util.is(true, true), true
assert.is util.is(obj = {}, obj), true
assert.is util.is((func = ->), func), true
it 'returns false if the given values are different', ->
assert.is util.is('a', 'b'), false
assert.is util.is(1, 2), false
assert.is util.is(null, undefined), false
assert.is util.is(false, true), false
assert.is util.is({}, {}), false
assert.is util.is((->), ->), false
it 'returns true if two NaNs have been given', ->
assert.is util.is(NaN, NaN), true
it 'returns false for negative zero and positive zero comparison', ->
assert.is util.is(-0, 0), false
return
describe 'isFloat()', ->
it 'returns true for finite numbers', ->
assert.is util.isFloat(10), true
assert.is util.isFloat(10.5), true
assert.is util.isFloat(-23.12), true
assert.is util.isFloat(0), true
it 'returns false for not infinite numbers', ->
assert.is util.isFloat(NaN), false
assert.is util.isFloat(Infinity), false
assert.is util.isFloat(-Infinity), false
it 'returns false for not numbers type', ->
assert.is util.isFloat(null), false
assert.is util.isFloat(true), false
assert.is util.isFloat(undefined), false
assert.is util.isFloat(->), false
assert.is util.isFloat({}), false
assert.is util.isFloat(Object(4)), false
assert.is util.isFloat('4'), false
return
describe 'isInteger()', ->
it 'returns true for finite numbers with no exponent', ->
assert.is util.isInteger(3), true
assert.is util.isInteger(-5), true
assert.is util.isInteger(0), true
it 'returns false for not finite numbers', ->
assert.is util.isInteger(NaN), false
assert.is util.isInteger(Infinity), false
assert.is util.isInteger(-Infinity), false
it 'returns false for numbers with exponent', ->
assert.is util.isInteger(3.2), false
assert.is util.isInteger(-5.1), false
assert.is util.isInteger(0.3), false
it 'returns false for not numbers type', ->
assert.is util.isInteger(null), false
assert.is util.isInteger(true), false
assert.is util.isInteger(undefined), false
assert.is util.isInteger(->), false
assert.is util.isInteger({}), false
assert.is util.isInteger(Object(4)), false
assert.is util.isInteger('4'), false
return
describe 'isPrimitive()', ->
it 'returns true for a null, string, number boolean or an undefined', ->
assert.is util.isPrimitive(null), true
assert.is util.isPrimitive('a'), true
assert.is util.isPrimitive(12.2), true
assert.is util.isPrimitive(true), true
assert.is util.isPrimitive(undefined), true
it 'returns false for other types', ->
assert.is util.isPrimitive({}), false
assert.is util.isPrimitive([]), false
assert.is util.isPrimitive(->), false
assert.is util.isPrimitive(Object(4)), false
return
describe 'isObject()', ->
it 'returns true if the given value is an array of an object', ->
assert.is util.isObject([]), true
assert.is util.isObject({}), true
assert.is util.isObject(Object.create(null)), true
assert.is util.isObject(Object.create({a: 1})), true
it 'returns false for the given null', ->
assert.is util.isObject(null), false
it 'returns false for types different than object', ->
assert.is util.isObject(->), false
assert.is util.isObject(false), false
assert.is util.isObject(undefined), false
assert.is util.isObject('a'), false
return
describe 'isPlainObject()', ->
it 'returns true for the given object with no prototype', ->
assert.is util.isPlainObject(Object.create(null)), true
it 'returns true for the given object with standard prototype', ->
assert.is util.isPlainObject({}), true
assert.is util.isPlainObject(Object.create(Object.prototype)), true
it 'returns false for object with custom prototype', ->
class A
assert.is util.isPlainObject(Object.create({a: 1})), false
assert.is util.isPlainObject(new A), false
it 'returns false for the given null', ->
assert.is util.isPlainObject(null), false
it 'returns false for types different than object', ->
assert.is util.isPlainObject(->), false
assert.is util.isPlainObject(false), false
assert.is util.isPlainObject(undefined), false
assert.is util.isPlainObject('a'), false
return
describe 'isArguments()', ->
# TODO
describe 'merge()', ->
# TODO
describe 'mergeAll()', ->
# TODO
describe 'mergeDeep()', ->
it 'merges objects deeply', ->
source = {a: {aa: '', ac: 0}, c: 4}
obj = {a: {aa: 1, ab: 2}, b: 3}
expected = {a: {aa: 1, ab: 2, ac: 0}, b: 3, c: 4}
assert.isEqual util.mergeDeep(source, obj), expected
it 'merges lists', ->
source = [1, 2]
obj = [3]
expected = [1, 2, 3]
assert.isEqual util.mergeDeep(source, obj), expected
it 'merges lists deeply', ->
source = {a: [1, 2]}
obj = {a: [3]}
expected = {a: [1, 2, 3]}
assert.isEqual util.mergeDeep(source, obj), expected
return
describe 'fill()', ->
# TODO
describe 'remove()', ->
# TODO
describe 'removeFromUnorderedArray()', ->
# TODO
describe 'getPropertyDescriptor()', ->
# TODO
describe 'lookupGetter()', ->
# TODO
describe 'lookupSetter()', ->
# TODO
describe 'defineProperty()', ->
# TODO
describe 'overrideProperty()', ->
# TODO
describe 'clone()', ->
# TODO
describe 'cloneDeep()', ->
# TODO
describe 'isEmpty()', ->
# TODO
describe 'last()', ->
# TODO
describe 'clear()', ->
# TODO
describe 'setPrototypeOf()', ->
# TODO
describe 'has()', ->
# TODO
describe 'objectToArray()', ->
# TODO
describe 'arrayToObject()', ->
# TODO
describe 'capitalize()', ->
it 'uppercases first character', ->
assert.is util.capitalize('abc'), 'Abc'
assert.is util.capitalize('abc_def'), 'Abc_def'
assert.is util.capitalize('Abc_def'), 'Abc_def'
assert.is util.capitalize('1abc'), '1abc'
assert.is util.capitalize(''), ''
return
describe 'addSlashes()', ->
# TODO
describe 'uid()', ->
# TODO
describe 'tryFunction()', ->
# TODO
describe 'catchError()', ->
# TODO
describe 'bindFunctionContext()', ->
it 'returns bound function', ->
ctx = null
args = null
handleFunc = (localCtx, localArgs...) ->
ctx = localCtx
args = localArgs
boundCtx = a: 2
wrongCtx = wrong: 1
test = (func) ->
funcArgs = []
for i in [0...func.length] by 1
funcArgs.push i
util.bindFunctionContext(func, boundCtx).apply wrongCtx, funcArgs
assert.is ctx, boundCtx
assert.isEqual args, funcArgs
test ((a) -> handleFunc(@, arguments...))
test ((a, b) -> handleFunc(@, arguments...))
test ((a, b, c) -> handleFunc(@, arguments...))
test ((a, b, c, d) -> handleFunc(@, arguments...))
test ((a, b, c, d, e) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g, h) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g, h, i) -> handleFunc(@, arguments...))
return
describe 'errorToObject()', ->
it 'returns error name and message', ->
message = 'random <PASSWORD>'
error = new TypeError message
object = util.errorToObject error
assert.is object.name, 'TypeError'
assert.is object.message, message
it 'merges custom error properties', ->
error = new Error
error.prop = 'a'
object = util.errorToObject error
assert.is object.prop, 'a'
it 'returned object can be stringified', ->
error = new TypeError 'error message'
error.custom = 2
object = util.errorToObject error
json = JSON.parse JSON.stringify object
# line and column are optional
delete json.line
delete json.column
assert.isEqual json,
name: 'TypeError'
message: 'error message'
custom: 2
return
describe 'getOwnProperties()', ->
# TODO
describe 'isEqual()', ->
it 'returns proper value of two objects given', ->
assert.is util.isEqual({a: 1}, {a: 1}), true
assert.is util.isEqual({a: 1}, {b: 1}), false
assert.is util.isEqual({a: 1}, {a: 2}), false
it 'returns proper value of two arrays given', ->
assert.is util.isEqual([1, 2], [1, 2]), true
assert.is util.isEqual([1, 2], [1]), false
assert.is util.isEqual([2, 1], [1, 2]), false
it 'test objects deeply', ->
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 1}]}), true
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}), false
it 'compareFunction is used to test primitive values', ->
funcArgs = []
compareFunction = (args...) -> funcArgs.push args
util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, compareFunction)
expected = [[1, 2]]
assert.is JSON.stringify(funcArgs), JSON.stringify(expected)
it 'maxDeep specifies how deep objects should be tested', ->
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, 2), true
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, 3), false
return
describe 'snakeToCamel()', ->
it 'returns given string as camel case', ->
assert.is util.snakeToCamel('ab_cd_23_efg'), 'abCd23Efg'
return
describe 'kebabToCamel()', ->
it 'returns given string as camel case', ->
assert.is util.kebabToCamel('ab-cd-23-efg'), 'abCd23Efg'
return
describe 'camelToKebab()', ->
it 'returns given string as kebab case', ->
assert.is util.camelToKebab('abCd23EfgAbc'), 'ab-cd23efg-abc'
return
| true | 'use strict'
util = require './'
describe 'is()', ->
it 'returns true if the given values are the same', ->
assert.is util.is('a', 'a'), true
assert.is util.is(1, 1), true
assert.is util.is(undefined, undefined), true
assert.is util.is(null, null), true
assert.is util.is(true, true), true
assert.is util.is(obj = {}, obj), true
assert.is util.is((func = ->), func), true
it 'returns false if the given values are different', ->
assert.is util.is('a', 'b'), false
assert.is util.is(1, 2), false
assert.is util.is(null, undefined), false
assert.is util.is(false, true), false
assert.is util.is({}, {}), false
assert.is util.is((->), ->), false
it 'returns true if two NaNs have been given', ->
assert.is util.is(NaN, NaN), true
it 'returns false for negative zero and positive zero comparison', ->
assert.is util.is(-0, 0), false
return
describe 'isFloat()', ->
it 'returns true for finite numbers', ->
assert.is util.isFloat(10), true
assert.is util.isFloat(10.5), true
assert.is util.isFloat(-23.12), true
assert.is util.isFloat(0), true
it 'returns false for not infinite numbers', ->
assert.is util.isFloat(NaN), false
assert.is util.isFloat(Infinity), false
assert.is util.isFloat(-Infinity), false
it 'returns false for not numbers type', ->
assert.is util.isFloat(null), false
assert.is util.isFloat(true), false
assert.is util.isFloat(undefined), false
assert.is util.isFloat(->), false
assert.is util.isFloat({}), false
assert.is util.isFloat(Object(4)), false
assert.is util.isFloat('4'), false
return
describe 'isInteger()', ->
it 'returns true for finite numbers with no exponent', ->
assert.is util.isInteger(3), true
assert.is util.isInteger(-5), true
assert.is util.isInteger(0), true
it 'returns false for not finite numbers', ->
assert.is util.isInteger(NaN), false
assert.is util.isInteger(Infinity), false
assert.is util.isInteger(-Infinity), false
it 'returns false for numbers with exponent', ->
assert.is util.isInteger(3.2), false
assert.is util.isInteger(-5.1), false
assert.is util.isInteger(0.3), false
it 'returns false for not numbers type', ->
assert.is util.isInteger(null), false
assert.is util.isInteger(true), false
assert.is util.isInteger(undefined), false
assert.is util.isInteger(->), false
assert.is util.isInteger({}), false
assert.is util.isInteger(Object(4)), false
assert.is util.isInteger('4'), false
return
describe 'isPrimitive()', ->
it 'returns true for a null, string, number boolean or an undefined', ->
assert.is util.isPrimitive(null), true
assert.is util.isPrimitive('a'), true
assert.is util.isPrimitive(12.2), true
assert.is util.isPrimitive(true), true
assert.is util.isPrimitive(undefined), true
it 'returns false for other types', ->
assert.is util.isPrimitive({}), false
assert.is util.isPrimitive([]), false
assert.is util.isPrimitive(->), false
assert.is util.isPrimitive(Object(4)), false
return
describe 'isObject()', ->
it 'returns true if the given value is an array of an object', ->
assert.is util.isObject([]), true
assert.is util.isObject({}), true
assert.is util.isObject(Object.create(null)), true
assert.is util.isObject(Object.create({a: 1})), true
it 'returns false for the given null', ->
assert.is util.isObject(null), false
it 'returns false for types different than object', ->
assert.is util.isObject(->), false
assert.is util.isObject(false), false
assert.is util.isObject(undefined), false
assert.is util.isObject('a'), false
return
describe 'isPlainObject()', ->
it 'returns true for the given object with no prototype', ->
assert.is util.isPlainObject(Object.create(null)), true
it 'returns true for the given object with standard prototype', ->
assert.is util.isPlainObject({}), true
assert.is util.isPlainObject(Object.create(Object.prototype)), true
it 'returns false for object with custom prototype', ->
class A
assert.is util.isPlainObject(Object.create({a: 1})), false
assert.is util.isPlainObject(new A), false
it 'returns false for the given null', ->
assert.is util.isPlainObject(null), false
it 'returns false for types different than object', ->
assert.is util.isPlainObject(->), false
assert.is util.isPlainObject(false), false
assert.is util.isPlainObject(undefined), false
assert.is util.isPlainObject('a'), false
return
describe 'isArguments()', ->
# TODO
describe 'merge()', ->
# TODO
describe 'mergeAll()', ->
# TODO
describe 'mergeDeep()', ->
it 'merges objects deeply', ->
source = {a: {aa: '', ac: 0}, c: 4}
obj = {a: {aa: 1, ab: 2}, b: 3}
expected = {a: {aa: 1, ab: 2, ac: 0}, b: 3, c: 4}
assert.isEqual util.mergeDeep(source, obj), expected
it 'merges lists', ->
source = [1, 2]
obj = [3]
expected = [1, 2, 3]
assert.isEqual util.mergeDeep(source, obj), expected
it 'merges lists deeply', ->
source = {a: [1, 2]}
obj = {a: [3]}
expected = {a: [1, 2, 3]}
assert.isEqual util.mergeDeep(source, obj), expected
return
describe 'fill()', ->
# TODO
describe 'remove()', ->
# TODO
describe 'removeFromUnorderedArray()', ->
# TODO
describe 'getPropertyDescriptor()', ->
# TODO
describe 'lookupGetter()', ->
# TODO
describe 'lookupSetter()', ->
# TODO
describe 'defineProperty()', ->
# TODO
describe 'overrideProperty()', ->
# TODO
describe 'clone()', ->
# TODO
describe 'cloneDeep()', ->
# TODO
describe 'isEmpty()', ->
# TODO
describe 'last()', ->
# TODO
describe 'clear()', ->
# TODO
describe 'setPrototypeOf()', ->
# TODO
describe 'has()', ->
# TODO
describe 'objectToArray()', ->
# TODO
describe 'arrayToObject()', ->
# TODO
describe 'capitalize()', ->
it 'uppercases first character', ->
assert.is util.capitalize('abc'), 'Abc'
assert.is util.capitalize('abc_def'), 'Abc_def'
assert.is util.capitalize('Abc_def'), 'Abc_def'
assert.is util.capitalize('1abc'), '1abc'
assert.is util.capitalize(''), ''
return
describe 'addSlashes()', ->
# TODO
describe 'uid()', ->
# TODO
describe 'tryFunction()', ->
# TODO
describe 'catchError()', ->
# TODO
describe 'bindFunctionContext()', ->
it 'returns bound function', ->
ctx = null
args = null
handleFunc = (localCtx, localArgs...) ->
ctx = localCtx
args = localArgs
boundCtx = a: 2
wrongCtx = wrong: 1
test = (func) ->
funcArgs = []
for i in [0...func.length] by 1
funcArgs.push i
util.bindFunctionContext(func, boundCtx).apply wrongCtx, funcArgs
assert.is ctx, boundCtx
assert.isEqual args, funcArgs
test ((a) -> handleFunc(@, arguments...))
test ((a, b) -> handleFunc(@, arguments...))
test ((a, b, c) -> handleFunc(@, arguments...))
test ((a, b, c, d) -> handleFunc(@, arguments...))
test ((a, b, c, d, e) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g, h) -> handleFunc(@, arguments...))
test ((a, b, c, d, e, f, g, h, i) -> handleFunc(@, arguments...))
return
describe 'errorToObject()', ->
it 'returns error name and message', ->
message = 'random PI:PASSWORD:<PASSWORD>END_PI'
error = new TypeError message
object = util.errorToObject error
assert.is object.name, 'TypeError'
assert.is object.message, message
it 'merges custom error properties', ->
error = new Error
error.prop = 'a'
object = util.errorToObject error
assert.is object.prop, 'a'
it 'returned object can be stringified', ->
error = new TypeError 'error message'
error.custom = 2
object = util.errorToObject error
json = JSON.parse JSON.stringify object
# line and column are optional
delete json.line
delete json.column
assert.isEqual json,
name: 'TypeError'
message: 'error message'
custom: 2
return
describe 'getOwnProperties()', ->
# TODO
describe 'isEqual()', ->
it 'returns proper value of two objects given', ->
assert.is util.isEqual({a: 1}, {a: 1}), true
assert.is util.isEqual({a: 1}, {b: 1}), false
assert.is util.isEqual({a: 1}, {a: 2}), false
it 'returns proper value of two arrays given', ->
assert.is util.isEqual([1, 2], [1, 2]), true
assert.is util.isEqual([1, 2], [1]), false
assert.is util.isEqual([2, 1], [1, 2]), false
it 'test objects deeply', ->
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 1}]}), true
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}), false
it 'compareFunction is used to test primitive values', ->
funcArgs = []
compareFunction = (args...) -> funcArgs.push args
util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, compareFunction)
expected = [[1, 2]]
assert.is JSON.stringify(funcArgs), JSON.stringify(expected)
it 'maxDeep specifies how deep objects should be tested', ->
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, 2), true
assert.is util.isEqual({a: [{b: 1}]}, {a: [{b: 2}]}, 3), false
return
describe 'snakeToCamel()', ->
it 'returns given string as camel case', ->
assert.is util.snakeToCamel('ab_cd_23_efg'), 'abCd23Efg'
return
describe 'kebabToCamel()', ->
it 'returns given string as camel case', ->
assert.is util.kebabToCamel('ab-cd-23-efg'), 'abCd23Efg'
return
describe 'camelToKebab()', ->
it 'returns given string as kebab case', ->
assert.is util.camelToKebab('abCd23EfgAbc'), 'ab-cd23efg-abc'
return
|
[
{
"context": "ask\", ->\n # Given\n newTask = name: \"New task\", done: false\n\n # When\n ctrl.addTas",
"end": 1779,
"score": 0.8579176664352417,
"start": 1771,
"tag": "NAME",
"value": "New task"
}
] | client/test/unit/controllers/tasks_ctrl_spec.coffee | lucassus/angular-coffee-seed | 2 | describe "Controller `TasksCtrl`", ->
beforeEach module "myApp"
ctrl = null
$scope = null
# Initialize the controller and a mock scope
beforeEach inject ($rootScope, $controller) ->
$scope = $rootScope.$new()
ctrl = $controller "TasksCtrl", $scope: $scope
it "assigns tasks", ->
expect(ctrl.tasks).to.not.be.undefined
expect(ctrl.tasks.length).to.equal 3
describe "#archive()", ->
beforeEach ->
ctrl.tasks = [
{ done: false }, { done: true }, { done: true }
]
it "removes completed task from the list", ->
expect(ctrl.tasks.length).to.equal 3
ctrl.archive()
expect(ctrl.tasks.length).to.equal 1
describe "#tasksCount()", ->
it "returns the number of all tasks", ->
ctrl.tasks = [{}, {}, {}]
expect(ctrl.tasksCount()).to.equal 3
describe "#remainingTasksCount()", ->
describe "when task list is empty" ,->
beforeEach -> ctrl.tasks = []
it "returns 0", ->
expect(ctrl.remainingTasksCount()).to.equal 0
describe "when task list contains some uncompleted tasks", ->
beforeEach ->
ctrl.tasks = [
{ done: false }, { done: false }, { done: true }
]
it "returns > 0", ->
expect(ctrl.remainingTasksCount()).to.equal 2
describe "when all tasks are completed", ->
beforeEach ->
ctrl.tasks = [
{ done: true }, { done: true }, { done: true }
]
it "returns 0", ->
expect(ctrl.remainingTasksCount()).to.equal 0
describe "#addTask()", ->
describe "when the form is valid", ->
beforeEach ->
$scope.taskForm = $valid: true
sinon.stub(ctrl, "reset")
it "adds a new task", ->
# Given
newTask = name: "New task", done: false
# When
ctrl.addTask(newTask)
# Then
expect(ctrl.tasks.length).to.equal 4
lastTask = ctrl.tasks[3]
expect(lastTask).to.not.be.undefined
expect(lastTask.name).to.equal newTask.name
expect(lastTask.done).to.equal newTask.done
it "resets the form", ->
# When
ctrl.addTask({})
# Then
expect(ctrl.reset).to.be.called
describe "when the form is not valid", ->
beforeEach -> $scope.taskForm = $valid: false
it "does nothing", ->
# When
ctrl.addTask({})
# Then
expect(ctrl.tasks.length).to.equal 3
it "does not reset the form", ->
# Given
mock = sinon.mock(ctrl).expects("reset").never()
# When
ctrl.addTask({})
# Then
expect(mock).to.not.be.called
mock.verify()
| 105851 | describe "Controller `TasksCtrl`", ->
beforeEach module "myApp"
ctrl = null
$scope = null
# Initialize the controller and a mock scope
beforeEach inject ($rootScope, $controller) ->
$scope = $rootScope.$new()
ctrl = $controller "TasksCtrl", $scope: $scope
it "assigns tasks", ->
expect(ctrl.tasks).to.not.be.undefined
expect(ctrl.tasks.length).to.equal 3
describe "#archive()", ->
beforeEach ->
ctrl.tasks = [
{ done: false }, { done: true }, { done: true }
]
it "removes completed task from the list", ->
expect(ctrl.tasks.length).to.equal 3
ctrl.archive()
expect(ctrl.tasks.length).to.equal 1
describe "#tasksCount()", ->
it "returns the number of all tasks", ->
ctrl.tasks = [{}, {}, {}]
expect(ctrl.tasksCount()).to.equal 3
describe "#remainingTasksCount()", ->
describe "when task list is empty" ,->
beforeEach -> ctrl.tasks = []
it "returns 0", ->
expect(ctrl.remainingTasksCount()).to.equal 0
describe "when task list contains some uncompleted tasks", ->
beforeEach ->
ctrl.tasks = [
{ done: false }, { done: false }, { done: true }
]
it "returns > 0", ->
expect(ctrl.remainingTasksCount()).to.equal 2
describe "when all tasks are completed", ->
beforeEach ->
ctrl.tasks = [
{ done: true }, { done: true }, { done: true }
]
it "returns 0", ->
expect(ctrl.remainingTasksCount()).to.equal 0
describe "#addTask()", ->
describe "when the form is valid", ->
beforeEach ->
$scope.taskForm = $valid: true
sinon.stub(ctrl, "reset")
it "adds a new task", ->
# Given
newTask = name: "<NAME>", done: false
# When
ctrl.addTask(newTask)
# Then
expect(ctrl.tasks.length).to.equal 4
lastTask = ctrl.tasks[3]
expect(lastTask).to.not.be.undefined
expect(lastTask.name).to.equal newTask.name
expect(lastTask.done).to.equal newTask.done
it "resets the form", ->
# When
ctrl.addTask({})
# Then
expect(ctrl.reset).to.be.called
describe "when the form is not valid", ->
beforeEach -> $scope.taskForm = $valid: false
it "does nothing", ->
# When
ctrl.addTask({})
# Then
expect(ctrl.tasks.length).to.equal 3
it "does not reset the form", ->
# Given
mock = sinon.mock(ctrl).expects("reset").never()
# When
ctrl.addTask({})
# Then
expect(mock).to.not.be.called
mock.verify()
| true | describe "Controller `TasksCtrl`", ->
beforeEach module "myApp"
ctrl = null
$scope = null
# Initialize the controller and a mock scope
beforeEach inject ($rootScope, $controller) ->
$scope = $rootScope.$new()
ctrl = $controller "TasksCtrl", $scope: $scope
it "assigns tasks", ->
expect(ctrl.tasks).to.not.be.undefined
expect(ctrl.tasks.length).to.equal 3
describe "#archive()", ->
beforeEach ->
ctrl.tasks = [
{ done: false }, { done: true }, { done: true }
]
it "removes completed task from the list", ->
expect(ctrl.tasks.length).to.equal 3
ctrl.archive()
expect(ctrl.tasks.length).to.equal 1
describe "#tasksCount()", ->
it "returns the number of all tasks", ->
ctrl.tasks = [{}, {}, {}]
expect(ctrl.tasksCount()).to.equal 3
describe "#remainingTasksCount()", ->
describe "when task list is empty" ,->
beforeEach -> ctrl.tasks = []
it "returns 0", ->
expect(ctrl.remainingTasksCount()).to.equal 0
describe "when task list contains some uncompleted tasks", ->
beforeEach ->
ctrl.tasks = [
{ done: false }, { done: false }, { done: true }
]
it "returns > 0", ->
expect(ctrl.remainingTasksCount()).to.equal 2
describe "when all tasks are completed", ->
beforeEach ->
ctrl.tasks = [
{ done: true }, { done: true }, { done: true }
]
it "returns 0", ->
expect(ctrl.remainingTasksCount()).to.equal 0
describe "#addTask()", ->
describe "when the form is valid", ->
beforeEach ->
$scope.taskForm = $valid: true
sinon.stub(ctrl, "reset")
it "adds a new task", ->
# Given
newTask = name: "PI:NAME:<NAME>END_PI", done: false
# When
ctrl.addTask(newTask)
# Then
expect(ctrl.tasks.length).to.equal 4
lastTask = ctrl.tasks[3]
expect(lastTask).to.not.be.undefined
expect(lastTask.name).to.equal newTask.name
expect(lastTask.done).to.equal newTask.done
it "resets the form", ->
# When
ctrl.addTask({})
# Then
expect(ctrl.reset).to.be.called
describe "when the form is not valid", ->
beforeEach -> $scope.taskForm = $valid: false
it "does nothing", ->
# When
ctrl.addTask({})
# Then
expect(ctrl.tasks.length).to.equal 3
it "does not reset the form", ->
# Given
mock = sinon.mock(ctrl).expects("reset").never()
# When
ctrl.addTask({})
# Then
expect(mock).to.not.be.called
mock.verify()
|
[
{
"context": "get the weakest enemy when possible.\n *\n * @name Bully\n * @prerequisite Attack 75 times\n * @effect +5%",
"end": 371,
"score": 0.9185209274291992,
"start": 366,
"tag": "NAME",
"value": "Bully"
}
] | src/character/personalities/Bully.coffee | jawsome/IdleLands | 3 |
Personality = require "../base/Personality"
Constants = require "../../system/utilities/Constants"
_ = require "lodash"
slay = (player, enemies) ->
targets = _.sortBy (_.compact enemies.result), (enemy) -> enemy.hp.getValue()
{ probability: 200, result: [targets[0]] }
`/**
* This personality makes you target the weakest enemy when possible.
*
* @name Bully
* @prerequisite Attack 75 times
* @effect +5% STR
* @effect -5% INT
* @effect -5% WIS
* @category Personalities
* @package Player
*/`
class Bully extends Personality
constructor: ->
physicalAttackTargets: slay
magicalAttackTargets: slay
strPercent: -> 5
intPercent: -> -5
wisPercent: -> -5
@canUse = (player) ->
player.statistics["combat self attack"] >= 75
@desc = "Attack 75 times"
module.exports = exports = Bully | 12434 |
Personality = require "../base/Personality"
Constants = require "../../system/utilities/Constants"
_ = require "lodash"
slay = (player, enemies) ->
targets = _.sortBy (_.compact enemies.result), (enemy) -> enemy.hp.getValue()
{ probability: 200, result: [targets[0]] }
`/**
* This personality makes you target the weakest enemy when possible.
*
* @name <NAME>
* @prerequisite Attack 75 times
* @effect +5% STR
* @effect -5% INT
* @effect -5% WIS
* @category Personalities
* @package Player
*/`
class Bully extends Personality
constructor: ->
physicalAttackTargets: slay
magicalAttackTargets: slay
strPercent: -> 5
intPercent: -> -5
wisPercent: -> -5
@canUse = (player) ->
player.statistics["combat self attack"] >= 75
@desc = "Attack 75 times"
module.exports = exports = Bully | true |
Personality = require "../base/Personality"
Constants = require "../../system/utilities/Constants"
_ = require "lodash"
slay = (player, enemies) ->
targets = _.sortBy (_.compact enemies.result), (enemy) -> enemy.hp.getValue()
{ probability: 200, result: [targets[0]] }
`/**
* This personality makes you target the weakest enemy when possible.
*
* @name PI:NAME:<NAME>END_PI
* @prerequisite Attack 75 times
* @effect +5% STR
* @effect -5% INT
* @effect -5% WIS
* @category Personalities
* @package Player
*/`
class Bully extends Personality
constructor: ->
physicalAttackTargets: slay
magicalAttackTargets: slay
strPercent: -> 5
intPercent: -> -5
wisPercent: -> -5
@canUse = (player) ->
player.statistics["combat self attack"] >= 75
@desc = "Attack 75 times"
module.exports = exports = Bully |
[
{
"context": ", Safari4+, Chrome4+ and Opera 10.5+\n *\n * Author: Andris Reinman, andris.reinman@gmail.com\n * Project homepage: ww",
"end": 265,
"score": 0.999866783618927,
"start": 251,
"tag": "NAME",
"value": "Andris Reinman"
},
{
"context": "me4+ and Opera 10.5+\n *\n * Author: ... | coffee/api/3rd/jstorage.coffee | Spartan322/Spartan-Dev-Project | 1 | `/*
* ----------------------------- JSTORAGE -------------------------------------
* Simple local storage wrapper to save data on the browser side, supporting
* all major browsers - IE6+, Firefox2+, Safari4+, Chrome4+ and Opera 10.5+
*
* Author: Andris Reinman, andris.reinman@gmail.com
* Project homepage: www.jstorage.info
*
* Licensed under Unlicense:
*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/
/* global ActiveXObject: false */
/* jshint browser: true */
(function() {
'use strict';
var
/* jStorage version */
JSTORAGE_VERSION = '0.4.12',
/* detect a dollar object or create one if not found */
$ = window.jQuery || window.$ || (window.$ = {}),
/* check for a JSON handling support */
JSON = {
parse: window.JSON && (window.JSON.parse || window.JSON.decode) ||
String.prototype.evalJSON && function(str) {
return String(str).evalJSON();
} ||
$.parseJSON ||
$.evalJSON,
stringify: Object.toJSON ||
window.JSON && (window.JSON.stringify || window.JSON.encode) ||
$.toJSON
};
// Break if no JSON support was found
if (typeof JSON.parse !== 'function' || typeof JSON.stringify !== 'function') {
throw new Error('No JSON support found, include //cdnjs.cloudflare.com/ajax/libs/json2/20110223/json2.js to page');
}
var
/* This is the object, that holds the cached values */
_storage = {
__jstorage_meta: {
CRC32: {}
}
},
/* Actual browser storage (localStorage or globalStorage['domain']) */
_storage_service = {
jStorage: '{}'
},
/* DOM element for older IE versions, holds userData behavior */
_storage_elm = null,
/* How much space does the storage take */
_storage_size = 0,
/* which backend is currently used */
_backend = false,
/* onchange observers */
_observers = {},
/* timeout to wait after onchange event */
_observer_timeout = false,
/* last update time */
_observer_update = 0,
/* pubsub observers */
_pubsub_observers = {},
/* skip published items older than current timestamp */
_pubsub_last = +new Date(),
/* Next check for TTL */
_ttl_timeout,
/**
* XML encoding and decoding as XML nodes can't be JSON'ized
* XML nodes are encoded and decoded if the node is the value to be saved
* but not if it's as a property of another object
* Eg. -
* $.jStorage.set('key', xmlNode); // IS OK
* $.jStorage.set('key', {xml: xmlNode}); // NOT OK
*/
_XMLService = {
/**
* Validates a XML node to be XML
* based on jQuery.isXML function
*/
isXML: function(elm) {
var documentElement = (elm ? elm.ownerDocument || elm : 0).documentElement;
return documentElement ? documentElement.nodeName !== 'HTML' : false;
},
/**
* Encodes a XML node to string
* based on http://www.mercurytide.co.uk/news/article/issues-when-working-ajax/
*/
encode: function(xmlNode) {
if (!this.isXML(xmlNode)) {
return false;
}
try { // Mozilla, Webkit, Opera
return new XMLSerializer().serializeToString(xmlNode);
} catch (E1) {
try { // IE
return xmlNode.xml;
} catch (E2) {}
}
return false;
},
/**
* Decodes a XML node from string
* loosely based on http://outwestmedia.com/jquery-plugins/xmldom/
*/
decode: function(xmlString) {
var dom_parser = ('DOMParser' in window && (new DOMParser()).parseFromString) ||
(window.ActiveXObject && function(_xmlString) {
var xml_doc = new ActiveXObject('Microsoft.XMLDOM');
xml_doc.async = 'false';
xml_doc.loadXML(_xmlString);
return xml_doc;
}),
resultXML;
if (!dom_parser) {
return false;
}
resultXML = dom_parser.call('DOMParser' in window && (new DOMParser()) || window, xmlString, 'text/xml');
return this.isXML(resultXML) ? resultXML : false;
}
};
////////////////////////// PRIVATE METHODS ////////////////////////
/**
* Initialization function. Detects if the browser supports DOM Storage
* or userData behavior and behaves accordingly.
*/
function _init() {
/* Check if browser supports localStorage */
var localStorageReallyWorks = false;
if ('localStorage' in window) {
try {
window.localStorage.setItem('_tmptest', 'tmpval');
localStorageReallyWorks = true;
window.localStorage.removeItem('_tmptest');
} catch (BogusQuotaExceededErrorOnIos5) {
// Thanks be to iOS5 Private Browsing mode which throws
// QUOTA_EXCEEDED_ERRROR DOM Exception 22.
}
}
if (localStorageReallyWorks) {
try {
if (window.localStorage) {
_storage_service = window.localStorage;
_backend = 'localStorage';
_observer_update = _storage_service.jStorage_update;
}
} catch (E3) { /* Firefox fails when touching localStorage and cookies are disabled */ }
}
/* Check if browser supports globalStorage */
else if ('globalStorage' in window) {
try {
if (window.globalStorage) {
if (window.location.hostname == 'localhost') {
_storage_service = window.globalStorage['localhost.localdomain'];
} else {
_storage_service = window.globalStorage[window.location.hostname];
}
_backend = 'globalStorage';
_observer_update = _storage_service.jStorage_update;
}
} catch (E4) { /* Firefox fails when touching localStorage and cookies are disabled */ }
}
/* Check if browser supports userData behavior */
else {
_storage_elm = document.createElement('link');
if (_storage_elm.addBehavior) {
/* Use a DOM element to act as userData storage */
_storage_elm.style.behavior = 'url(#default#userData)';
/* userData element needs to be inserted into the DOM! */
document.getElementsByTagName('head')[0].appendChild(_storage_elm);
try {
_storage_elm.load('jStorage');
} catch (E) {
// try to reset cache
_storage_elm.setAttribute('jStorage', '{}');
_storage_elm.save('jStorage');
_storage_elm.load('jStorage');
}
var data = '{}';
try {
data = _storage_elm.getAttribute('jStorage');
} catch (E5) {}
try {
_observer_update = _storage_elm.getAttribute('jStorage_update');
} catch (E6) {}
_storage_service.jStorage = data;
_backend = 'userDataBehavior';
} else {
_storage_elm = null;
return;
}
}
// Load data from storage
_load_storage();
// remove dead keys
_handleTTL();
// start listening for changes
_setupObserver();
// initialize publish-subscribe service
_handlePubSub();
// handle cached navigation
if ('addEventListener' in window) {
window.addEventListener('pageshow', function(event) {
if (event.persisted) {
_storageObserver();
}
}, false);
}
}
/**
* Reload data from storage when needed
*/
function _reloadData() {
var data = '{}';
if (_backend == 'userDataBehavior') {
_storage_elm.load('jStorage');
try {
data = _storage_elm.getAttribute('jStorage');
} catch (E5) {}
try {
_observer_update = _storage_elm.getAttribute('jStorage_update');
} catch (E6) {}
_storage_service.jStorage = data;
}
_load_storage();
// remove dead keys
_handleTTL();
_handlePubSub();
}
/**
* Sets up a storage change observer
*/
function _setupObserver() {
if (_backend == 'localStorage' || _backend == 'globalStorage') {
if ('addEventListener' in window) {
window.addEventListener('storage', _storageObserver, false);
} else {
document.attachEvent('onstorage', _storageObserver);
}
} else if (_backend == 'userDataBehavior') {
setInterval(_storageObserver, 1000);
}
}
/**
* Fired on any kind of data change, needs to check if anything has
* really been changed
*/
function _storageObserver() {
var updateTime;
// cumulate change notifications with timeout
clearTimeout(_observer_timeout);
_observer_timeout = setTimeout(function() {
if (_backend == 'localStorage' || _backend == 'globalStorage') {
updateTime = _storage_service.jStorage_update;
} else if (_backend == 'userDataBehavior') {
_storage_elm.load('jStorage');
try {
updateTime = _storage_elm.getAttribute('jStorage_update');
} catch (E5) {}
}
if (updateTime && updateTime != _observer_update) {
_observer_update = updateTime;
_checkUpdatedKeys();
}
}, 25);
}
/**
* Reloads the data and checks if any keys are changed
*/
function _checkUpdatedKeys() {
var oldCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32)),
newCrc32List;
_reloadData();
newCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32));
var key,
updated = [],
removed = [];
for (key in oldCrc32List) {
if (oldCrc32List.hasOwnProperty(key)) {
if (!newCrc32List[key]) {
removed.push(key);
continue;
}
if (oldCrc32List[key] != newCrc32List[key] && String(oldCrc32List[key]).substr(0, 2) == '2.') {
updated.push(key);
}
}
}
for (key in newCrc32List) {
if (newCrc32List.hasOwnProperty(key)) {
if (!oldCrc32List[key]) {
updated.push(key);
}
}
}
_fireObservers(updated, 'updated');
_fireObservers(removed, 'deleted');
}
/**
* Fires observers for updated keys
*
* @param {Array|String} keys Array of key names or a key
* @param {String} action What happened with the value (updated, deleted, flushed)
*/
function _fireObservers(keys, action) {
keys = [].concat(keys || []);
var i, j, len, jlen;
if (action == 'flushed') {
keys = [];
for (var key in _observers) {
if (_observers.hasOwnProperty(key)) {
keys.push(key);
}
}
action = 'deleted';
}
for (i = 0, len = keys.length; i < len; i++) {
if (_observers[keys[i]]) {
for (j = 0, jlen = _observers[keys[i]].length; j < jlen; j++) {
_observers[keys[i]][j](keys[i], action);
}
}
if (_observers['*']) {
for (j = 0, jlen = _observers['*'].length; j < jlen; j++) {
_observers['*'][j](keys[i], action);
}
}
}
}
/**
* Publishes key change to listeners
*/
function _publishChange() {
var updateTime = (+new Date()).toString();
if (_backend == 'localStorage' || _backend == 'globalStorage') {
try {
_storage_service.jStorage_update = updateTime;
} catch (E8) {
// safari private mode has been enabled after the jStorage initialization
_backend = false;
}
} else if (_backend == 'userDataBehavior') {
_storage_elm.setAttribute('jStorage_update', updateTime);
_storage_elm.save('jStorage');
}
_storageObserver();
}
/**
* Loads the data from the storage based on the supported mechanism
*/
function _load_storage() {
/* if jStorage string is retrieved, then decode it */
if (_storage_service.jStorage) {
try {
_storage = JSON.parse(String(_storage_service.jStorage));
} catch (E6) {
_storage_service.jStorage = '{}';
}
} else {
_storage_service.jStorage = '{}';
}
_storage_size = _storage_service.jStorage ? String(_storage_service.jStorage).length : 0;
if (!_storage.__jstorage_meta) {
_storage.__jstorage_meta = {};
}
if (!_storage.__jstorage_meta.CRC32) {
_storage.__jstorage_meta.CRC32 = {};
}
}
/**
* This functions provides the 'save' mechanism to store the jStorage object
*/
function _save() {
_dropOldEvents(); // remove expired events
try {
_storage_service.jStorage = JSON.stringify(_storage);
// If userData is used as the storage engine, additional
if (_storage_elm) {
_storage_elm.setAttribute('jStorage', _storage_service.jStorage);
_storage_elm.save('jStorage');
}
_storage_size = _storage_service.jStorage ? String(_storage_service.jStorage).length : 0;
} catch (E7) { /* probably cache is full, nothing is saved this way*/ }
}
/**
* Function checks if a key is set and is string or numberic
*
* @param {String} key Key name
*/
function _checkKey(key) {
if (typeof key != 'string' && typeof key != 'number') {
throw new TypeError('Key name must be string or numeric');
}
if (key == '__jstorage_meta') {
throw new TypeError('Reserved key name');
}
return true;
}
/**
* Removes expired keys
*/
function _handleTTL() {
var curtime, i, TTL, CRC32, nextExpire = Infinity,
changed = false,
deleted = [];
clearTimeout(_ttl_timeout);
if (!_storage.__jstorage_meta || typeof _storage.__jstorage_meta.TTL != 'object') {
// nothing to do here
return;
}
curtime = +new Date();
TTL = _storage.__jstorage_meta.TTL;
CRC32 = _storage.__jstorage_meta.CRC32;
for (i in TTL) {
if (TTL.hasOwnProperty(i)) {
if (TTL[i] <= curtime) {
delete TTL[i];
delete CRC32[i];
delete _storage[i];
changed = true;
deleted.push(i);
} else if (TTL[i] < nextExpire) {
nextExpire = TTL[i];
}
}
}
// set next check
if (nextExpire != Infinity) {
_ttl_timeout = setTimeout(_handleTTL, Math.min(nextExpire - curtime, 0x7FFFFFFF));
}
// save changes
if (changed) {
_save();
_publishChange();
_fireObservers(deleted, 'deleted');
}
}
/**
* Checks if there's any events on hold to be fired to listeners
*/
function _handlePubSub() {
var i, len;
if (!_storage.__jstorage_meta.PubSub) {
return;
}
var pubelm,
_pubsubCurrent = _pubsub_last,
needFired = [];
for (i = len = _storage.__jstorage_meta.PubSub.length - 1; i >= 0; i--) {
pubelm = _storage.__jstorage_meta.PubSub[i];
if (pubelm[0] > _pubsub_last) {
_pubsubCurrent = pubelm[0];
needFired.unshift(pubelm);
}
}
for (i = needFired.length - 1; i >= 0; i--) {
_fireSubscribers(needFired[i][1], needFired[i][2]);
}
_pubsub_last = _pubsubCurrent;
}
/**
* Fires all subscriber listeners for a pubsub channel
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload data to deliver
*/
function _fireSubscribers(channel, payload) {
if (_pubsub_observers[channel]) {
for (var i = 0, len = _pubsub_observers[channel].length; i < len; i++) {
// send immutable data that can't be modified by listeners
try {
_pubsub_observers[channel][i](channel, JSON.parse(JSON.stringify(payload)));
} catch (E) {}
}
}
}
/**
* Remove old events from the publish stream (at least 2sec old)
*/
function _dropOldEvents() {
if (!_storage.__jstorage_meta.PubSub) {
return;
}
var retire = +new Date() - 2000;
for (var i = 0, len = _storage.__jstorage_meta.PubSub.length; i < len; i++) {
if (_storage.__jstorage_meta.PubSub[i][0] <= retire) {
// deleteCount is needed for IE6
_storage.__jstorage_meta.PubSub.splice(i, _storage.__jstorage_meta.PubSub.length - i);
break;
}
}
if (!_storage.__jstorage_meta.PubSub.length) {
delete _storage.__jstorage_meta.PubSub;
}
}
/**
* Publish payload to a channel
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload to send to the subscribers
*/
function _publish(channel, payload) {
if (!_storage.__jstorage_meta) {
_storage.__jstorage_meta = {};
}
if (!_storage.__jstorage_meta.PubSub) {
_storage.__jstorage_meta.PubSub = [];
}
_storage.__jstorage_meta.PubSub.unshift([+new Date(), channel, payload]);
_save();
_publishChange();
}
/**
* JS Implementation of MurmurHash2
*
* SOURCE: https://github.com/garycourt/murmurhash-js (MIT licensed)
*
* @author <a href='mailto:gary.court@gmail.com'>Gary Court</a>
* @see http://github.com/garycourt/murmurhash-js
* @author <a href='mailto:aappleby@gmail.com'>Austin Appleby</a>
* @see http://sites.google.com/site/murmurhash/
*
* @param {string} str ASCII only
* @param {number} seed Positive integer only
* @return {number} 32-bit positive integer hash
*/
function murmurhash2_32_gc(str, seed) {
var
l = str.length,
h = seed ^ l,
i = 0,
k;
while (l >= 4) {
k =
((str.charCodeAt(i) & 0xff)) |
((str.charCodeAt(++i) & 0xff) << 8) |
((str.charCodeAt(++i) & 0xff) << 16) |
((str.charCodeAt(++i) & 0xff) << 24);
k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
k ^= k >>> 24;
k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16)) ^ k;
l -= 4;
++i;
}
switch (l) {
case 3:
h ^= (str.charCodeAt(i + 2) & 0xff) << 16;
/* falls through */
case 2:
h ^= (str.charCodeAt(i + 1) & 0xff) << 8;
/* falls through */
case 1:
h ^= (str.charCodeAt(i) & 0xff);
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
}
h ^= h >>> 13;
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
h ^= h >>> 15;
return h >>> 0;
}
////////////////////////// PUBLIC INTERFACE /////////////////////////
$.jStorage = {
/* Version number */
version: JSTORAGE_VERSION,
/**
* Sets a key's value.
*
* @param {String} key Key to set. If this value is not set or not
* a string an exception is raised.
* @param {Mixed} value Value to set. This can be any value that is JSON
* compatible (Numbers, Strings, Objects etc.).
* @param {Object} [options] - possible options to use
* @param {Number} [options.TTL] - optional TTL value, in milliseconds
* @return {Mixed} the used value
*/
set: function(key, value, options) {
_checkKey(key);
options = options || {};
// undefined values are deleted automatically
if (typeof value == 'undefined') {
this.deleteKey(key);
return value;
}
if (_XMLService.isXML(value)) {
value = {
_is_xml: true,
xml: _XMLService.encode(value)
};
} else if (typeof value == 'function') {
return undefined; // functions can't be saved!
} else if (value && typeof value == 'object') {
// clone the object before saving to _storage tree
value = JSON.parse(JSON.stringify(value));
}
_storage[key] = value;
_storage.__jstorage_meta.CRC32[key] = '2.' + murmurhash2_32_gc(JSON.stringify(value), 0x9747b28c);
this.setTTL(key, options.TTL || 0); // also handles saving and _publishChange
_fireObservers(key, 'updated');
return value;
},
/**
* Looks up a key in cache
*
* @param {String} key - Key to look up.
* @param {mixed} def - Default value to return, if key didn't exist.
* @return {Mixed} the key value, default value or null
*/
get: function(key, def) {
_checkKey(key);
if (key in _storage) {
if (_storage[key] && typeof _storage[key] == 'object' && _storage[key]._is_xml) {
return _XMLService.decode(_storage[key].xml);
} else {
return _storage[key];
}
}
return typeof(def) == 'undefined' ? null : def;
},
/**
* Deletes a key from cache.
*
* @param {String} key - Key to delete.
* @return {Boolean} true if key existed or false if it didn't
*/
deleteKey: function(key) {
_checkKey(key);
if (key in _storage) {
delete _storage[key];
// remove from TTL list
if (typeof _storage.__jstorage_meta.TTL == 'object' &&
key in _storage.__jstorage_meta.TTL) {
delete _storage.__jstorage_meta.TTL[key];
}
delete _storage.__jstorage_meta.CRC32[key];
_save();
_publishChange();
_fireObservers(key, 'deleted');
return true;
}
return false;
},
/**
* Sets a TTL for a key, or remove it if ttl value is 0 or below
*
* @param {String} key - key to set the TTL for
* @param {Number} ttl - TTL timeout in milliseconds
* @return {Boolean} true if key existed or false if it didn't
*/
setTTL: function(key, ttl) {
var curtime = +new Date();
_checkKey(key);
ttl = Number(ttl) || 0;
if (key in _storage) {
if (!_storage.__jstorage_meta.TTL) {
_storage.__jstorage_meta.TTL = {};
}
// Set TTL value for the key
if (ttl > 0) {
_storage.__jstorage_meta.TTL[key] = curtime + ttl;
} else {
delete _storage.__jstorage_meta.TTL[key];
}
_save();
_handleTTL();
_publishChange();
return true;
}
return false;
},
/**
* Gets remaining TTL (in milliseconds) for a key or 0 when no TTL has been set
*
* @param {String} key Key to check
* @return {Number} Remaining TTL in milliseconds
*/
getTTL: function(key) {
var curtime = +new Date(),
ttl;
_checkKey(key);
if (key in _storage && _storage.__jstorage_meta.TTL && _storage.__jstorage_meta.TTL[key]) {
ttl = _storage.__jstorage_meta.TTL[key] - curtime;
return ttl || 0;
}
return 0;
},
/**
* Deletes everything in cache.
*
* @return {Boolean} Always true
*/
flush: function() {
_storage = {
__jstorage_meta: {
CRC32: {}
}
};
_save();
_publishChange();
_fireObservers(null, 'flushed');
return true;
},
/**
* Returns a read-only copy of _storage
*
* @return {Object} Read-only copy of _storage
*/
storageObj: function() {
function F() {}
F.prototype = _storage;
return new F();
},
/**
* Returns an index of all used keys as an array
* ['key1', 'key2',..'keyN']
*
* @return {Array} Used keys
*/
index: function() {
var index = [],
i;
for (i in _storage) {
if (_storage.hasOwnProperty(i) && i != '__jstorage_meta') {
index.push(i);
}
}
return index;
},
/**
* How much space in bytes does the storage take?
*
* @return {Number} Storage size in chars (not the same as in bytes,
* since some chars may take several bytes)
*/
storageSize: function() {
return _storage_size;
},
/**
* Which backend is currently in use?
*
* @return {String} Backend name
*/
currentBackend: function() {
return _backend;
},
/**
* Test if storage is available
*
* @return {Boolean} True if storage can be used
*/
storageAvailable: function() {
return !!_backend;
},
/**
* Register change listeners
*
* @param {String} key Key name
* @param {Function} callback Function to run when the key changes
*/
listenKeyChange: function(key, callback) {
_checkKey(key);
if (!_observers[key]) {
_observers[key] = [];
}
_observers[key].push(callback);
},
/**
* Remove change listeners
*
* @param {String} key Key name to unregister listeners against
* @param {Function} [callback] If set, unregister the callback, if not - unregister all
*/
stopListening: function(key, callback) {
_checkKey(key);
if (!_observers[key]) {
return;
}
if (!callback) {
delete _observers[key];
return;
}
for (var i = _observers[key].length - 1; i >= 0; i--) {
if (_observers[key][i] == callback) {
_observers[key].splice(i, 1);
}
}
},
/**
* Subscribe to a Publish/Subscribe event stream
*
* @param {String} channel Channel name
* @param {Function} callback Function to run when the something is published to the channel
*/
subscribe: function(channel, callback) {
channel = (channel || '').toString();
if (!channel) {
throw new TypeError('Channel not defined');
}
if (!_pubsub_observers[channel]) {
_pubsub_observers[channel] = [];
}
_pubsub_observers[channel].push(callback);
},
/**
* Publish data to an event stream
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload to deliver
*/
publish: function(channel, payload) {
channel = (channel || '').toString();
if (!channel) {
throw new TypeError('Channel not defined');
}
_publish(channel, payload);
},
/**
* Reloads the data from browser storage
*/
reInit: function() {
_reloadData();
},
/**
* Removes reference from global objects and saves it as jStorage
*
* @param {Boolean} option if needed to save object as simple 'jStorage' in windows context
*/
noConflict: function(saveInGlobal) {
delete window.$.jStorage;
if (saveInGlobal) {
window.jStorage = this;
}
return this;
}
};
// Initialize jStorage
_init();
})()` | 76050 | `/*
* ----------------------------- JSTORAGE -------------------------------------
* Simple local storage wrapper to save data on the browser side, supporting
* all major browsers - IE6+, Firefox2+, Safari4+, Chrome4+ and Opera 10.5+
*
* Author: <NAME>, <EMAIL>
* Project homepage: www.jstorage.info
*
* Licensed under Unlicense:
*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/
/* global ActiveXObject: false */
/* jshint browser: true */
(function() {
'use strict';
var
/* jStorage version */
JSTORAGE_VERSION = '0.4.12',
/* detect a dollar object or create one if not found */
$ = window.jQuery || window.$ || (window.$ = {}),
/* check for a JSON handling support */
JSON = {
parse: window.JSON && (window.JSON.parse || window.JSON.decode) ||
String.prototype.evalJSON && function(str) {
return String(str).evalJSON();
} ||
$.parseJSON ||
$.evalJSON,
stringify: Object.toJSON ||
window.JSON && (window.JSON.stringify || window.JSON.encode) ||
$.toJSON
};
// Break if no JSON support was found
if (typeof JSON.parse !== 'function' || typeof JSON.stringify !== 'function') {
throw new Error('No JSON support found, include //cdnjs.cloudflare.com/ajax/libs/json2/20110223/json2.js to page');
}
var
/* This is the object, that holds the cached values */
_storage = {
__jstorage_meta: {
CRC32: {}
}
},
/* Actual browser storage (localStorage or globalStorage['domain']) */
_storage_service = {
jStorage: '{}'
},
/* DOM element for older IE versions, holds userData behavior */
_storage_elm = null,
/* How much space does the storage take */
_storage_size = 0,
/* which backend is currently used */
_backend = false,
/* onchange observers */
_observers = {},
/* timeout to wait after onchange event */
_observer_timeout = false,
/* last update time */
_observer_update = 0,
/* pubsub observers */
_pubsub_observers = {},
/* skip published items older than current timestamp */
_pubsub_last = +new Date(),
/* Next check for TTL */
_ttl_timeout,
/**
* XML encoding and decoding as XML nodes can't be JSON'ized
* XML nodes are encoded and decoded if the node is the value to be saved
* but not if it's as a property of another object
* Eg. -
* $.jStorage.set('key', xmlNode); // IS OK
* $.jStorage.set('key', {xml: xmlNode}); // NOT OK
*/
_XMLService = {
/**
* Validates a XML node to be XML
* based on jQuery.isXML function
*/
isXML: function(elm) {
var documentElement = (elm ? elm.ownerDocument || elm : 0).documentElement;
return documentElement ? documentElement.nodeName !== 'HTML' : false;
},
/**
* Encodes a XML node to string
* based on http://www.mercurytide.co.uk/news/article/issues-when-working-ajax/
*/
encode: function(xmlNode) {
if (!this.isXML(xmlNode)) {
return false;
}
try { // Mozilla, Webkit, Opera
return new XMLSerializer().serializeToString(xmlNode);
} catch (E1) {
try { // IE
return xmlNode.xml;
} catch (E2) {}
}
return false;
},
/**
* Decodes a XML node from string
* loosely based on http://outwestmedia.com/jquery-plugins/xmldom/
*/
decode: function(xmlString) {
var dom_parser = ('DOMParser' in window && (new DOMParser()).parseFromString) ||
(window.ActiveXObject && function(_xmlString) {
var xml_doc = new ActiveXObject('Microsoft.XMLDOM');
xml_doc.async = 'false';
xml_doc.loadXML(_xmlString);
return xml_doc;
}),
resultXML;
if (!dom_parser) {
return false;
}
resultXML = dom_parser.call('DOMParser' in window && (new DOMParser()) || window, xmlString, 'text/xml');
return this.isXML(resultXML) ? resultXML : false;
}
};
////////////////////////// PRIVATE METHODS ////////////////////////
/**
* Initialization function. Detects if the browser supports DOM Storage
* or userData behavior and behaves accordingly.
*/
function _init() {
/* Check if browser supports localStorage */
var localStorageReallyWorks = false;
if ('localStorage' in window) {
try {
window.localStorage.setItem('_tmptest', 'tmpval');
localStorageReallyWorks = true;
window.localStorage.removeItem('_tmptest');
} catch (BogusQuotaExceededErrorOnIos5) {
// Thanks be to iOS5 Private Browsing mode which throws
// QUOTA_EXCEEDED_ERRROR DOM Exception 22.
}
}
if (localStorageReallyWorks) {
try {
if (window.localStorage) {
_storage_service = window.localStorage;
_backend = 'localStorage';
_observer_update = _storage_service.jStorage_update;
}
} catch (E3) { /* Firefox fails when touching localStorage and cookies are disabled */ }
}
/* Check if browser supports globalStorage */
else if ('globalStorage' in window) {
try {
if (window.globalStorage) {
if (window.location.hostname == 'localhost') {
_storage_service = window.globalStorage['localhost.localdomain'];
} else {
_storage_service = window.globalStorage[window.location.hostname];
}
_backend = 'globalStorage';
_observer_update = _storage_service.jStorage_update;
}
} catch (E4) { /* Firefox fails when touching localStorage and cookies are disabled */ }
}
/* Check if browser supports userData behavior */
else {
_storage_elm = document.createElement('link');
if (_storage_elm.addBehavior) {
/* Use a DOM element to act as userData storage */
_storage_elm.style.behavior = 'url(#default#userData)';
/* userData element needs to be inserted into the DOM! */
document.getElementsByTagName('head')[0].appendChild(_storage_elm);
try {
_storage_elm.load('jStorage');
} catch (E) {
// try to reset cache
_storage_elm.setAttribute('jStorage', '{}');
_storage_elm.save('jStorage');
_storage_elm.load('jStorage');
}
var data = '{}';
try {
data = _storage_elm.getAttribute('jStorage');
} catch (E5) {}
try {
_observer_update = _storage_elm.getAttribute('jStorage_update');
} catch (E6) {}
_storage_service.jStorage = data;
_backend = 'userDataBehavior';
} else {
_storage_elm = null;
return;
}
}
// Load data from storage
_load_storage();
// remove dead keys
_handleTTL();
// start listening for changes
_setupObserver();
// initialize publish-subscribe service
_handlePubSub();
// handle cached navigation
if ('addEventListener' in window) {
window.addEventListener('pageshow', function(event) {
if (event.persisted) {
_storageObserver();
}
}, false);
}
}
/**
* Reload data from storage when needed
*/
function _reloadData() {
var data = '{}';
if (_backend == 'userDataBehavior') {
_storage_elm.load('jStorage');
try {
data = _storage_elm.getAttribute('jStorage');
} catch (E5) {}
try {
_observer_update = _storage_elm.getAttribute('jStorage_update');
} catch (E6) {}
_storage_service.jStorage = data;
}
_load_storage();
// remove dead keys
_handleTTL();
_handlePubSub();
}
/**
* Sets up a storage change observer
*/
function _setupObserver() {
if (_backend == 'localStorage' || _backend == 'globalStorage') {
if ('addEventListener' in window) {
window.addEventListener('storage', _storageObserver, false);
} else {
document.attachEvent('onstorage', _storageObserver);
}
} else if (_backend == 'userDataBehavior') {
setInterval(_storageObserver, 1000);
}
}
/**
* Fired on any kind of data change, needs to check if anything has
* really been changed
*/
function _storageObserver() {
var updateTime;
// cumulate change notifications with timeout
clearTimeout(_observer_timeout);
_observer_timeout = setTimeout(function() {
if (_backend == 'localStorage' || _backend == 'globalStorage') {
updateTime = _storage_service.jStorage_update;
} else if (_backend == 'userDataBehavior') {
_storage_elm.load('jStorage');
try {
updateTime = _storage_elm.getAttribute('jStorage_update');
} catch (E5) {}
}
if (updateTime && updateTime != _observer_update) {
_observer_update = updateTime;
_checkUpdatedKeys();
}
}, 25);
}
/**
* Reloads the data and checks if any keys are changed
*/
function _checkUpdatedKeys() {
var oldCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32)),
newCrc32List;
_reloadData();
newCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32));
var key,
updated = [],
removed = [];
for (key in oldCrc32List) {
if (oldCrc32List.hasOwnProperty(key)) {
if (!newCrc32List[key]) {
removed.push(key);
continue;
}
if (oldCrc32List[key] != newCrc32List[key] && String(oldCrc32List[key]).substr(0, 2) == '2.') {
updated.push(key);
}
}
}
for (key in newCrc32List) {
if (newCrc32List.hasOwnProperty(key)) {
if (!oldCrc32List[key]) {
updated.push(key);
}
}
}
_fireObservers(updated, 'updated');
_fireObservers(removed, 'deleted');
}
/**
* Fires observers for updated keys
*
* @param {Array|String} keys Array of key names or a key
* @param {String} action What happened with the value (updated, deleted, flushed)
*/
function _fireObservers(keys, action) {
keys = [].concat(keys || []);
var i, j, len, jlen;
if (action == 'flushed') {
keys = [];
for (var key in _observers) {
if (_observers.hasOwnProperty(key)) {
keys.push(key);
}
}
action = 'deleted';
}
for (i = 0, len = keys.length; i < len; i++) {
if (_observers[keys[i]]) {
for (j = 0, jlen = _observers[keys[i]].length; j < jlen; j++) {
_observers[keys[i]][j](keys[i], action);
}
}
if (_observers['*']) {
for (j = 0, jlen = _observers['*'].length; j < jlen; j++) {
_observers['*'][j](keys[i], action);
}
}
}
}
/**
* Publishes key change to listeners
*/
function _publishChange() {
var updateTime = (+new Date()).toString();
if (_backend == 'localStorage' || _backend == 'globalStorage') {
try {
_storage_service.jStorage_update = updateTime;
} catch (E8) {
// safari private mode has been enabled after the jStorage initialization
_backend = false;
}
} else if (_backend == 'userDataBehavior') {
_storage_elm.setAttribute('jStorage_update', updateTime);
_storage_elm.save('jStorage');
}
_storageObserver();
}
/**
* Loads the data from the storage based on the supported mechanism
*/
function _load_storage() {
/* if jStorage string is retrieved, then decode it */
if (_storage_service.jStorage) {
try {
_storage = JSON.parse(String(_storage_service.jStorage));
} catch (E6) {
_storage_service.jStorage = '{}';
}
} else {
_storage_service.jStorage = '{}';
}
_storage_size = _storage_service.jStorage ? String(_storage_service.jStorage).length : 0;
if (!_storage.__jstorage_meta) {
_storage.__jstorage_meta = {};
}
if (!_storage.__jstorage_meta.CRC32) {
_storage.__jstorage_meta.CRC32 = {};
}
}
/**
* This functions provides the 'save' mechanism to store the jStorage object
*/
function _save() {
_dropOldEvents(); // remove expired events
try {
_storage_service.jStorage = JSON.stringify(_storage);
// If userData is used as the storage engine, additional
if (_storage_elm) {
_storage_elm.setAttribute('jStorage', _storage_service.jStorage);
_storage_elm.save('jStorage');
}
_storage_size = _storage_service.jStorage ? String(_storage_service.jStorage).length : 0;
} catch (E7) { /* probably cache is full, nothing is saved this way*/ }
}
/**
* Function checks if a key is set and is string or numberic
*
* @param {String} key Key name
*/
function _checkKey(key) {
if (typeof key != 'string' && typeof key != 'number') {
throw new TypeError('Key name must be string or numeric');
}
if (key == <KEY>') {
throw new TypeError('Reserved key name');
}
return true;
}
/**
* Removes expired keys
*/
function _handleTTL() {
var curtime, i, TTL, CRC32, nextExpire = Infinity,
changed = false,
deleted = [];
clearTimeout(_ttl_timeout);
if (!_storage.__jstorage_meta || typeof _storage.__jstorage_meta.TTL != 'object') {
// nothing to do here
return;
}
curtime = +new Date();
TTL = _storage.__jstorage_meta.TTL;
CRC32 = _storage.__jstorage_meta.CRC32;
for (i in TTL) {
if (TTL.hasOwnProperty(i)) {
if (TTL[i] <= curtime) {
delete TTL[i];
delete CRC32[i];
delete _storage[i];
changed = true;
deleted.push(i);
} else if (TTL[i] < nextExpire) {
nextExpire = TTL[i];
}
}
}
// set next check
if (nextExpire != Infinity) {
_ttl_timeout = setTimeout(_handleTTL, Math.min(nextExpire - curtime, 0x7FFFFFFF));
}
// save changes
if (changed) {
_save();
_publishChange();
_fireObservers(deleted, 'deleted');
}
}
/**
* Checks if there's any events on hold to be fired to listeners
*/
function _handlePubSub() {
var i, len;
if (!_storage.__jstorage_meta.PubSub) {
return;
}
var pubelm,
_pubsubCurrent = _pubsub_last,
needFired = [];
for (i = len = _storage.__jstorage_meta.PubSub.length - 1; i >= 0; i--) {
pubelm = _storage.__jstorage_meta.PubSub[i];
if (pubelm[0] > _pubsub_last) {
_pubsubCurrent = pubelm[0];
needFired.unshift(pubelm);
}
}
for (i = needFired.length - 1; i >= 0; i--) {
_fireSubscribers(needFired[i][1], needFired[i][2]);
}
_pubsub_last = _pubsubCurrent;
}
/**
* Fires all subscriber listeners for a pubsub channel
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload data to deliver
*/
function _fireSubscribers(channel, payload) {
if (_pubsub_observers[channel]) {
for (var i = 0, len = _pubsub_observers[channel].length; i < len; i++) {
// send immutable data that can't be modified by listeners
try {
_pubsub_observers[channel][i](channel, JSON.parse(JSON.stringify(payload)));
} catch (E) {}
}
}
}
/**
* Remove old events from the publish stream (at least 2sec old)
*/
function _dropOldEvents() {
if (!_storage.__jstorage_meta.PubSub) {
return;
}
var retire = +new Date() - 2000;
for (var i = 0, len = _storage.__jstorage_meta.PubSub.length; i < len; i++) {
if (_storage.__jstorage_meta.PubSub[i][0] <= retire) {
// deleteCount is needed for IE6
_storage.__jstorage_meta.PubSub.splice(i, _storage.__jstorage_meta.PubSub.length - i);
break;
}
}
if (!_storage.__jstorage_meta.PubSub.length) {
delete _storage.__jstorage_meta.PubSub;
}
}
/**
* Publish payload to a channel
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload to send to the subscribers
*/
function _publish(channel, payload) {
if (!_storage.__jstorage_meta) {
_storage.__jstorage_meta = {};
}
if (!_storage.__jstorage_meta.PubSub) {
_storage.__jstorage_meta.PubSub = [];
}
_storage.__jstorage_meta.PubSub.unshift([+new Date(), channel, payload]);
_save();
_publishChange();
}
/**
* JS Implementation of MurmurHash2
*
* SOURCE: https://github.com/garycourt/murmurhash-js (MIT licensed)
*
* @author <a href='mailto:<EMAIL>'><NAME></a>
* @see http://github.com/garycourt/murmurhash-js
* @author <a href='mailto:<EMAIL>'><NAME></a>
* @see http://sites.google.com/site/murmurhash/
*
* @param {string} str ASCII only
* @param {number} seed Positive integer only
* @return {number} 32-bit positive integer hash
*/
function murmurhash2_32_gc(str, seed) {
var
l = str.length,
h = seed ^ l,
i = 0,
k;
while (l >= 4) {
k =
((str.charCodeAt(i) & 0xff)) |
((str.charCodeAt(++i) & 0xff) << 8) |
((str.charCodeAt(++i) & 0xff) << 16) |
((str.charCodeAt(++i) & 0xff) << 24);
k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
k ^= k >>> 24;
k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16)) ^ k;
l -= 4;
++i;
}
switch (l) {
case 3:
h ^= (str.charCodeAt(i + 2) & 0xff) << 16;
/* falls through */
case 2:
h ^= (str.charCodeAt(i + 1) & 0xff) << 8;
/* falls through */
case 1:
h ^= (str.charCodeAt(i) & 0xff);
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
}
h ^= h >>> 13;
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
h ^= h >>> 15;
return h >>> 0;
}
////////////////////////// PUBLIC INTERFACE /////////////////////////
$.jStorage = {
/* Version number */
version: JSTORAGE_VERSION,
/**
* Sets a key's value.
*
* @param {String} key Key to set. If this value is not set or not
* a string an exception is raised.
* @param {Mixed} value Value to set. This can be any value that is JSON
* compatible (Numbers, Strings, Objects etc.).
* @param {Object} [options] - possible options to use
* @param {Number} [options.TTL] - optional TTL value, in milliseconds
* @return {Mixed} the used value
*/
set: function(key, value, options) {
_checkKey(key);
options = options || {};
// undefined values are deleted automatically
if (typeof value == 'undefined') {
this.deleteKey(key);
return value;
}
if (_XMLService.isXML(value)) {
value = {
_is_xml: true,
xml: _XMLService.encode(value)
};
} else if (typeof value == 'function') {
return undefined; // functions can't be saved!
} else if (value && typeof value == 'object') {
// clone the object before saving to _storage tree
value = JSON.parse(JSON.stringify(value));
}
_storage[key] = value;
_storage.__jstorage_meta.CRC32[key] = '2.' + murmurhash2_32_gc(JSON.stringify(value), 0x9747b28c);
this.setTTL(key, options.TTL || 0); // also handles saving and _publishChange
_fireObservers(key, 'updated');
return value;
},
/**
* Looks up a key in cache
*
* @param {String} key - Key to look up.
* @param {mixed} def - Default value to return, if key didn't exist.
* @return {Mixed} the key value, default value or null
*/
get: function(key, def) {
_checkKey(key);
if (key in _storage) {
if (_storage[key] && typeof _storage[key] == 'object' && _storage[key]._is_xml) {
return _XMLService.decode(_storage[key].xml);
} else {
return _storage[key];
}
}
return typeof(def) == 'undefined' ? null : def;
},
/**
* Deletes a key from cache.
*
* @param {String} key - Key to delete.
* @return {Boolean} true if key existed or false if it didn't
*/
deleteKey: function(key) {
_checkKey(key);
if (key in _storage) {
delete _storage[key];
// remove from TTL list
if (typeof _storage.__jstorage_meta.TTL == 'object' &&
key in _storage.__jstorage_meta.TTL) {
delete _storage.__jstorage_meta.TTL[key];
}
delete _storage.__jstorage_meta.CRC32[key];
_save();
_publishChange();
_fireObservers(key, 'deleted');
return true;
}
return false;
},
/**
* Sets a TTL for a key, or remove it if ttl value is 0 or below
*
* @param {String} key - key to set the TTL for
* @param {Number} ttl - TTL timeout in milliseconds
* @return {Boolean} true if key existed or false if it didn't
*/
setTTL: function(key, ttl) {
var curtime = +new Date();
_checkKey(key);
ttl = Number(ttl) || 0;
if (key in _storage) {
if (!_storage.__jstorage_meta.TTL) {
_storage.__jstorage_meta.TTL = {};
}
// Set TTL value for the key
if (ttl > 0) {
_storage.__jstorage_meta.TTL[key] = curtime + ttl;
} else {
delete _storage.__jstorage_meta.TTL[key];
}
_save();
_handleTTL();
_publishChange();
return true;
}
return false;
},
/**
* Gets remaining TTL (in milliseconds) for a key or 0 when no TTL has been set
*
* @param {String} key Key to check
* @return {Number} Remaining TTL in milliseconds
*/
getTTL: function(key) {
var curtime = +new Date(),
ttl;
_checkKey(key);
if (key in _storage && _storage.__jstorage_meta.TTL && _storage.__jstorage_meta.TTL[key]) {
ttl = _storage.__jstorage_meta.TTL[key] - curtime;
return ttl || 0;
}
return 0;
},
/**
* Deletes everything in cache.
*
* @return {Boolean} Always true
*/
flush: function() {
_storage = {
__jstorage_meta: {
CRC32: {}
}
};
_save();
_publishChange();
_fireObservers(null, 'flushed');
return true;
},
/**
* Returns a read-only copy of _storage
*
* @return {Object} Read-only copy of _storage
*/
storageObj: function() {
function F() {}
F.prototype = _storage;
return new F();
},
/**
* Returns an index of all used keys as an array
* ['key1', 'key2',..'keyN']
*
* @return {Array} Used keys
*/
index: function() {
var index = [],
i;
for (i in _storage) {
if (_storage.hasOwnProperty(i) && i != '__jstorage_meta') {
index.push(i);
}
}
return index;
},
/**
* How much space in bytes does the storage take?
*
* @return {Number} Storage size in chars (not the same as in bytes,
* since some chars may take several bytes)
*/
storageSize: function() {
return _storage_size;
},
/**
* Which backend is currently in use?
*
* @return {String} Backend name
*/
currentBackend: function() {
return _backend;
},
/**
* Test if storage is available
*
* @return {Boolean} True if storage can be used
*/
storageAvailable: function() {
return !!_backend;
},
/**
* Register change listeners
*
* @param {String} key Key name
* @param {Function} callback Function to run when the key changes
*/
listenKeyChange: function(key, callback) {
_checkKey(key);
if (!_observers[key]) {
_observers[key] = [];
}
_observers[key].push(callback);
},
/**
* Remove change listeners
*
* @param {String} key Key name to unregister listeners against
* @param {Function} [callback] If set, unregister the callback, if not - unregister all
*/
stopListening: function(key, callback) {
_checkKey(key);
if (!_observers[key]) {
return;
}
if (!callback) {
delete _observers[key];
return;
}
for (var i = _observers[key].length - 1; i >= 0; i--) {
if (_observers[key][i] == callback) {
_observers[key].splice(i, 1);
}
}
},
/**
* Subscribe to a Publish/Subscribe event stream
*
* @param {String} channel Channel name
* @param {Function} callback Function to run when the something is published to the channel
*/
subscribe: function(channel, callback) {
channel = (channel || '').toString();
if (!channel) {
throw new TypeError('Channel not defined');
}
if (!_pubsub_observers[channel]) {
_pubsub_observers[channel] = [];
}
_pubsub_observers[channel].push(callback);
},
/**
* Publish data to an event stream
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload to deliver
*/
publish: function(channel, payload) {
channel = (channel || '').toString();
if (!channel) {
throw new TypeError('Channel not defined');
}
_publish(channel, payload);
},
/**
* Reloads the data from browser storage
*/
reInit: function() {
_reloadData();
},
/**
* Removes reference from global objects and saves it as jStorage
*
* @param {Boolean} option if needed to save object as simple 'jStorage' in windows context
*/
noConflict: function(saveInGlobal) {
delete window.$.jStorage;
if (saveInGlobal) {
window.jStorage = this;
}
return this;
}
};
// Initialize jStorage
_init();
})()` | true | `/*
* ----------------------------- JSTORAGE -------------------------------------
* Simple local storage wrapper to save data on the browser side, supporting
* all major browsers - IE6+, Firefox2+, Safari4+, Chrome4+ and Opera 10.5+
*
* Author: PI:NAME:<NAME>END_PI, PI:EMAIL:<EMAIL>END_PI
* Project homepage: www.jstorage.info
*
* Licensed under Unlicense:
*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/
/* global ActiveXObject: false */
/* jshint browser: true */
(function() {
'use strict';
var
/* jStorage version */
JSTORAGE_VERSION = '0.4.12',
/* detect a dollar object or create one if not found */
$ = window.jQuery || window.$ || (window.$ = {}),
/* check for a JSON handling support */
JSON = {
parse: window.JSON && (window.JSON.parse || window.JSON.decode) ||
String.prototype.evalJSON && function(str) {
return String(str).evalJSON();
} ||
$.parseJSON ||
$.evalJSON,
stringify: Object.toJSON ||
window.JSON && (window.JSON.stringify || window.JSON.encode) ||
$.toJSON
};
// Break if no JSON support was found
if (typeof JSON.parse !== 'function' || typeof JSON.stringify !== 'function') {
throw new Error('No JSON support found, include //cdnjs.cloudflare.com/ajax/libs/json2/20110223/json2.js to page');
}
var
/* This is the object, that holds the cached values */
_storage = {
__jstorage_meta: {
CRC32: {}
}
},
/* Actual browser storage (localStorage or globalStorage['domain']) */
_storage_service = {
jStorage: '{}'
},
/* DOM element for older IE versions, holds userData behavior */
_storage_elm = null,
/* How much space does the storage take */
_storage_size = 0,
/* which backend is currently used */
_backend = false,
/* onchange observers */
_observers = {},
/* timeout to wait after onchange event */
_observer_timeout = false,
/* last update time */
_observer_update = 0,
/* pubsub observers */
_pubsub_observers = {},
/* skip published items older than current timestamp */
_pubsub_last = +new Date(),
/* Next check for TTL */
_ttl_timeout,
/**
* XML encoding and decoding as XML nodes can't be JSON'ized
* XML nodes are encoded and decoded if the node is the value to be saved
* but not if it's as a property of another object
* Eg. -
* $.jStorage.set('key', xmlNode); // IS OK
* $.jStorage.set('key', {xml: xmlNode}); // NOT OK
*/
_XMLService = {
/**
* Validates a XML node to be XML
* based on jQuery.isXML function
*/
isXML: function(elm) {
var documentElement = (elm ? elm.ownerDocument || elm : 0).documentElement;
return documentElement ? documentElement.nodeName !== 'HTML' : false;
},
/**
* Encodes a XML node to string
* based on http://www.mercurytide.co.uk/news/article/issues-when-working-ajax/
*/
encode: function(xmlNode) {
if (!this.isXML(xmlNode)) {
return false;
}
try { // Mozilla, Webkit, Opera
return new XMLSerializer().serializeToString(xmlNode);
} catch (E1) {
try { // IE
return xmlNode.xml;
} catch (E2) {}
}
return false;
},
/**
* Decodes a XML node from string
* loosely based on http://outwestmedia.com/jquery-plugins/xmldom/
*/
decode: function(xmlString) {
var dom_parser = ('DOMParser' in window && (new DOMParser()).parseFromString) ||
(window.ActiveXObject && function(_xmlString) {
var xml_doc = new ActiveXObject('Microsoft.XMLDOM');
xml_doc.async = 'false';
xml_doc.loadXML(_xmlString);
return xml_doc;
}),
resultXML;
if (!dom_parser) {
return false;
}
resultXML = dom_parser.call('DOMParser' in window && (new DOMParser()) || window, xmlString, 'text/xml');
return this.isXML(resultXML) ? resultXML : false;
}
};
////////////////////////// PRIVATE METHODS ////////////////////////
/**
* Initialization function. Detects if the browser supports DOM Storage
* or userData behavior and behaves accordingly.
*/
function _init() {
/* Check if browser supports localStorage */
var localStorageReallyWorks = false;
if ('localStorage' in window) {
try {
window.localStorage.setItem('_tmptest', 'tmpval');
localStorageReallyWorks = true;
window.localStorage.removeItem('_tmptest');
} catch (BogusQuotaExceededErrorOnIos5) {
// Thanks be to iOS5 Private Browsing mode which throws
// QUOTA_EXCEEDED_ERRROR DOM Exception 22.
}
}
if (localStorageReallyWorks) {
try {
if (window.localStorage) {
_storage_service = window.localStorage;
_backend = 'localStorage';
_observer_update = _storage_service.jStorage_update;
}
} catch (E3) { /* Firefox fails when touching localStorage and cookies are disabled */ }
}
/* Check if browser supports globalStorage */
else if ('globalStorage' in window) {
try {
if (window.globalStorage) {
if (window.location.hostname == 'localhost') {
_storage_service = window.globalStorage['localhost.localdomain'];
} else {
_storage_service = window.globalStorage[window.location.hostname];
}
_backend = 'globalStorage';
_observer_update = _storage_service.jStorage_update;
}
} catch (E4) { /* Firefox fails when touching localStorage and cookies are disabled */ }
}
/* Check if browser supports userData behavior */
else {
_storage_elm = document.createElement('link');
if (_storage_elm.addBehavior) {
/* Use a DOM element to act as userData storage */
_storage_elm.style.behavior = 'url(#default#userData)';
/* userData element needs to be inserted into the DOM! */
document.getElementsByTagName('head')[0].appendChild(_storage_elm);
try {
_storage_elm.load('jStorage');
} catch (E) {
// try to reset cache
_storage_elm.setAttribute('jStorage', '{}');
_storage_elm.save('jStorage');
_storage_elm.load('jStorage');
}
var data = '{}';
try {
data = _storage_elm.getAttribute('jStorage');
} catch (E5) {}
try {
_observer_update = _storage_elm.getAttribute('jStorage_update');
} catch (E6) {}
_storage_service.jStorage = data;
_backend = 'userDataBehavior';
} else {
_storage_elm = null;
return;
}
}
// Load data from storage
_load_storage();
// remove dead keys
_handleTTL();
// start listening for changes
_setupObserver();
// initialize publish-subscribe service
_handlePubSub();
// handle cached navigation
if ('addEventListener' in window) {
window.addEventListener('pageshow', function(event) {
if (event.persisted) {
_storageObserver();
}
}, false);
}
}
/**
* Reload data from storage when needed
*/
function _reloadData() {
var data = '{}';
if (_backend == 'userDataBehavior') {
_storage_elm.load('jStorage');
try {
data = _storage_elm.getAttribute('jStorage');
} catch (E5) {}
try {
_observer_update = _storage_elm.getAttribute('jStorage_update');
} catch (E6) {}
_storage_service.jStorage = data;
}
_load_storage();
// remove dead keys
_handleTTL();
_handlePubSub();
}
/**
* Sets up a storage change observer
*/
function _setupObserver() {
if (_backend == 'localStorage' || _backend == 'globalStorage') {
if ('addEventListener' in window) {
window.addEventListener('storage', _storageObserver, false);
} else {
document.attachEvent('onstorage', _storageObserver);
}
} else if (_backend == 'userDataBehavior') {
setInterval(_storageObserver, 1000);
}
}
/**
* Fired on any kind of data change, needs to check if anything has
* really been changed
*/
function _storageObserver() {
var updateTime;
// cumulate change notifications with timeout
clearTimeout(_observer_timeout);
_observer_timeout = setTimeout(function() {
if (_backend == 'localStorage' || _backend == 'globalStorage') {
updateTime = _storage_service.jStorage_update;
} else if (_backend == 'userDataBehavior') {
_storage_elm.load('jStorage');
try {
updateTime = _storage_elm.getAttribute('jStorage_update');
} catch (E5) {}
}
if (updateTime && updateTime != _observer_update) {
_observer_update = updateTime;
_checkUpdatedKeys();
}
}, 25);
}
/**
* Reloads the data and checks if any keys are changed
*/
function _checkUpdatedKeys() {
var oldCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32)),
newCrc32List;
_reloadData();
newCrc32List = JSON.parse(JSON.stringify(_storage.__jstorage_meta.CRC32));
var key,
updated = [],
removed = [];
for (key in oldCrc32List) {
if (oldCrc32List.hasOwnProperty(key)) {
if (!newCrc32List[key]) {
removed.push(key);
continue;
}
if (oldCrc32List[key] != newCrc32List[key] && String(oldCrc32List[key]).substr(0, 2) == '2.') {
updated.push(key);
}
}
}
for (key in newCrc32List) {
if (newCrc32List.hasOwnProperty(key)) {
if (!oldCrc32List[key]) {
updated.push(key);
}
}
}
_fireObservers(updated, 'updated');
_fireObservers(removed, 'deleted');
}
/**
* Fires observers for updated keys
*
* @param {Array|String} keys Array of key names or a key
* @param {String} action What happened with the value (updated, deleted, flushed)
*/
function _fireObservers(keys, action) {
keys = [].concat(keys || []);
var i, j, len, jlen;
if (action == 'flushed') {
keys = [];
for (var key in _observers) {
if (_observers.hasOwnProperty(key)) {
keys.push(key);
}
}
action = 'deleted';
}
for (i = 0, len = keys.length; i < len; i++) {
if (_observers[keys[i]]) {
for (j = 0, jlen = _observers[keys[i]].length; j < jlen; j++) {
_observers[keys[i]][j](keys[i], action);
}
}
if (_observers['*']) {
for (j = 0, jlen = _observers['*'].length; j < jlen; j++) {
_observers['*'][j](keys[i], action);
}
}
}
}
/**
* Publishes key change to listeners
*/
function _publishChange() {
var updateTime = (+new Date()).toString();
if (_backend == 'localStorage' || _backend == 'globalStorage') {
try {
_storage_service.jStorage_update = updateTime;
} catch (E8) {
// safari private mode has been enabled after the jStorage initialization
_backend = false;
}
} else if (_backend == 'userDataBehavior') {
_storage_elm.setAttribute('jStorage_update', updateTime);
_storage_elm.save('jStorage');
}
_storageObserver();
}
/**
* Loads the data from the storage based on the supported mechanism
*/
function _load_storage() {
/* if jStorage string is retrieved, then decode it */
if (_storage_service.jStorage) {
try {
_storage = JSON.parse(String(_storage_service.jStorage));
} catch (E6) {
_storage_service.jStorage = '{}';
}
} else {
_storage_service.jStorage = '{}';
}
_storage_size = _storage_service.jStorage ? String(_storage_service.jStorage).length : 0;
if (!_storage.__jstorage_meta) {
_storage.__jstorage_meta = {};
}
if (!_storage.__jstorage_meta.CRC32) {
_storage.__jstorage_meta.CRC32 = {};
}
}
/**
* This functions provides the 'save' mechanism to store the jStorage object
*/
function _save() {
_dropOldEvents(); // remove expired events
try {
_storage_service.jStorage = JSON.stringify(_storage);
// If userData is used as the storage engine, additional
if (_storage_elm) {
_storage_elm.setAttribute('jStorage', _storage_service.jStorage);
_storage_elm.save('jStorage');
}
_storage_size = _storage_service.jStorage ? String(_storage_service.jStorage).length : 0;
} catch (E7) { /* probably cache is full, nothing is saved this way*/ }
}
/**
* Function checks if a key is set and is string or numberic
*
* @param {String} key Key name
*/
function _checkKey(key) {
if (typeof key != 'string' && typeof key != 'number') {
throw new TypeError('Key name must be string or numeric');
}
if (key == PI:KEY:<KEY>END_PI') {
throw new TypeError('Reserved key name');
}
return true;
}
/**
* Removes expired keys
*/
function _handleTTL() {
var curtime, i, TTL, CRC32, nextExpire = Infinity,
changed = false,
deleted = [];
clearTimeout(_ttl_timeout);
if (!_storage.__jstorage_meta || typeof _storage.__jstorage_meta.TTL != 'object') {
// nothing to do here
return;
}
curtime = +new Date();
TTL = _storage.__jstorage_meta.TTL;
CRC32 = _storage.__jstorage_meta.CRC32;
for (i in TTL) {
if (TTL.hasOwnProperty(i)) {
if (TTL[i] <= curtime) {
delete TTL[i];
delete CRC32[i];
delete _storage[i];
changed = true;
deleted.push(i);
} else if (TTL[i] < nextExpire) {
nextExpire = TTL[i];
}
}
}
// set next check
if (nextExpire != Infinity) {
_ttl_timeout = setTimeout(_handleTTL, Math.min(nextExpire - curtime, 0x7FFFFFFF));
}
// save changes
if (changed) {
_save();
_publishChange();
_fireObservers(deleted, 'deleted');
}
}
/**
* Checks if there's any events on hold to be fired to listeners
*/
function _handlePubSub() {
var i, len;
if (!_storage.__jstorage_meta.PubSub) {
return;
}
var pubelm,
_pubsubCurrent = _pubsub_last,
needFired = [];
for (i = len = _storage.__jstorage_meta.PubSub.length - 1; i >= 0; i--) {
pubelm = _storage.__jstorage_meta.PubSub[i];
if (pubelm[0] > _pubsub_last) {
_pubsubCurrent = pubelm[0];
needFired.unshift(pubelm);
}
}
for (i = needFired.length - 1; i >= 0; i--) {
_fireSubscribers(needFired[i][1], needFired[i][2]);
}
_pubsub_last = _pubsubCurrent;
}
/**
* Fires all subscriber listeners for a pubsub channel
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload data to deliver
*/
function _fireSubscribers(channel, payload) {
if (_pubsub_observers[channel]) {
for (var i = 0, len = _pubsub_observers[channel].length; i < len; i++) {
// send immutable data that can't be modified by listeners
try {
_pubsub_observers[channel][i](channel, JSON.parse(JSON.stringify(payload)));
} catch (E) {}
}
}
}
/**
* Remove old events from the publish stream (at least 2sec old)
*/
function _dropOldEvents() {
if (!_storage.__jstorage_meta.PubSub) {
return;
}
var retire = +new Date() - 2000;
for (var i = 0, len = _storage.__jstorage_meta.PubSub.length; i < len; i++) {
if (_storage.__jstorage_meta.PubSub[i][0] <= retire) {
// deleteCount is needed for IE6
_storage.__jstorage_meta.PubSub.splice(i, _storage.__jstorage_meta.PubSub.length - i);
break;
}
}
if (!_storage.__jstorage_meta.PubSub.length) {
delete _storage.__jstorage_meta.PubSub;
}
}
/**
* Publish payload to a channel
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload to send to the subscribers
*/
function _publish(channel, payload) {
if (!_storage.__jstorage_meta) {
_storage.__jstorage_meta = {};
}
if (!_storage.__jstorage_meta.PubSub) {
_storage.__jstorage_meta.PubSub = [];
}
_storage.__jstorage_meta.PubSub.unshift([+new Date(), channel, payload]);
_save();
_publishChange();
}
/**
* JS Implementation of MurmurHash2
*
* SOURCE: https://github.com/garycourt/murmurhash-js (MIT licensed)
*
* @author <a href='mailto:PI:EMAIL:<EMAIL>END_PI'>PI:NAME:<NAME>END_PI</a>
* @see http://github.com/garycourt/murmurhash-js
* @author <a href='mailto:PI:EMAIL:<EMAIL>END_PI'>PI:NAME:<NAME>END_PI</a>
* @see http://sites.google.com/site/murmurhash/
*
* @param {string} str ASCII only
* @param {number} seed Positive integer only
* @return {number} 32-bit positive integer hash
*/
function murmurhash2_32_gc(str, seed) {
var
l = str.length,
h = seed ^ l,
i = 0,
k;
while (l >= 4) {
k =
((str.charCodeAt(i) & 0xff)) |
((str.charCodeAt(++i) & 0xff) << 8) |
((str.charCodeAt(++i) & 0xff) << 16) |
((str.charCodeAt(++i) & 0xff) << 24);
k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
k ^= k >>> 24;
k = (((k & 0xffff) * 0x5bd1e995) + ((((k >>> 16) * 0x5bd1e995) & 0xffff) << 16));
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16)) ^ k;
l -= 4;
++i;
}
switch (l) {
case 3:
h ^= (str.charCodeAt(i + 2) & 0xff) << 16;
/* falls through */
case 2:
h ^= (str.charCodeAt(i + 1) & 0xff) << 8;
/* falls through */
case 1:
h ^= (str.charCodeAt(i) & 0xff);
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
}
h ^= h >>> 13;
h = (((h & 0xffff) * 0x5bd1e995) + ((((h >>> 16) * 0x5bd1e995) & 0xffff) << 16));
h ^= h >>> 15;
return h >>> 0;
}
////////////////////////// PUBLIC INTERFACE /////////////////////////
$.jStorage = {
/* Version number */
version: JSTORAGE_VERSION,
/**
* Sets a key's value.
*
* @param {String} key Key to set. If this value is not set or not
* a string an exception is raised.
* @param {Mixed} value Value to set. This can be any value that is JSON
* compatible (Numbers, Strings, Objects etc.).
* @param {Object} [options] - possible options to use
* @param {Number} [options.TTL] - optional TTL value, in milliseconds
* @return {Mixed} the used value
*/
set: function(key, value, options) {
_checkKey(key);
options = options || {};
// undefined values are deleted automatically
if (typeof value == 'undefined') {
this.deleteKey(key);
return value;
}
if (_XMLService.isXML(value)) {
value = {
_is_xml: true,
xml: _XMLService.encode(value)
};
} else if (typeof value == 'function') {
return undefined; // functions can't be saved!
} else if (value && typeof value == 'object') {
// clone the object before saving to _storage tree
value = JSON.parse(JSON.stringify(value));
}
_storage[key] = value;
_storage.__jstorage_meta.CRC32[key] = '2.' + murmurhash2_32_gc(JSON.stringify(value), 0x9747b28c);
this.setTTL(key, options.TTL || 0); // also handles saving and _publishChange
_fireObservers(key, 'updated');
return value;
},
/**
* Looks up a key in cache
*
* @param {String} key - Key to look up.
* @param {mixed} def - Default value to return, if key didn't exist.
* @return {Mixed} the key value, default value or null
*/
get: function(key, def) {
_checkKey(key);
if (key in _storage) {
if (_storage[key] && typeof _storage[key] == 'object' && _storage[key]._is_xml) {
return _XMLService.decode(_storage[key].xml);
} else {
return _storage[key];
}
}
return typeof(def) == 'undefined' ? null : def;
},
/**
* Deletes a key from cache.
*
* @param {String} key - Key to delete.
* @return {Boolean} true if key existed or false if it didn't
*/
deleteKey: function(key) {
_checkKey(key);
if (key in _storage) {
delete _storage[key];
// remove from TTL list
if (typeof _storage.__jstorage_meta.TTL == 'object' &&
key in _storage.__jstorage_meta.TTL) {
delete _storage.__jstorage_meta.TTL[key];
}
delete _storage.__jstorage_meta.CRC32[key];
_save();
_publishChange();
_fireObservers(key, 'deleted');
return true;
}
return false;
},
/**
* Sets a TTL for a key, or remove it if ttl value is 0 or below
*
* @param {String} key - key to set the TTL for
* @param {Number} ttl - TTL timeout in milliseconds
* @return {Boolean} true if key existed or false if it didn't
*/
setTTL: function(key, ttl) {
var curtime = +new Date();
_checkKey(key);
ttl = Number(ttl) || 0;
if (key in _storage) {
if (!_storage.__jstorage_meta.TTL) {
_storage.__jstorage_meta.TTL = {};
}
// Set TTL value for the key
if (ttl > 0) {
_storage.__jstorage_meta.TTL[key] = curtime + ttl;
} else {
delete _storage.__jstorage_meta.TTL[key];
}
_save();
_handleTTL();
_publishChange();
return true;
}
return false;
},
/**
* Gets remaining TTL (in milliseconds) for a key or 0 when no TTL has been set
*
* @param {String} key Key to check
* @return {Number} Remaining TTL in milliseconds
*/
getTTL: function(key) {
var curtime = +new Date(),
ttl;
_checkKey(key);
if (key in _storage && _storage.__jstorage_meta.TTL && _storage.__jstorage_meta.TTL[key]) {
ttl = _storage.__jstorage_meta.TTL[key] - curtime;
return ttl || 0;
}
return 0;
},
/**
* Deletes everything in cache.
*
* @return {Boolean} Always true
*/
flush: function() {
_storage = {
__jstorage_meta: {
CRC32: {}
}
};
_save();
_publishChange();
_fireObservers(null, 'flushed');
return true;
},
/**
* Returns a read-only copy of _storage
*
* @return {Object} Read-only copy of _storage
*/
storageObj: function() {
function F() {}
F.prototype = _storage;
return new F();
},
/**
* Returns an index of all used keys as an array
* ['key1', 'key2',..'keyN']
*
* @return {Array} Used keys
*/
index: function() {
var index = [],
i;
for (i in _storage) {
if (_storage.hasOwnProperty(i) && i != '__jstorage_meta') {
index.push(i);
}
}
return index;
},
/**
* How much space in bytes does the storage take?
*
* @return {Number} Storage size in chars (not the same as in bytes,
* since some chars may take several bytes)
*/
storageSize: function() {
return _storage_size;
},
/**
* Which backend is currently in use?
*
* @return {String} Backend name
*/
currentBackend: function() {
return _backend;
},
/**
* Test if storage is available
*
* @return {Boolean} True if storage can be used
*/
storageAvailable: function() {
return !!_backend;
},
/**
* Register change listeners
*
* @param {String} key Key name
* @param {Function} callback Function to run when the key changes
*/
listenKeyChange: function(key, callback) {
_checkKey(key);
if (!_observers[key]) {
_observers[key] = [];
}
_observers[key].push(callback);
},
/**
* Remove change listeners
*
* @param {String} key Key name to unregister listeners against
* @param {Function} [callback] If set, unregister the callback, if not - unregister all
*/
stopListening: function(key, callback) {
_checkKey(key);
if (!_observers[key]) {
return;
}
if (!callback) {
delete _observers[key];
return;
}
for (var i = _observers[key].length - 1; i >= 0; i--) {
if (_observers[key][i] == callback) {
_observers[key].splice(i, 1);
}
}
},
/**
* Subscribe to a Publish/Subscribe event stream
*
* @param {String} channel Channel name
* @param {Function} callback Function to run when the something is published to the channel
*/
subscribe: function(channel, callback) {
channel = (channel || '').toString();
if (!channel) {
throw new TypeError('Channel not defined');
}
if (!_pubsub_observers[channel]) {
_pubsub_observers[channel] = [];
}
_pubsub_observers[channel].push(callback);
},
/**
* Publish data to an event stream
*
* @param {String} channel Channel name
* @param {Mixed} payload Payload to deliver
*/
publish: function(channel, payload) {
channel = (channel || '').toString();
if (!channel) {
throw new TypeError('Channel not defined');
}
_publish(channel, payload);
},
/**
* Reloads the data from browser storage
*/
reInit: function() {
_reloadData();
},
/**
* Removes reference from global objects and saves it as jStorage
*
* @param {Boolean} option if needed to save object as simple 'jStorage' in windows context
*/
noConflict: function(saveInGlobal) {
delete window.$.jStorage;
if (saveInGlobal) {
window.jStorage = this;
}
return this;
}
};
// Initialize jStorage
_init();
})()` |
[
{
"context": " implementation of Ruby's string.succ method.\n# By Devon Govett\n#\n# Returns the successor to str. The successor i",
"end": 71,
"score": 0.9998854398727417,
"start": 59,
"tag": "NAME",
"value": "Devon Govett"
}
] | lib/font/utils.coffee | sergeyt/pdfkit | 5 | ###
# An implementation of Ruby's string.succ method.
# By Devon Govett
#
# Returns the successor to str. The successor is calculated by incrementing characters starting
# from the rightmost alphanumeric (or the rightmost character if there are no alphanumerics) in the
# string. Incrementing a digit always results in another digit, and incrementing a letter results in
# another letter of the same case.
#
# If the increment generates a carry, the character to the left of it is incremented. This
# process repeats until there is no carry, adding an additional character if necessary.
#
# succ("abcd") == "abce"
# succ("THX1138") == "THX1139"
# succ("<<koala>>") == "<<koalb>>"
# succ("1999zzz") == "2000aaa"
# succ("ZZZ9999") == "AAAA0000"
###
exports.successorOf = (input) ->
alphabet = 'abcdefghijklmnopqrstuvwxyz'
length = alphabet.length
result = input
i = input.length
while i >= 0
last = input.charAt(--i)
if isNaN(last)
index = alphabet.indexOf(last.toLowerCase())
if index is -1
next = last
carry = true
else
next = alphabet.charAt((index + 1) % length)
isUpperCase = last is last.toUpperCase()
if isUpperCase
next = next.toUpperCase()
carry = index + 1 >= length
if carry and i is 0
added = if isUpperCase then 'A' else 'a'
result = added + next + result.slice(1)
break
else
next = +last + 1
carry = next > 9
next = 0 if carry
if carry and i is 0
result = '1' + next + result.slice(1)
break
result = result.slice(0, i) + next + result.slice(i + 1)
break unless carry
return result
# Swaps the properties and values of an object and returns the result
exports.invert = (object) ->
ret = {}
for key, val of object
ret[val] = key
return ret | 115138 | ###
# An implementation of Ruby's string.succ method.
# By <NAME>
#
# Returns the successor to str. The successor is calculated by incrementing characters starting
# from the rightmost alphanumeric (or the rightmost character if there are no alphanumerics) in the
# string. Incrementing a digit always results in another digit, and incrementing a letter results in
# another letter of the same case.
#
# If the increment generates a carry, the character to the left of it is incremented. This
# process repeats until there is no carry, adding an additional character if necessary.
#
# succ("abcd") == "abce"
# succ("THX1138") == "THX1139"
# succ("<<koala>>") == "<<koalb>>"
# succ("1999zzz") == "2000aaa"
# succ("ZZZ9999") == "AAAA0000"
###
exports.successorOf = (input) ->
alphabet = 'abcdefghijklmnopqrstuvwxyz'
length = alphabet.length
result = input
i = input.length
while i >= 0
last = input.charAt(--i)
if isNaN(last)
index = alphabet.indexOf(last.toLowerCase())
if index is -1
next = last
carry = true
else
next = alphabet.charAt((index + 1) % length)
isUpperCase = last is last.toUpperCase()
if isUpperCase
next = next.toUpperCase()
carry = index + 1 >= length
if carry and i is 0
added = if isUpperCase then 'A' else 'a'
result = added + next + result.slice(1)
break
else
next = +last + 1
carry = next > 9
next = 0 if carry
if carry and i is 0
result = '1' + next + result.slice(1)
break
result = result.slice(0, i) + next + result.slice(i + 1)
break unless carry
return result
# Swaps the properties and values of an object and returns the result
exports.invert = (object) ->
ret = {}
for key, val of object
ret[val] = key
return ret | true | ###
# An implementation of Ruby's string.succ method.
# By PI:NAME:<NAME>END_PI
#
# Returns the successor to str. The successor is calculated by incrementing characters starting
# from the rightmost alphanumeric (or the rightmost character if there are no alphanumerics) in the
# string. Incrementing a digit always results in another digit, and incrementing a letter results in
# another letter of the same case.
#
# If the increment generates a carry, the character to the left of it is incremented. This
# process repeats until there is no carry, adding an additional character if necessary.
#
# succ("abcd") == "abce"
# succ("THX1138") == "THX1139"
# succ("<<koala>>") == "<<koalb>>"
# succ("1999zzz") == "2000aaa"
# succ("ZZZ9999") == "AAAA0000"
###
exports.successorOf = (input) ->
alphabet = 'abcdefghijklmnopqrstuvwxyz'
length = alphabet.length
result = input
i = input.length
while i >= 0
last = input.charAt(--i)
if isNaN(last)
index = alphabet.indexOf(last.toLowerCase())
if index is -1
next = last
carry = true
else
next = alphabet.charAt((index + 1) % length)
isUpperCase = last is last.toUpperCase()
if isUpperCase
next = next.toUpperCase()
carry = index + 1 >= length
if carry and i is 0
added = if isUpperCase then 'A' else 'a'
result = added + next + result.slice(1)
break
else
next = +last + 1
carry = next > 9
next = 0 if carry
if carry and i is 0
result = '1' + next + result.slice(1)
break
result = result.slice(0, i) + next + result.slice(i + 1)
break unless carry
return result
# Swaps the properties and values of an object and returns the result
exports.invert = (object) ->
ret = {}
for key, val of object
ret[val] = key
return ret |
[
{
"context": "\nconcatする際のファイルの順番を指定する\n@create 2016-06-23\n@author KoutarouYabe <idolm@ster.pw>\n###\n\nmodule.exports =\n cssFilesT",
"end": 87,
"score": 0.9998893737792969,
"start": 75,
"tag": "NAME",
"value": "KoutarouYabe"
},
{
"context": "の順番を指定する\n@create 2016-06-23\n@author ... | tasks/pipeline.coffee | ky0615/atc_tram | 0 | ###
Gulp configure file
concatする際のファイルの順番を指定する
@create 2016-06-23
@author KoutarouYabe <idolm@ster.pw>
###
module.exports =
cssFilesToInject: [
"lib/**/*.css"
"**/*.css"
].map (path)-> "www/css/" + path
jsFilesToInject: [
"lib/angular.js"
"lib/**/*.js"
"application.js"
"**/*.js"
].map (path)-> "www/js/" + path
templateFilesToInject: [
"assets/**/*.html"
"!assets/index.html"
]
dependenciesFilesToInject: [
"angular/angular.js"
"angular-animate/angular-animate.js"
"angular-aria/angular-aria.js"
"angular-bootstrap/ui-bootstrap.js"
"angular-bootstrap/ui-bootstrap-tpls.js"
"angular-material/angular-material.css"
"angular-material/angular-material.js"
"angular-messages/angular-messages.min.js"
"angular-mocks/angular-mocks.js"
"angular-resource/angular-resource.js"
"angular-sanitize/angular-sanitize.js"
"angular-ui-router/release/angular-ui-router.js"
# "bootstrap/dist/css/bootstrap.css"
# "bootstrap/dist/css/bootstrap-theme.css"
"bootstrap/dist/fonts/glyphicons-halflings-regular.eot"
"bootstrap/dist/fonts/glyphicons-halflings-regular.svg"
"bootstrap/dist/fonts/glyphicons-halflings-regular.ttf"
"bootstrap/dist/fonts/glyphicons-halflings-regular.woff"
"bootstrap/dist/fonts/glyphicons-halflings-regular.woff"
"jquery/dist/jquery.js"
"firebase/firebase.js"
].map (path)-> require("path").resolve __dirname, "../node_modules/", path
| 49335 | ###
Gulp configure file
concatする際のファイルの順番を指定する
@create 2016-06-23
@author <NAME> <<EMAIL>>
###
module.exports =
cssFilesToInject: [
"lib/**/*.css"
"**/*.css"
].map (path)-> "www/css/" + path
jsFilesToInject: [
"lib/angular.js"
"lib/**/*.js"
"application.js"
"**/*.js"
].map (path)-> "www/js/" + path
templateFilesToInject: [
"assets/**/*.html"
"!assets/index.html"
]
dependenciesFilesToInject: [
"angular/angular.js"
"angular-animate/angular-animate.js"
"angular-aria/angular-aria.js"
"angular-bootstrap/ui-bootstrap.js"
"angular-bootstrap/ui-bootstrap-tpls.js"
"angular-material/angular-material.css"
"angular-material/angular-material.js"
"angular-messages/angular-messages.min.js"
"angular-mocks/angular-mocks.js"
"angular-resource/angular-resource.js"
"angular-sanitize/angular-sanitize.js"
"angular-ui-router/release/angular-ui-router.js"
# "bootstrap/dist/css/bootstrap.css"
# "bootstrap/dist/css/bootstrap-theme.css"
"bootstrap/dist/fonts/glyphicons-halflings-regular.eot"
"bootstrap/dist/fonts/glyphicons-halflings-regular.svg"
"bootstrap/dist/fonts/glyphicons-halflings-regular.ttf"
"bootstrap/dist/fonts/glyphicons-halflings-regular.woff"
"bootstrap/dist/fonts/glyphicons-halflings-regular.woff"
"jquery/dist/jquery.js"
"firebase/firebase.js"
].map (path)-> require("path").resolve __dirname, "../node_modules/", path
| true | ###
Gulp configure file
concatする際のファイルの順番を指定する
@create 2016-06-23
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
module.exports =
cssFilesToInject: [
"lib/**/*.css"
"**/*.css"
].map (path)-> "www/css/" + path
jsFilesToInject: [
"lib/angular.js"
"lib/**/*.js"
"application.js"
"**/*.js"
].map (path)-> "www/js/" + path
templateFilesToInject: [
"assets/**/*.html"
"!assets/index.html"
]
dependenciesFilesToInject: [
"angular/angular.js"
"angular-animate/angular-animate.js"
"angular-aria/angular-aria.js"
"angular-bootstrap/ui-bootstrap.js"
"angular-bootstrap/ui-bootstrap-tpls.js"
"angular-material/angular-material.css"
"angular-material/angular-material.js"
"angular-messages/angular-messages.min.js"
"angular-mocks/angular-mocks.js"
"angular-resource/angular-resource.js"
"angular-sanitize/angular-sanitize.js"
"angular-ui-router/release/angular-ui-router.js"
# "bootstrap/dist/css/bootstrap.css"
# "bootstrap/dist/css/bootstrap-theme.css"
"bootstrap/dist/fonts/glyphicons-halflings-regular.eot"
"bootstrap/dist/fonts/glyphicons-halflings-regular.svg"
"bootstrap/dist/fonts/glyphicons-halflings-regular.ttf"
"bootstrap/dist/fonts/glyphicons-halflings-regular.woff"
"bootstrap/dist/fonts/glyphicons-halflings-regular.woff"
"jquery/dist/jquery.js"
"firebase/firebase.js"
].map (path)-> require("path").resolve __dirname, "../node_modules/", path
|
[
{
"context": " destroy)\nasync.series([\n -> dyn.senders.create(\"foo@bars.com\").then (x) ->\n log.info 'RESULT', \"created se",
"end": 354,
"score": 0.9999256134033203,
"start": 342,
"tag": "EMAIL",
"value": "foo@bars.com"
},
{
"context": "ON.stringify(x)}\"\n x\n -> dyn.... | examples/messaging_example.coffee | dyninc/dyn-js | 6 |
Dyn = require '../lib/dyn-js'
async = require 'async-q'
log = require 'npmlog'
dynClient = Dyn({messaging:{apikey:"yourapikey"}})
dynClient.log.level = 'silly'
dyn = dynClient.messaging
fail = (bad) -> console.log 'FAIL', arguments
# senders: list, create, update, details, status, dkim, destroy)
async.series([
-> dyn.senders.create("foo@bars.com").then (x) ->
log.info 'RESULT', "created sender: #{JSON.stringify(x)}"
x
-> dyn.senders.list().then (x) ->
log.info 'RESULT', "got senders: #{JSON.stringify(x)}"
x
-> dyn.senders.update("foo@bars.com", 3).then (x) ->
log.info 'RESULT', "updated sender: #{JSON.stringify(x)}"
x
-> dyn.senders.status("foo@bars.com").then (x) ->
log.info 'RESULT', "got sender status: #{JSON.stringify(x)}"
x
-> dyn.senders.details("foo@bars.com").then (x) ->
log.info 'RESULT', "got sender detail: #{JSON.stringify(x)}"
x
-> dyn.senders.dkim("foo@bars.com", "testdkim").then (x) ->
log.info 'RESULT', "set sender dkim: #{JSON.stringify(x)}"
x
-> dyn.senders.destroy("foo@bars.com").then (x) ->
log.info 'RESULT', "deleted sender: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# accounts: list, create, destroy, list xheaders, update xheaders
async.series([
-> dyn.accounts.create("example@foo.com", "secret", "bar", "1234567890").then (x) ->
log.info 'RESULT', "created account: #{JSON.stringify(x)}"
x
-> dyn.accounts.list().then (x) ->
log.info 'RESULT', "got accounts: #{JSON.stringify(x)}"
x
-> dyn.accounts.list_xheaders().then (x) ->
log.info 'RESULT', "got xheaders: #{JSON.stringify(x)}"
x
-> dyn.accounts.update_xheaders("X-Test1", "X-AnotherTest2", "X-Testing3", "X-FullyTested4").then (x) ->
log.info 'RESULT', "updated xheaders: #{JSON.stringify(x)}"
x
-> dyn.accounts.destroy("example@foo.com").then (x) ->
log.info 'RESULT', "deleted account: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# recipients: activate, status
async.series([
-> dyn.recipients.activate("foo@bars.com").then (x) ->
log.info 'RESULT', "activated recipient: #{JSON.stringify(x)}"
x
-> dyn.recipients.status("foo@bars.com").then (x) ->
log.info 'RESULT', "got status of recipient: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# send mail: create
async.series([
-> dyn.send_mail.create("foo@bars.com", "recipient@destination.com", "hello, new js api", "it works!").then (x) ->
log.info 'RESULT', "sent mail: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# suppressions: list, create, activate, count
async.series([
-> dyn.suppressions.create("foos@bars.com").then (x) ->
log.info 'RESULT', "suppressed: #{JSON.stringify(x)}"
x
-> dyn.suppressions.list().then (x) ->
log.info 'RESULT', "got suppressions: #{JSON.stringify(x)}"
x
-> dyn.suppressions.activate("foos@bars.com").then (x) ->
log.info 'RESULT', "activated suppression: #{JSON.stringify(x)}"
x
-> dyn.suppressions.activate("foos@bars.com").then (x) ->
log.info 'RESULT', "activated suppression: #{JSON.stringify(x)}"
x
-> dyn.suppressions.count().then (x) ->
log.info 'RESULT', "suppression count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# sent: list, count
async.series([
-> dyn.sent_mail.list().then (x) ->
log.info 'RESULT', "got sent list: #{JSON.stringify(x)}"
x
-> dyn.sent_mail.count().then (x) ->
log.info 'RESULT', "got sent count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# delivery: list, count
async.series([
-> dyn.delivery.list('2014-01-01', '2014-07-18').then (x) ->
log.info 'RESULT', "got delivery list: #{JSON.stringify(x)}"
x
-> dyn.delivery.count('2014-01-01', '2014-07-18').then (x) ->
log.info 'RESULT', "got delivery count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# bounces: list, count
async.series([
-> dyn.bounces.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got bounces list: #{JSON.stringify(x)}"
x
-> dyn.bounces.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got bounces count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# complaints: list, count
async.series([
-> dyn.complaints.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got complaints list: #{JSON.stringify(x)}"
x
-> dyn.complaints.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got complaints count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# issues: list, count
async.series([
-> dyn.issues.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got issues list: #{JSON.stringify(x)}"
x
-> dyn.issues.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got issues count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# opens: list, count, unique, unique count
async.series([
-> dyn.opens.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got opens list: #{JSON.stringify(x)}"
x
-> dyn.opens.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got opens count: #{JSON.stringify(x)}"
x
-> dyn.opens.unique('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique opens list: #{JSON.stringify(x)}"
x
-> dyn.opens.unique_count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique opens count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# clicks: list, count, unique, unique count
async.series([
-> dyn.clicks.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got clicks list: #{JSON.stringify(x)}"
x
-> dyn.clicks.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got clicks count: #{JSON.stringify(x)}"
x
-> dyn.clicks.unique('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique clicks list: #{JSON.stringify(x)}"
x
-> dyn.clicks.unique_count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique clicks count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
| 223168 |
Dyn = require '../lib/dyn-js'
async = require 'async-q'
log = require 'npmlog'
dynClient = Dyn({messaging:{apikey:"yourapikey"}})
dynClient.log.level = 'silly'
dyn = dynClient.messaging
fail = (bad) -> console.log 'FAIL', arguments
# senders: list, create, update, details, status, dkim, destroy)
async.series([
-> dyn.senders.create("<EMAIL>").then (x) ->
log.info 'RESULT', "created sender: #{JSON.stringify(x)}"
x
-> dyn.senders.list().then (x) ->
log.info 'RESULT', "got senders: #{JSON.stringify(x)}"
x
-> dyn.senders.update("<EMAIL>", 3).then (x) ->
log.info 'RESULT', "updated sender: #{JSON.stringify(x)}"
x
-> dyn.senders.status("<EMAIL>").then (x) ->
log.info 'RESULT', "got sender status: #{JSON.stringify(x)}"
x
-> dyn.senders.details("<EMAIL>").then (x) ->
log.info 'RESULT', "got sender detail: #{JSON.stringify(x)}"
x
-> dyn.senders.dkim("<EMAIL>", "testdkim").then (x) ->
log.info 'RESULT', "set sender dkim: #{JSON.stringify(x)}"
x
-> dyn.senders.destroy("<EMAIL>").then (x) ->
log.info 'RESULT', "deleted sender: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# accounts: list, create, destroy, list xheaders, update xheaders
async.series([
-> dyn.accounts.create("<EMAIL>", "secret", "bar", "1234567890").then (x) ->
log.info 'RESULT', "created account: #{JSON.stringify(x)}"
x
-> dyn.accounts.list().then (x) ->
log.info 'RESULT', "got accounts: #{JSON.stringify(x)}"
x
-> dyn.accounts.list_xheaders().then (x) ->
log.info 'RESULT', "got xheaders: #{JSON.stringify(x)}"
x
-> dyn.accounts.update_xheaders("X-Test1", "X-AnotherTest2", "X-Testing3", "X-FullyTested4").then (x) ->
log.info 'RESULT', "updated xheaders: #{JSON.stringify(x)}"
x
-> dyn.accounts.destroy("<EMAIL>").then (x) ->
log.info 'RESULT', "deleted account: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# recipients: activate, status
async.series([
-> dyn.recipients.activate("<EMAIL>").then (x) ->
log.info 'RESULT', "activated recipient: #{JSON.stringify(x)}"
x
-> dyn.recipients.status("<EMAIL>").then (x) ->
log.info 'RESULT', "got status of recipient: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# send mail: create
async.series([
-> dyn.send_mail.create("<EMAIL>", "<EMAIL>", "hello, new js api", "it works!").then (x) ->
log.info 'RESULT', "sent mail: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# suppressions: list, create, activate, count
async.series([
-> dyn.suppressions.create("<EMAIL>").then (x) ->
log.info 'RESULT', "suppressed: #{JSON.stringify(x)}"
x
-> dyn.suppressions.list().then (x) ->
log.info 'RESULT', "got suppressions: #{JSON.stringify(x)}"
x
-> dyn.suppressions.activate("<EMAIL>").then (x) ->
log.info 'RESULT', "activated suppression: #{JSON.stringify(x)}"
x
-> dyn.suppressions.activate("<EMAIL>").then (x) ->
log.info 'RESULT', "activated suppression: #{JSON.stringify(x)}"
x
-> dyn.suppressions.count().then (x) ->
log.info 'RESULT', "suppression count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# sent: list, count
async.series([
-> dyn.sent_mail.list().then (x) ->
log.info 'RESULT', "got sent list: #{JSON.stringify(x)}"
x
-> dyn.sent_mail.count().then (x) ->
log.info 'RESULT', "got sent count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# delivery: list, count
async.series([
-> dyn.delivery.list('2014-01-01', '2014-07-18').then (x) ->
log.info 'RESULT', "got delivery list: #{JSON.stringify(x)}"
x
-> dyn.delivery.count('2014-01-01', '2014-07-18').then (x) ->
log.info 'RESULT', "got delivery count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# bounces: list, count
async.series([
-> dyn.bounces.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got bounces list: #{JSON.stringify(x)}"
x
-> dyn.bounces.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got bounces count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# complaints: list, count
async.series([
-> dyn.complaints.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got complaints list: #{JSON.stringify(x)}"
x
-> dyn.complaints.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got complaints count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# issues: list, count
async.series([
-> dyn.issues.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got issues list: #{JSON.stringify(x)}"
x
-> dyn.issues.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got issues count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# opens: list, count, unique, unique count
async.series([
-> dyn.opens.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got opens list: #{JSON.stringify(x)}"
x
-> dyn.opens.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got opens count: #{JSON.stringify(x)}"
x
-> dyn.opens.unique('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique opens list: #{JSON.stringify(x)}"
x
-> dyn.opens.unique_count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique opens count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# clicks: list, count, unique, unique count
async.series([
-> dyn.clicks.list('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got clicks list: #{JSON.stringify(x)}"
x
-> dyn.clicks.count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got clicks count: #{JSON.stringify(x)}"
x
-> dyn.clicks.unique('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique clicks list: #{JSON.stringify(x)}"
x
-> dyn.clicks.unique_count('2013-11-19', '2014-06-18').then (x) ->
log.info 'RESULT', "got unique clicks count: #{JSON.stringify(x)}"
x
]).then ->
_(arguments[0]).forEach (x) ->
log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
| true |
# Demo wiring: dyn-js client plus promise-sequencing and logging helpers.
Dyn = require '../lib/dyn-js'
async = require 'async-q'
log = require 'npmlog'

# Build a client (substitute a real API key) and turn client logging all the way up.
client = Dyn({messaging:{apikey:"yourapikey"}})
client.log.level = 'silly'
dyn = client.messaging

# Shared rejection handler for every promise chain below.
fail = (bad) -> console.log 'FAIL', arguments
# senders: list, create, update, details, status, dkim, destroy
# Exercise the full sender lifecycle in order; each step logs its JSON result.
async.series([
  -> dyn.senders.create("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "created sender: #{JSON.stringify(x)}"
    x
  -> dyn.senders.list().then (x) ->
    log.info 'RESULT', "got senders: #{JSON.stringify(x)}"
    x
  -> dyn.senders.update("PI:EMAIL:<EMAIL>END_PI", 3).then (x) ->
    log.info 'RESULT', "updated sender: #{JSON.stringify(x)}"
    x
  -> dyn.senders.status("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "got sender status: #{JSON.stringify(x)}"
    x
  -> dyn.senders.details("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "got sender detail: #{JSON.stringify(x)}"
    x
  -> dyn.senders.dkim("PI:EMAIL:<EMAIL>END_PI", "testdkim").then (x) ->
    log.info 'RESULT', "set sender dkim: #{JSON.stringify(x)}"
    x
  -> dyn.senders.destroy("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "deleted sender: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# accounts: list, create, destroy, list xheaders, update xheaders
# Exercise the account lifecycle and xheader configuration; each step logs its JSON result.
async.series([
  -> dyn.accounts.create("PI:EMAIL:<EMAIL>END_PI", "secret", "bar", "1234567890").then (x) ->
    log.info 'RESULT', "created account: #{JSON.stringify(x)}"
    x
  -> dyn.accounts.list().then (x) ->
    log.info 'RESULT', "got accounts: #{JSON.stringify(x)}"
    x
  -> dyn.accounts.list_xheaders().then (x) ->
    log.info 'RESULT', "got xheaders: #{JSON.stringify(x)}"
    x
  -> dyn.accounts.update_xheaders("X-Test1", "X-AnotherTest2", "X-Testing3", "X-FullyTested4").then (x) ->
    log.info 'RESULT', "updated xheaders: #{JSON.stringify(x)}"
    x
  -> dyn.accounts.destroy("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "deleted account: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# recipients: activate, status
# Activate a recipient address, then query its status; each step logs its JSON result.
async.series([
  -> dyn.recipients.activate("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "activated recipient: #{JSON.stringify(x)}"
    x
  -> dyn.recipients.status("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "got status of recipient: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# send mail: create
# Send a single test message (from, to, subject, body) and log the JSON result.
async.series([
  -> dyn.send_mail.create("PI:EMAIL:<EMAIL>END_PI", "PI:EMAIL:<EMAIL>END_PI", "hello, new js api", "it works!").then (x) ->
    log.info 'RESULT', "sent mail: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# suppressions: list, create, activate, count
# Suppress an address, list suppressions, re-activate, and count; each step logs its JSON result.
async.series([
  -> dyn.suppressions.create("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "suppressed: #{JSON.stringify(x)}"
    x
  -> dyn.suppressions.list().then (x) ->
    log.info 'RESULT', "got suppressions: #{JSON.stringify(x)}"
    x
  -> dyn.suppressions.activate("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "activated suppression: #{JSON.stringify(x)}"
    x
  # NOTE(review): activate is invoked a second time with the same argument —
  # this looks like a copy-paste leftover; confirm whether a different call
  # (or address) was intended. Left in place to preserve behavior.
  -> dyn.suppressions.activate("PI:EMAIL:<EMAIL>END_PI").then (x) ->
    log.info 'RESULT', "activated suppression: #{JSON.stringify(x)}"
    x
  -> dyn.suppressions.count().then (x) ->
    log.info 'RESULT', "suppression count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# sent: list, count
# Fetch the sent-mail report and its total; each step logs its JSON result.
async.series([
  -> dyn.sent_mail.list().then (x) ->
    log.info 'RESULT', "got sent list: #{JSON.stringify(x)}"
    x
  -> dyn.sent_mail.count().then (x) ->
    log.info 'RESULT', "got sent count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# delivery: list, count
# Fetch the delivery report and its total for a date range; each step logs its JSON result.
async.series([
  -> dyn.delivery.list('2014-01-01', '2014-07-18').then (x) ->
    log.info 'RESULT', "got delivery list: #{JSON.stringify(x)}"
    x
  -> dyn.delivery.count('2014-01-01', '2014-07-18').then (x) ->
    log.info 'RESULT', "got delivery count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# bounces: list, count
# Fetch the bounce report and its total for a date range; each step logs its JSON result.
async.series([
  -> dyn.bounces.list('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got bounces list: #{JSON.stringify(x)}"
    x
  -> dyn.bounces.count('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got bounces count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# complaints: list, count
# Fetch the complaint report and its total for a date range; each step logs its JSON result.
async.series([
  -> dyn.complaints.list('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got complaints list: #{JSON.stringify(x)}"
    x
  -> dyn.complaints.count('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got complaints count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# issues: list, count
# Fetch the issue report and its total for a date range; each step logs its JSON result.
async.series([
  -> dyn.issues.list('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got issues list: #{JSON.stringify(x)}"
    x
  -> dyn.issues.count('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got issues count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# opens: list, count, unique, unique count
# Fetch open-tracking reports (full and unique) for a date range; each step logs its JSON result.
async.series([
  -> dyn.opens.list('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got opens list: #{JSON.stringify(x)}"
    x
  -> dyn.opens.count('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got opens count: #{JSON.stringify(x)}"
    x
  -> dyn.opens.unique('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got unique opens list: #{JSON.stringify(x)}"
    x
  -> dyn.opens.unique_count('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got unique opens count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
# clicks: list, count, unique, unique count
# Fetch click-tracking reports (full and unique) for a date range; each step logs its JSON result.
async.series([
  -> dyn.clicks.list('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got clicks list: #{JSON.stringify(x)}"
    x
  -> dyn.clicks.count('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got clicks count: #{JSON.stringify(x)}"
    x
  -> dyn.clicks.unique('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got unique clicks list: #{JSON.stringify(x)}"
    x
  -> dyn.clicks.unique_count('2013-11-19', '2014-06-18').then (x) ->
    log.info 'RESULT', "got unique clicks count: #{JSON.stringify(x)}"
    x
]).then ->
  # Log each collected result. Iterate natively: `_` (lodash/underscore) is
  # never required in this file, so _(arguments[0]).forEach would throw a
  # ReferenceError at runtime.
  for x in arguments[0]
    log.info 'RESULT', "finished : #{JSON.stringify(x)}"
, fail
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.