entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "re 'crypto'\r\n genToken = (type, id, t, secret = \"akfish\") ->\r\n # TODO: make sever-side secret configur",
"end": 1128,
"score": 0.647170901298523,
"start": 1122,
"tag": "KEY",
"value": "akfish"
}
] | lib/io/server.coffee | akfish/atom-entanglement | 0 | if require.main == module
console.log "Loaded"
app = require('express')()
server = require('http').Server(app)
sio = require('socket.io')(server)
Logger = require('./logger')
atom_io = sio.of '/atom' # atom registery
device_io = sio.of '/device' # device registery
log_io = sio.of '/log' # log channel
rpc_io = sio.of '/rpc' # rpc channel
# 0. Namespace / is for watch dogs
# 1. Atoms/devices must first complete registery on /.
# Each ns should do auth as configured. Server will hold their socket.id
# as authentication token.
# 2. Then they can connect to `/rpc` channel and request endpoint list.
# 3. They can connect to `/log` channel to read and write logs
# 4. Atoms can control server on `/atom` channel
# 5. They can perform RPC over `/rpc` channel
logger = new Logger(log_io)
epAC =
atom:
primary: '/atom'
allowed: ['/atom', '/rpc', '/log']
device:
primary: '/device'
allowed: ['/device', '/rpc', '/log']
crypto = require 'crypto'
genToken = (type, id, t, secret = "akfish") ->
# TODO: make sever-side secret configurable
checksum = crypto.createHash('sha1')
checksum.update("#{type}:#{id}@#{t}-#{secret}")
checksum.digest('hex')
epReg = {}
accessControl = (ns) ->
(socket, next) ->
logger.log "AC #{socket.id}"
query = socket.handshake.query
access_token = query.access_token
if not access_token?
logger.log "No access_token"
return next(new Error("Access token is not provided"))
if not epReg[access_token]?
logger.log "Invalid access_token"
return next(new Error("Access token is not valid"))
permission = epReg[access_token]
ac = epAC[permission.type]
logger.log permission
logger.log ac
# TODO: validate token against parent socket.id
actual_token = genToken(permission.type, query.parent, permission.t)
if actual_token != access_token
logger.log "Invalid access_token: token does not match the socket"
logger.log "Expect: #{access_token}, Actual: #{actual_token}"
return next(new Error("Access token is not valid: token does not match the socket"))
if ns not in ac.allowed
logger.log "Endpoint '#{permission.type}' is not allowed to access #{ns}"
return next(new Error("Endpoint '#{permission.type}' is not allowed to access #{ns}"))
next()
atom_io.use accessControl('/atom')
log_io.use accessControl('/log')
sio.on "connection", (socket) ->
logger.log "Socket #{socket.id} connected"
socket.on "register", (ep, cb) ->
# TODO: only allow localhost to register as atom
logger.log "Registering #{socket.id}"
logger.log ep
ac = epAC[ep.type]
if not ac?
return cb(new Error("Unknown endpoint type: #{ep.type}"))
# TODO: store token timestamp for validation
t = new Date().getTime()
token = genToken ep.type, socket.id, t
epReg[token] =
type: ep.type
t: t
cb null, access: ac, token: token
socket.on "disconnect", ->
logger.log "Socket #{socket.id} disconnected"
sio.on "error", (err) ->
logger.log err
start = (port) ->
logger.log "Server started: #{port}"
server.listen parseInt(port)
main = (opts) ->
logger.log "Process started: #{process.pid}"
start opts.port
logger.log process.argv
opts = require('minimist')(process.argv.slice(2))
logger.log opts
main(opts)
prepareServerProcessArgv = (opts) ->
path = require('path')
coffee_executable = path.resolve __dirname, "../../node_modules/coffee-script/bin/coffee"
console.log coffee_executable
console.log opts
argv = [coffee_executable, __filename]
for key, value of opts
argv.push "--#{key}"
argv.push value
console.log argv
argv
module.exports =
isRunning: (opts, cb) ->
io = require('socket.io-client')("http://localhost:#{opts.port}/")
notRunning = (reason) ->
io.disconnect()
cb new Error("Server is not running. Reason: #{reason}")
io.on "connect", ->
io.disconnect()
cb null
io.on "error", notRunning
io.on "connect_error", notRunning
watch: (opts) ->
io = require('socket.io-client')("http://localhost:#{opts.port}/", forceNew: true)
child = null
killer = -1
tryRun = =>
if child?
if child.exitCode != null
console.log "Child already existed with #{child.exitCode}"
child = null
else if not io.connected
console.log "Still trying"
if killer < 0
console.log "Kill in 10s"
killer = setTimeout (->
console.log "Killing child"
child.kill()
killer = -1
tryRun()
), 10000
return
console.log "Server is down, restarting"
child ?= this.run opts
io.on "connect", ->
if killer > 0
console.log "Kill the killer"
clearTimeout killer
console.log "Server is up"
io.on "disconnect", ->
console.log child
console.log "Disconnected from server"
tryRun()
# io.on "error", (err) ->
# console.log "Error: "
# console.log err
# tryRun()
io.on "connect_error", (err) ->
console.log "Connection error"
console.log err
tryRun()
run: (opts) ->
console.log "Run"
{spawn} = require('child_process')
fs = require('fs')
log = fs.openSync('i:\\server.log', 'a')
# log = fs.openSync('./server.log', 'a')
argv = prepareServerProcessArgv(opts)
child = spawn 'node', argv, {detached: true, stdio: ['ignore', log, log]}
child.unref()
child.on "error", (err) ->
console.log "Server process error:"
console.log err
child.on "exit", (code, signal) ->
console.log "Server process exited: #{code}, SGN = #{signal}"
console.log "Server process: #{child.pid}, port: #{opts.port}"
child
| 18889 | if require.main == module
console.log "Loaded"
app = require('express')()
server = require('http').Server(app)
sio = require('socket.io')(server)
Logger = require('./logger')
atom_io = sio.of '/atom' # atom registery
device_io = sio.of '/device' # device registery
log_io = sio.of '/log' # log channel
rpc_io = sio.of '/rpc' # rpc channel
# 0. Namespace / is for watch dogs
# 1. Atoms/devices must first complete registery on /.
# Each ns should do auth as configured. Server will hold their socket.id
# as authentication token.
# 2. Then they can connect to `/rpc` channel and request endpoint list.
# 3. They can connect to `/log` channel to read and write logs
# 4. Atoms can control server on `/atom` channel
# 5. They can perform RPC over `/rpc` channel
logger = new Logger(log_io)
epAC =
atom:
primary: '/atom'
allowed: ['/atom', '/rpc', '/log']
device:
primary: '/device'
allowed: ['/device', '/rpc', '/log']
crypto = require 'crypto'
genToken = (type, id, t, secret = "<KEY>") ->
# TODO: make sever-side secret configurable
checksum = crypto.createHash('sha1')
checksum.update("#{type}:#{id}@#{t}-#{secret}")
checksum.digest('hex')
epReg = {}
accessControl = (ns) ->
(socket, next) ->
logger.log "AC #{socket.id}"
query = socket.handshake.query
access_token = query.access_token
if not access_token?
logger.log "No access_token"
return next(new Error("Access token is not provided"))
if not epReg[access_token]?
logger.log "Invalid access_token"
return next(new Error("Access token is not valid"))
permission = epReg[access_token]
ac = epAC[permission.type]
logger.log permission
logger.log ac
# TODO: validate token against parent socket.id
actual_token = genToken(permission.type, query.parent, permission.t)
if actual_token != access_token
logger.log "Invalid access_token: token does not match the socket"
logger.log "Expect: #{access_token}, Actual: #{actual_token}"
return next(new Error("Access token is not valid: token does not match the socket"))
if ns not in ac.allowed
logger.log "Endpoint '#{permission.type}' is not allowed to access #{ns}"
return next(new Error("Endpoint '#{permission.type}' is not allowed to access #{ns}"))
next()
atom_io.use accessControl('/atom')
log_io.use accessControl('/log')
sio.on "connection", (socket) ->
logger.log "Socket #{socket.id} connected"
socket.on "register", (ep, cb) ->
# TODO: only allow localhost to register as atom
logger.log "Registering #{socket.id}"
logger.log ep
ac = epAC[ep.type]
if not ac?
return cb(new Error("Unknown endpoint type: #{ep.type}"))
# TODO: store token timestamp for validation
t = new Date().getTime()
token = genToken ep.type, socket.id, t
epReg[token] =
type: ep.type
t: t
cb null, access: ac, token: token
socket.on "disconnect", ->
logger.log "Socket #{socket.id} disconnected"
sio.on "error", (err) ->
logger.log err
start = (port) ->
logger.log "Server started: #{port}"
server.listen parseInt(port)
main = (opts) ->
logger.log "Process started: #{process.pid}"
start opts.port
logger.log process.argv
opts = require('minimist')(process.argv.slice(2))
logger.log opts
main(opts)
prepareServerProcessArgv = (opts) ->
path = require('path')
coffee_executable = path.resolve __dirname, "../../node_modules/coffee-script/bin/coffee"
console.log coffee_executable
console.log opts
argv = [coffee_executable, __filename]
for key, value of opts
argv.push "--#{key}"
argv.push value
console.log argv
argv
module.exports =
isRunning: (opts, cb) ->
io = require('socket.io-client')("http://localhost:#{opts.port}/")
notRunning = (reason) ->
io.disconnect()
cb new Error("Server is not running. Reason: #{reason}")
io.on "connect", ->
io.disconnect()
cb null
io.on "error", notRunning
io.on "connect_error", notRunning
watch: (opts) ->
io = require('socket.io-client')("http://localhost:#{opts.port}/", forceNew: true)
child = null
killer = -1
tryRun = =>
if child?
if child.exitCode != null
console.log "Child already existed with #{child.exitCode}"
child = null
else if not io.connected
console.log "Still trying"
if killer < 0
console.log "Kill in 10s"
killer = setTimeout (->
console.log "Killing child"
child.kill()
killer = -1
tryRun()
), 10000
return
console.log "Server is down, restarting"
child ?= this.run opts
io.on "connect", ->
if killer > 0
console.log "Kill the killer"
clearTimeout killer
console.log "Server is up"
io.on "disconnect", ->
console.log child
console.log "Disconnected from server"
tryRun()
# io.on "error", (err) ->
# console.log "Error: "
# console.log err
# tryRun()
io.on "connect_error", (err) ->
console.log "Connection error"
console.log err
tryRun()
run: (opts) ->
console.log "Run"
{spawn} = require('child_process')
fs = require('fs')
log = fs.openSync('i:\\server.log', 'a')
# log = fs.openSync('./server.log', 'a')
argv = prepareServerProcessArgv(opts)
child = spawn 'node', argv, {detached: true, stdio: ['ignore', log, log]}
child.unref()
child.on "error", (err) ->
console.log "Server process error:"
console.log err
child.on "exit", (code, signal) ->
console.log "Server process exited: #{code}, SGN = #{signal}"
console.log "Server process: #{child.pid}, port: #{opts.port}"
child
| true | if require.main == module
console.log "Loaded"
app = require('express')()
server = require('http').Server(app)
sio = require('socket.io')(server)
Logger = require('./logger')
atom_io = sio.of '/atom' # atom registery
device_io = sio.of '/device' # device registery
log_io = sio.of '/log' # log channel
rpc_io = sio.of '/rpc' # rpc channel
# 0. Namespace / is for watch dogs
# 1. Atoms/devices must first complete registery on /.
# Each ns should do auth as configured. Server will hold their socket.id
# as authentication token.
# 2. Then they can connect to `/rpc` channel and request endpoint list.
# 3. They can connect to `/log` channel to read and write logs
# 4. Atoms can control server on `/atom` channel
# 5. They can perform RPC over `/rpc` channel
logger = new Logger(log_io)
epAC =
atom:
primary: '/atom'
allowed: ['/atom', '/rpc', '/log']
device:
primary: '/device'
allowed: ['/device', '/rpc', '/log']
crypto = require 'crypto'
genToken = (type, id, t, secret = "PI:KEY:<KEY>END_PI") ->
# TODO: make sever-side secret configurable
checksum = crypto.createHash('sha1')
checksum.update("#{type}:#{id}@#{t}-#{secret}")
checksum.digest('hex')
epReg = {}
accessControl = (ns) ->
(socket, next) ->
logger.log "AC #{socket.id}"
query = socket.handshake.query
access_token = query.access_token
if not access_token?
logger.log "No access_token"
return next(new Error("Access token is not provided"))
if not epReg[access_token]?
logger.log "Invalid access_token"
return next(new Error("Access token is not valid"))
permission = epReg[access_token]
ac = epAC[permission.type]
logger.log permission
logger.log ac
# TODO: validate token against parent socket.id
actual_token = genToken(permission.type, query.parent, permission.t)
if actual_token != access_token
logger.log "Invalid access_token: token does not match the socket"
logger.log "Expect: #{access_token}, Actual: #{actual_token}"
return next(new Error("Access token is not valid: token does not match the socket"))
if ns not in ac.allowed
logger.log "Endpoint '#{permission.type}' is not allowed to access #{ns}"
return next(new Error("Endpoint '#{permission.type}' is not allowed to access #{ns}"))
next()
atom_io.use accessControl('/atom')
log_io.use accessControl('/log')
sio.on "connection", (socket) ->
logger.log "Socket #{socket.id} connected"
socket.on "register", (ep, cb) ->
# TODO: only allow localhost to register as atom
logger.log "Registering #{socket.id}"
logger.log ep
ac = epAC[ep.type]
if not ac?
return cb(new Error("Unknown endpoint type: #{ep.type}"))
# TODO: store token timestamp for validation
t = new Date().getTime()
token = genToken ep.type, socket.id, t
epReg[token] =
type: ep.type
t: t
cb null, access: ac, token: token
socket.on "disconnect", ->
logger.log "Socket #{socket.id} disconnected"
sio.on "error", (err) ->
logger.log err
start = (port) ->
logger.log "Server started: #{port}"
server.listen parseInt(port)
main = (opts) ->
logger.log "Process started: #{process.pid}"
start opts.port
logger.log process.argv
opts = require('minimist')(process.argv.slice(2))
logger.log opts
main(opts)
prepareServerProcessArgv = (opts) ->
path = require('path')
coffee_executable = path.resolve __dirname, "../../node_modules/coffee-script/bin/coffee"
console.log coffee_executable
console.log opts
argv = [coffee_executable, __filename]
for key, value of opts
argv.push "--#{key}"
argv.push value
console.log argv
argv
module.exports =
isRunning: (opts, cb) ->
io = require('socket.io-client')("http://localhost:#{opts.port}/")
notRunning = (reason) ->
io.disconnect()
cb new Error("Server is not running. Reason: #{reason}")
io.on "connect", ->
io.disconnect()
cb null
io.on "error", notRunning
io.on "connect_error", notRunning
watch: (opts) ->
io = require('socket.io-client')("http://localhost:#{opts.port}/", forceNew: true)
child = null
killer = -1
tryRun = =>
if child?
if child.exitCode != null
console.log "Child already existed with #{child.exitCode}"
child = null
else if not io.connected
console.log "Still trying"
if killer < 0
console.log "Kill in 10s"
killer = setTimeout (->
console.log "Killing child"
child.kill()
killer = -1
tryRun()
), 10000
return
console.log "Server is down, restarting"
child ?= this.run opts
io.on "connect", ->
if killer > 0
console.log "Kill the killer"
clearTimeout killer
console.log "Server is up"
io.on "disconnect", ->
console.log child
console.log "Disconnected from server"
tryRun()
# io.on "error", (err) ->
# console.log "Error: "
# console.log err
# tryRun()
io.on "connect_error", (err) ->
console.log "Connection error"
console.log err
tryRun()
run: (opts) ->
console.log "Run"
{spawn} = require('child_process')
fs = require('fs')
log = fs.openSync('i:\\server.log', 'a')
# log = fs.openSync('./server.log', 'a')
argv = prepareServerProcessArgv(opts)
child = spawn 'node', argv, {detached: true, stdio: ['ignore', log, log]}
child.unref()
child.on "error", (err) ->
console.log "Server process error:"
console.log err
child.on "exit", (code, signal) ->
console.log "Server process exited: #{code}, SGN = #{signal}"
console.log "Server process: #{child.pid}, port: #{opts.port}"
child
|
[
{
"context": "li>{@}</li>{end names}</ul>\"\n\n data = names: [\"Abby\",\"Matt\",\"Jhon\"]\n\n result = Bigot.render source",
"end": 627,
"score": 0.9998362064361572,
"start": 623,
"tag": "NAME",
"value": "Abby"
},
{
"context": "/li>{end names}</ul>\"\n\n data = names: [\"Ab... | test/test.coffee | FireZenk/bigot | 2 | Bigot = require("./../lib/index.js");
exports.BigotTest =
'Test 01 - Render objects': (test) ->
source = "<h1>{title}</h1><h6>{subtitle}</h6><p>{content}</p><p>2 + 3 = {func}</p>"
data = title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p>"
do test.done
'Test 02 - Render array of objects': (test) ->
source = "<ul>{loop names}<li>{@}</li>{end names}</ul>"
data = names: ["Abby","Matt","Jhon"]
result = Bigot.render source, data
test.equal result,"<ul><li>Abby</li><li>Matt</li><li>Jhon</li></ul>"
do test.done
'Test 03 - Render objects into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age}</li>{end people}</ul>"
data = people: [
{name: "Abby", age: "24"},
{name: "Matt", age: "32"},
{name: "John", age: "18"}]
result = Bigot.render source, data
test.equal result,"<ul><li>Abby, 24</li><li>Matt, 32</li><li>John, 18</li></ul>"
do test.done
'Test 04 - Render arrays into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age} {loop sports}<span>{@}</span>{end sports}</li>{end people}</ul>"
data = people: [
{name: "Abby", age: "24", sports: ["hockey","curling"]},
{name: "Matt", age: "32", sports: ["futbol"]},
{name: "John", age: "18", sports: ["tennis","basketball"]}]
result = Bigot.render source, data
test.equal result,"<ul><li>Abby, 24 <span>hockey</span><span>curling</span></li><li>Matt, 32 <span>futbol</span></li><li>John, 18 <span>tennis</span><span>basketball</span></li></ul>"
do test.done
'Test 05 - Render object arrays into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age} {loop activities}<span>{sport} and {hobby}</span>{end activities}</li>{end people}</ul>";
data = people: [{name: "Abby", age: "24", activities: [{
sport: "hockey",
hobby: "drive"}] },
{name: "Matt", age: "32", activities: [{
sport: "football",
hobby: "pets"}] },
{name: "John", age: "18", activities: [{
sport: "tennis",
hobby: "videogames"}] }]
result = Bigot.render source, data
test.equal result,"<ul><li>Abby, 24 <span>hockey and drive</span></li><li>Matt, 32 <span>football and pets</span></li><li>John, 18 <span>tennis and videogames</span></li></ul>"
do test.done
'Test 06 - Render including templates': (test) ->
source = "{include header}<h1>{title}</h1><h6>{subtitle}</h6><p>{content}</p><p>2 + 3 = {func}</p>{include footer}"
data = header: "./test/header.html", footer: "./test/footer.html", title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<html><head><title>Bigot test</title></head><body>\n<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p></body></html>\n"
do test.done
'Test 07 - Render conditionals': (test) ->
source = "<h1>{title}</h1><h6>{subtitle}</h6>{if bool_func}<p>{content}</p>{else}<p>{content2}</p>{end bool_func}<p>2 + 3 = {func}</p>"
data =
title: "Hello World!",
subtitle: "and hello Bigot!"
bool_func: () -> true
content: "This is a Bigot test template with conditionals"
content2: "This is a Bigot test template"
func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template with conditionals</p><p>2 + 3 = 5</p>"
do test.done
'Test 08 - Render nested conditionals': (test) ->
source = "{if showMe}<p>Hello {if canShow}<span>friend!</span>{else}<span>{name}!</span>{end canShow}</p>{else}<p>Bye!</p>{end showMe}"
data =
name: "Lightning McQueen"
showMe: () -> true
canShow: () -> false
result = Bigot.render source, data
test.equal result,"<p>Hello <span>Lightning McQueen!</span></p>"
do test.done
'Test 09 - Render nested conditionals with one loop inside': (test) ->
source = "{if showMe}<p>Hello {if canShow}<span>friend!</span>{else}<span>{name}!</span><ul>{loop animals}<li>{@}</li>{end animals}</ul>{end canShow}</p>{else}<p>Bye!</p>{end showMe}"
data =
name: "Lightning McQueen"
showMe: () -> true
canShow: () -> false
animals: ["Lion","Tiger","Panther"]
result = Bigot.render source, data
test.equal result,"<p>Hello <span>Lightning McQueen!</span><ul><li>Lion</li><li>Tiger</li><li>Panther</li></ul></p>"
do test.done
'Test 10 - Render commented code': (test) ->
source = "{comment Start of file}<h1>{title}</h1><h6>{subtitle}</h6>{comment Middle of file}<p>{content}</p><p>2 + 3 = {func}</p>{comment End of file}"
data = title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p>"
do test.done
'Test 11 - Render code with helpers': (test) ->
source = "<h1>{toUpper title}</h1><h6>{toLower subtitle}</h6>"
data =
title: "Hello World!"
subtitle: "and hello Bigot!"
toUpper: (text) ->
text = JSON.stringify(this)
text = text.substring(1, text.length-1)
do text.toUpperCase
toLower: (text) ->
text = JSON.stringify(this)
text = text.substring(1, text.length-1)
do text.toLowerCase
result = Bigot.render source, data
test.equal result,"<h1>HELLO WORLD!</h1><h6>and hello bigot!</h6>"
do test.done
| 186006 | Bigot = require("./../lib/index.js");
exports.BigotTest =
'Test 01 - Render objects': (test) ->
source = "<h1>{title}</h1><h6>{subtitle}</h6><p>{content}</p><p>2 + 3 = {func}</p>"
data = title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p>"
do test.done
'Test 02 - Render array of objects': (test) ->
source = "<ul>{loop names}<li>{@}</li>{end names}</ul>"
data = names: ["<NAME>","<NAME>","<NAME>"]
result = Bigot.render source, data
test.equal result,"<ul><li><NAME></li><li><NAME></li><li><NAME></li></ul>"
do test.done
'Test 03 - Render objects into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age}</li>{end people}</ul>"
data = people: [
{name: "<NAME>", age: "24"},
{name: "<NAME>", age: "32"},
{name: "<NAME>", age: "18"}]
result = Bigot.render source, data
test.equal result,"<ul><li><NAME>, 24</li><li><NAME>, 32</li><li>John, 18</li></ul>"
do test.done
'Test 04 - Render arrays into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age} {loop sports}<span>{@}</span>{end sports}</li>{end people}</ul>"
data = people: [
{name: "<NAME>", age: "24", sports: ["hockey","curling"]},
{name: "<NAME>", age: "32", sports: ["futbol"]},
{name: "<NAME>", age: "18", sports: ["tennis","basketball"]}]
result = Bigot.render source, data
test.equal result,"<ul><li><NAME>, 24 <span>hockey</span><span>curling</span></li><li><NAME>, 32 <span>futbol</span></li><li><NAME>, 18 <span>tennis</span><span>basketball</span></li></ul>"
do test.done
'Test 05 - Render object arrays into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age} {loop activities}<span>{sport} and {hobby}</span>{end activities}</li>{end people}</ul>";
data = people: [{name: "<NAME>", age: "24", activities: [{
sport: "hockey",
hobby: "drive"}] },
{name: "<NAME>", age: "32", activities: [{
sport: "football",
hobby: "pets"}] },
{name: "<NAME>", age: "18", activities: [{
sport: "tennis",
hobby: "videogames"}] }]
result = Bigot.render source, data
test.equal result,"<ul><li><NAME>, 24 <span>hockey and drive</span></li><li><NAME>, 32 <span>football and pets</span></li><li><NAME>, 18 <span>tennis and videogames</span></li></ul>"
do test.done
'Test 06 - Render including templates': (test) ->
source = "{include header}<h1>{title}</h1><h6>{subtitle}</h6><p>{content}</p><p>2 + 3 = {func}</p>{include footer}"
data = header: "./test/header.html", footer: "./test/footer.html", title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<html><head><title>Bigot test</title></head><body>\n<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p></body></html>\n"
do test.done
'Test 07 - Render conditionals': (test) ->
source = "<h1>{title}</h1><h6>{subtitle}</h6>{if bool_func}<p>{content}</p>{else}<p>{content2}</p>{end bool_func}<p>2 + 3 = {func}</p>"
data =
title: "Hello World!",
subtitle: "and hello Bigot!"
bool_func: () -> true
content: "This is a Bigot test template with conditionals"
content2: "This is a Bigot test template"
func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template with conditionals</p><p>2 + 3 = 5</p>"
do test.done
'Test 08 - Render nested conditionals': (test) ->
source = "{if showMe}<p>Hello {if canShow}<span>friend!</span>{else}<span>{name}!</span>{end canShow}</p>{else}<p>Bye!</p>{end showMe}"
data =
name: "<NAME>"
showMe: () -> true
canShow: () -> false
result = Bigot.render source, data
test.equal result,"<p>Hello <span><NAME>!</span></p>"
do test.done
'Test 09 - Render nested conditionals with one loop inside': (test) ->
source = "{if showMe}<p>Hello {if canShow}<span>friend!</span>{else}<span>{name}!</span><ul>{loop animals}<li>{@}</li>{end animals}</ul>{end canShow}</p>{else}<p>Bye!</p>{end showMe}"
data =
name: "<NAME>"
showMe: () -> true
canShow: () -> false
animals: ["Lion","Tiger","Panther"]
result = Bigot.render source, data
test.equal result,"<p>Hello <span><NAME>!</span><ul><li>Lion</li><li>Tiger</li><li>Panther</li></ul></p>"
do test.done
'Test 10 - Render commented code': (test) ->
source = "{comment Start of file}<h1>{title}</h1><h6>{subtitle}</h6>{comment Middle of file}<p>{content}</p><p>2 + 3 = {func}</p>{comment End of file}"
data = title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p>"
do test.done
'Test 11 - Render code with helpers': (test) ->
source = "<h1>{toUpper title}</h1><h6>{toLower subtitle}</h6>"
data =
title: "Hello World!"
subtitle: "and hello Bigot!"
toUpper: (text) ->
text = JSON.stringify(this)
text = text.substring(1, text.length-1)
do text.toUpperCase
toLower: (text) ->
text = JSON.stringify(this)
text = text.substring(1, text.length-1)
do text.toLowerCase
result = Bigot.render source, data
test.equal result,"<h1>HELLO WORLD!</h1><h6>and hello bigot!</h6>"
do test.done
| true | Bigot = require("./../lib/index.js");
exports.BigotTest =
'Test 01 - Render objects': (test) ->
source = "<h1>{title}</h1><h6>{subtitle}</h6><p>{content}</p><p>2 + 3 = {func}</p>"
data = title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p>"
do test.done
'Test 02 - Render array of objects': (test) ->
source = "<ul>{loop names}<li>{@}</li>{end names}</ul>"
data = names: ["PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI","PI:NAME:<NAME>END_PI"]
result = Bigot.render source, data
test.equal result,"<ul><li>PI:NAME:<NAME>END_PI</li><li>PI:NAME:<NAME>END_PI</li><li>PI:NAME:<NAME>END_PI</li></ul>"
do test.done
'Test 03 - Render objects into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age}</li>{end people}</ul>"
data = people: [
{name: "PI:NAME:<NAME>END_PI", age: "24"},
{name: "PI:NAME:<NAME>END_PI", age: "32"},
{name: "PI:NAME:<NAME>END_PI", age: "18"}]
result = Bigot.render source, data
test.equal result,"<ul><li>PI:NAME:<NAME>END_PI, 24</li><li>PI:NAME:<NAME>END_PI, 32</li><li>John, 18</li></ul>"
do test.done
'Test 04 - Render arrays into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age} {loop sports}<span>{@}</span>{end sports}</li>{end people}</ul>"
data = people: [
{name: "PI:NAME:<NAME>END_PI", age: "24", sports: ["hockey","curling"]},
{name: "PI:NAME:<NAME>END_PI", age: "32", sports: ["futbol"]},
{name: "PI:NAME:<NAME>END_PI", age: "18", sports: ["tennis","basketball"]}]
result = Bigot.render source, data
test.equal result,"<ul><li>PI:NAME:<NAME>END_PI, 24 <span>hockey</span><span>curling</span></li><li>PI:NAME:<NAME>END_PI, 32 <span>futbol</span></li><li>PI:NAME:<NAME>END_PI, 18 <span>tennis</span><span>basketball</span></li></ul>"
do test.done
'Test 05 - Render object arrays into array of objects': (test) ->
source = "<ul>{loop people}<li>{name}, {age} {loop activities}<span>{sport} and {hobby}</span>{end activities}</li>{end people}</ul>";
data = people: [{name: "PI:NAME:<NAME>END_PI", age: "24", activities: [{
sport: "hockey",
hobby: "drive"}] },
{name: "PI:NAME:<NAME>END_PI", age: "32", activities: [{
sport: "football",
hobby: "pets"}] },
{name: "PI:NAME:<NAME>END_PI", age: "18", activities: [{
sport: "tennis",
hobby: "videogames"}] }]
result = Bigot.render source, data
test.equal result,"<ul><li>PI:NAME:<NAME>END_PI, 24 <span>hockey and drive</span></li><li>PI:NAME:<NAME>END_PI, 32 <span>football and pets</span></li><li>PI:NAME:<NAME>END_PI, 18 <span>tennis and videogames</span></li></ul>"
do test.done
'Test 06 - Render including templates': (test) ->
source = "{include header}<h1>{title}</h1><h6>{subtitle}</h6><p>{content}</p><p>2 + 3 = {func}</p>{include footer}"
data = header: "./test/header.html", footer: "./test/footer.html", title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<html><head><title>Bigot test</title></head><body>\n<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p></body></html>\n"
do test.done
'Test 07 - Render conditionals': (test) ->
source = "<h1>{title}</h1><h6>{subtitle}</h6>{if bool_func}<p>{content}</p>{else}<p>{content2}</p>{end bool_func}<p>2 + 3 = {func}</p>"
data =
title: "Hello World!",
subtitle: "and hello Bigot!"
bool_func: () -> true
content: "This is a Bigot test template with conditionals"
content2: "This is a Bigot test template"
func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template with conditionals</p><p>2 + 3 = 5</p>"
do test.done
'Test 08 - Render nested conditionals': (test) ->
source = "{if showMe}<p>Hello {if canShow}<span>friend!</span>{else}<span>{name}!</span>{end canShow}</p>{else}<p>Bye!</p>{end showMe}"
data =
name: "PI:NAME:<NAME>END_PI"
showMe: () -> true
canShow: () -> false
result = Bigot.render source, data
test.equal result,"<p>Hello <span>PI:NAME:<NAME>END_PI!</span></p>"
do test.done
'Test 09 - Render nested conditionals with one loop inside': (test) ->
source = "{if showMe}<p>Hello {if canShow}<span>friend!</span>{else}<span>{name}!</span><ul>{loop animals}<li>{@}</li>{end animals}</ul>{end canShow}</p>{else}<p>Bye!</p>{end showMe}"
data =
name: "PI:NAME:<NAME>END_PI"
showMe: () -> true
canShow: () -> false
animals: ["Lion","Tiger","Panther"]
result = Bigot.render source, data
test.equal result,"<p>Hello <span>PI:NAME:<NAME>END_PI!</span><ul><li>Lion</li><li>Tiger</li><li>Panther</li></ul></p>"
do test.done
'Test 10 - Render commented code': (test) ->
source = "{comment Start of file}<h1>{title}</h1><h6>{subtitle}</h6>{comment Middle of file}<p>{content}</p><p>2 + 3 = {func}</p>{comment End of file}"
data = title: "Hello World!", subtitle: "and hello Bigot!", content: "This is a Bigot test template", func: () -> 2+3
result = Bigot.render source, data
test.equal result,"<h1>Hello World!</h1><h6>and hello Bigot!</h6><p>This is a Bigot test template</p><p>2 + 3 = 5</p>"
do test.done
'Test 11 - Render code with helpers': (test) ->
source = "<h1>{toUpper title}</h1><h6>{toLower subtitle}</h6>"
data =
title: "Hello World!"
subtitle: "and hello Bigot!"
toUpper: (text) ->
text = JSON.stringify(this)
text = text.substring(1, text.length-1)
do text.toUpperCase
toLower: (text) ->
text = JSON.stringify(this)
text = text.substring(1, text.length-1)
do text.toLowerCase
result = Bigot.render source, data
test.equal result,"<h1>HELLO WORLD!</h1><h6>and hello bigot!</h6>"
do test.done
|
[
{
"context": "alue_array.length == 1 and a.mx_hash.hash_key == 'identifier'\n if !a.mx_hash.type?\n a.mx_hash.type",
"end": 5345,
"score": 0.9570508003234863,
"start": 5335,
"tag": "KEY",
"value": "identifier"
},
{
"context": "############\n\ntrans = new Translator\ntrans... | src/type_inference.coffee | hu2prod/scriptscript | 1 | require 'fy'
require 'fy/lib/codegen'
{
Translator
} = require 'gram2'
module = @
# ###################################################################################################
# scope state
# ###################################################################################################
# MAY BE move to trans
# Mutable inference state. current_scope.id_map maps an identifier name to
# the list of AST nodes that mention it; scope_list records every scope ever
# created so scope_id_pass can revisit them; scope_stack mirrors lexical
# nesting during the walk.
current_scope =
  id_map : {} # id -> ast pos list
scope_list = [current_scope]
scope_stack = []
# Builds a minimal stand-in for an AST node that only carries a type.
# Used to seed built-in identifiers into the root scope.
fake_id = (type)->
  {
    mx_hash : {
      type
    }
  }
# Re-initializes all scope state and pre-populates the root scope with the
# built-in identifiers (stdin, stdout, Math) plus two crafted objects that
# exist only to exercise error paths (Fail, Either_test).
scope_state_reset = ()->
  current_scope =
    id_map : {
      stdin : [
        fake_id mk_type 'Source', [mk_type 'string']
      ]
      stdout : [
        fake_id mk_type 'Sink', [mk_type 'string'],
      ]
      Math : [
        fake_id mk_type 'object', [], {
          abs : mk_type 'either', [
            mk_type 'function', [mk_type('int'), mk_type('int')]
            mk_type 'function', [mk_type('float'), mk_type('float')]
          ]
          round : mk_type 'function', [mk_type('int'), mk_type('float')]
        }
      ]
      Fail : [ # crafted object for more coverage
        fake_id mk_type 'object', [], {
          invalid_either : mk_type 'either', [
            mk_type 'function', [mk_type('int'), mk_type('int')]
            mk_type 'int'
          ]
        }
      ]
      Either_test : [ # crafted object for more coverage
        fake_id mk_type 'object', [], {
          int_float : mk_type 'either', [
            mk_type 'int'
            mk_type 'float'
          ]
          int_float_bool : mk_type 'either', [
            mk_type 'int'
            mk_type 'float'
            mk_type 'bool'
          ]
        }
      ]
    } # id -> ast pos list
  scope_list = [current_scope]
  scope_stack = []
  return
# Registers an identifier node in the current scope. upush (from the 'fy'
# helper library) avoids duplicate entries.
scope_id_push = (node)->
  current_scope.id_map[node.value] ?= []
  current_scope.id_map[node.value].upush node
  return
# Returns the scope cached on the node, creating and registering a fresh one
# on first visit so repeated inference passes reuse the same scope object.
_mk_scope = (node)->
  return scope if scope = node.__inject_scope
  scope = node.__inject_scope =
    id_map : {}
  scope_list.push scope
  scope
# Enters the scope attached to `node`, saving the current one.
scope_push = (node)->
  scope_stack.push current_scope
  current_scope = _mk_scope node
  return
# Leaves the innermost scope, restoring its parent.
scope_pop = ()->
  current_scope = scope_stack.pop()
  return
# ###################################################################################################
# Type
# ###################################################################################################
# A structural type. `main` is the head name ('int', 'array', 'function',
# 'either', '*', ...), `nest` holds type parameters (for functions: return
# type first, then argument types) and `field_hash` holds named fields for
# object-like types. '*' is the unknown/wildcard type.
class @Type
  main : ''
  nest : []
  field_hash : {} # name -> type
  constructor:()->
    @field_hash = {}
  # Deep structural equality: same head, same nest, same field keys/types.
  eq : (t)->
    return false if @main != t.main
    return false if @nest.length != t.nest.length
    for v,k in @nest
      return false if !v.eq t.nest[k]
    for k,v1 of @field_hash
      v2 = t.field_hash[k]
      return false if !v2 or !v1.eq v2
    # just check keys
    for k,v2 of t.field_hash
      v1 = @field_hash[k]
      return false if !v1
    true
  # Renders e.g. "array<int>" or "object{a:int}".
  toString : ()->
    nest_part = ""
    if @nest.length
      list = []
      for v in @nest
        list.push v.toString()
      nest_part = "<#{list.join ','}>"
    object_part = ""
    if h_count @field_hash
      list = []
      for k,v of @field_hash
        list.push "#{k}:#{v}" # implicit to string
      object_part = "{#{list.join ','}}"
    "#{@main}#{nest_part}#{object_part}"
  # Like eq, but the wildcard '*' matches anything (on either side).
  can_match : (t)->
    return true if @main == '*'
    return true if t.main == '*'
    return false if @main != t.main
    return false if @nest.length != t.nest.length
    for v,k in @nest
      v2 = t.nest[k]
      return false if !v.can_match v2
    for k,v1 of @field_hash
      v2 = t.field_hash[k]
      return false if !v2 or !v1.can_match v2
    # just check keys
    for k,v2 of t.field_hash
      v1 = @field_hash[k]
      return false if !v1
    true
  # Bidirectional propagation: whichever side is '*' copies the other side's
  # head and nest. Returns the number of changes so the fixpoint loop can
  # detect progress.
  # NOTE(review): field_hash is not copied in the wildcard branches -
  # confirm whether object fields are exchanged elsewhere before relying
  # on this for object types.
  exchange_missing_info : (t)->
    if @main == '*' and t.main == '*'
      return 0 # nothing
    else if @main == '*'
      @main = t.main
      @nest = t.nest.clone()
      return 1
    else if t.main == '*'
      t.main = @main
      t.nest = @nest.clone()
      return 1
    ret = 0
    for v,k in @nest
      v2 = t.nest[k]
      ret += v.exchange_missing_info v2
    for k,v1 of @field_hash
      v1.exchange_missing_info t.field_hash[k]
    ret
  # In-place overwrite with another type's content (aliases nest/field_hash).
  set : (t)->
    @main = t.main
    @nest = t.nest
    @field_hash = t.field_hash
    return
# Convenience constructor: builds a Type with the given head name, nested
# type-parameter list and named-field map.
mk_type = (str, nest=[], field_hash={})->
  t = new module.Type
  [t.main, t.nest, t.field_hash] = [str, nest, field_hash]
  t
# ###################################################################################################
# Wraps a bare Type so it looks like an AST node ({mx_hash: {type}}),
# letting assert_pass_down reuse the node-vs-node comparison path.
type_wrap = (type)->
  { mx_hash: { type } }
# TODO flatten either (e.g. either<a,either<b,c> > -> either<a,b,c>)
# Normalizes a type to a flat list of alternatives: an 'either' yields its
# nest, anything else yields a single-element list.
mk_either_list = (t)->
  return t.nest if t.main == 'either'
  [t]
# TODO stop using this helper directly (legacy one-directional push)
# Stamps `type` onto `ast` unless it is the '*' wildcard, and also onto the
# single identifier inside a simple lvalue. Returns the change count.
_assert_pass_down = (ast, type, diagnostics)->
  ret = 0
  if type.main != '*'
    ast.mx_hash.type = type
    ret++
  # rvalue unwrap
  if ast.mx_hash.hash_key == 'rvalue'
    ast = ast.value_array[0]
  # lvalue patch
  if ast.mx_hash.hash_key == 'lvalue'
    # case @
    # case @id
    # case id
    # case lvalue[rvalue]
    # case lvalue.id
    # case lvalue.octal/decimal
    # LATER destructuring assignment
    # case id
    [a] = ast.value_array
    if ast.value_array.length == 1 and a.mx_hash.hash_key == 'identifier'
      if !a.mx_hash.type?
        a.mx_hash.type = type
        ret++
      else
        # UNIMPLEMENTED
  return ret
# Pushes a concrete Type at a node by wrapping the type as a fake node and
# delegating to the node-vs-node unifier below.
assert_pass_down = (ast, type, diagnostics)->
  assert_pass_down_eq ast, type_wrap(type), diagnostics
# Core unification of two nodes' types. When both are known, their 'either'
# alternatives are intersected via can_match and each side is narrowed to
# the surviving alternatives; with exactly one alternative on both sides,
# missing '*' parts are exchanged. When only one side is known, its type is
# pushed onto the other. Returns the change count.
assert_pass_down_eq = (ast1, ast2, diagnostics)->
  ret = 0
  if ast1.mx_hash.type? and ast2.mx_hash.type?
    either_list1 = mk_either_list ast1.mx_hash.type
    either_list2 = mk_either_list ast2.mx_hash.type
    # BUG normalize
    pair_list = []
    for e1 in either_list1
      for e2 in either_list2
        if e1.can_match e2
          pair_list.push [e1,e2]
    if pair_list.length == 0
      perr either_list1
      perr either_list2
      throw new Error "assert pass up eq failed either list can't match"
    t1_possible_list = []
    t2_possible_list = []
    # BUG normalize
    for pair in pair_list
      t1_possible_list.upush pair[0]
      t2_possible_list.upush pair[1]
    if either_list1.length != t1_possible_list.length
      if t1_possible_list.length == 1
        ast1.mx_hash.type.set t1_possible_list[0]
      else
        cut_type = mk_type 'either', t1_possible_list
        ast1.mx_hash.type.set cut_type
      ret++
    if either_list2.length != t2_possible_list.length
      if t2_possible_list.length == 1
        ast2.mx_hash.type.set t2_possible_list[0]
      else
        cut_type = mk_type 'either', t2_possible_list
        ast2.mx_hash.type.set cut_type
      ret++
    # BUG. Can't exchange shared info (e.g. either<function<int,float>,function<int,int>> can't send return type )
    if either_list1.length == 1 and either_list2.length == 1
      t1 = either_list1[0]
      t2 = either_list2[0]
      if !t1.eq(t2)
        ret += t1.exchange_missing_info t2
  else if !ast1.mx_hash.type? and !ast2.mx_hash.type?
    # nothing
  else if !ast1.mx_hash.type?
    ret += _assert_pass_down ast1, ast2.mx_hash.type, "#{diagnostics} ast1 down"
  else #!ast2.mx_hash.type?
    ret += _assert_pass_down ast2, ast1.mx_hash.type, "#{diagnostics} ast2 down"
  return ret
# Unifies the types of every node in ast_list. If `type` is not supplied,
# the first node that already has one provides it.
assert_pass_down_eq_list = (ast_list, type, diagnostics)->
  ret = 0
  # if !type # coverage LATER
  for v in ast_list
    break if type = v.mx_hash.type
  if type
    for v, idx in ast_list
      ret += assert_pass_down v, type, "#{diagnostics} pos #{idx}"
  return ret
# ###################################################################################################
# The translator walks the AST repeatedly; each rule handler returns the
# number of new type facts it established (0 means no progress this pass).
trans = new Translator
trans.key = 'ti'
# 'skip': nodes that carry no type information.
trans.translator_hash['skip'] = translate:(ctx, node)-> 0
# 'pass': single-child wrappers - translate the child, then unify the
# wrapper's type with the child's.
trans.translator_hash['pass'] = translate:(ctx, node)->
  child = node.value_array[0]
  ret = ctx.translate child
  ret += assert_pass_down_eq node, child, "pass rule=#{node?.rule?.signature}"
  ret
trans.translator_hash['block'] = translate:(ctx, node)->
  ctx.translate node.value_array[1]
# Statement list: translate every non-eol child; the list's type is the
# type of its last statement (the block's expression value).
trans.translator_hash['stmt_plus_last'] = translate:(ctx, node)->
  ret = 0
  for v in node.value_array
    continue if v.mx_hash.hash_key == 'eol'
    ret += ctx.translate v
  child = node.value_array.last()
  if child.mx_hash.type?
    if !node.mx_hash.type?
      node.mx_hash.type = child.mx_hash.type
      ret++
    else
      # UNIMPLEMENTED
  ret
# Parenthesized expression: same type as its inner expression.
trans.translator_hash['bracket'] = translate:(ctx, node)->
  child = node.value_array[1]
  ret = ctx.translate child
  ret += assert_pass_down_eq node, child, "bracket"
  ret
trans.translator_hash['this'] = translate:(ctx, node)->
  # LATER
  0
# Identifier handling: the literals true/false are typed bool immediately;
# any other identifier either inherits a type already known for it or is
# registered in the current scope for the later scope_id_pass.
trans.translator_hash['id'] = translate:(ctx, node)->
  ret = 0
  is_predefined_const = node.value_view in ['true', 'false'] # .toLowerCase() ??
  if is_predefined_const
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type 'bool'
      ret++
  else
    id_node = node.value_array[0]
    if (known_type = id_node.mx_hash.type)?
      if !node.mx_hash.type?
        node.mx_hash.type = known_type
        ret++
    scope_id_push id_node
  ret
# Literal constants: the grammar stage must have pre-assigned a type
# (possibly still as a plain string); normalize it into a Type instance.
trans.translator_hash['const'] = translate:(ctx, node)->
  if !node.mx_hash.type?
    ### !pragma coverage-skip-block ###
    throw new Error "You forgot specify type at ti=const"
  unless node.mx_hash.type instanceof module.Type
    node.mx_hash.type = mk_type node.mx_hash.type
  return 0
# ###################################################################################################
# bin_op
# ###################################################################################################
# Lookup table "op,left_type,right_type" -> result type name for binary
# operators. Entries defined later overwrite earlier ones, so the generic
# int/float -> float rules come first and the exact int,int forms win.
bin_op_type_table = {}
do ()->
  def_bin = (op,at,bt,ret)->
    key = "#{op},#{at},#{bt}"
    bin_op_type_table[key] = ret
    return
  # arithmetic on any int/float mix yields float...
  for op in "+ - * % %%".split " "
    for at in "int float".split " "
      for bt in "int float".split " "
        def_bin op, at, bt, "float"
  # ...except the pure-int forms (and shifts), which stay int
  for op in "+ - * % %% << >> >>>".split /\s+/
    def_bin op, "int", "int", "int"
  def_bin "+", "string", "string", "string"
  def_bin "*", "string", "int", "string"
  # true division and exponentiation always yield float
  for op in "/ **".split " "
    for at in "int float".split " "
      for bt in "int float".split " "
        def_bin op, at, bt, "float"
  # floor division always yields int
  for at in "int float".split " "
    for bt in "int float".split " "
      def_bin "//", at, bt, "int"
  # logic ops: bool (logical) and int (bitwise) forms
  for op in "and or xor".split /\s+/
    def_bin op, "bool", "bool", "bool"
    def_bin op, "int", "int", "int"
  # comparisons yield bool
  for type in "int float".split /\s+/
    for op in "< <= > >=".split /\s+/
      def_bin op, type,type, "bool"
# Binary operator: translate both operands; '|' (pipe) is only shape-checked
# and contributes no type facts; '==' / '!=' unify the operand types and
# yield bool; everything else is resolved through bin_op_type_table once
# both operand types are known.
trans.translator_hash['bin_op'] = translate:(ctx, node)->
  ret = 0
  rvalue_list = []
  bin_op_list = []
  for v in node.value_array
    rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
    bin_op_list.push v if v.mx_hash.hash_key == 'bin_op'
  bin_op_node = bin_op_list[0]
  op = bin_op_node.value_view
  for v in rvalue_list
    ret += ctx.translate v
  # cases
  # no type detected - can build system that limits a, b and result
  # 1 type detected - can build system that limits second and result
  # 2 type detected - can validate and send result
  # LATER result defined + some args
  [a,b] = rvalue_list
  if op == '|'
    if a.mx_hash.type?
      unless a.mx_hash.type.main in (a_types = ["array", "Source"])
        pp a.mx_hash.type
        throw new Error "Pipe can't be used for left type #{a.mx_hash.type.main}. Supported types: #{a_types}"
    if b.mx_hash.type?
      unless b.mx_hash.type.main in (b_types = ["function", "array", "Sink"])
        throw new Error "Pipe can't be used for right type #{b.mx_hash.type.main}. Supported types: #{b_types}"
    return 0
  if !a.mx_hash.type? and !b.mx_hash.type?
    # case 1
    # not implemented
  else if a.mx_hash.type? and b.mx_hash.type?
    # case 3
    at = a.mx_hash.type
    bt = b.mx_hash.type
    loop
      if op in ['==', '!=']
        ret += assert_pass_down_eq a, b, "bin_op eq"
        _ret = 'bool'
        break
      key = "#{op},#{at},#{bt}"
      if !_ret = bin_op_type_table[key]
        throw new Error "Type inference: can't find bin_op=#{op} a=#{at} b=#{bt} node=#{node.value}"
      break
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type _ret
      ret++
    else
      # UNIMPLEMENTED
  else
    # case 2
    # not implemented
  return ret
# ###################################################################################################
# assign_bin_op
# ###################################################################################################
# Compound assignment (a op= b, including plain '='): translates both sides,
# then unifies them ('=') or checks the (op, a, b) combination against
# bin_op_type_table and propagates the result type. Returns the number of
# type facts added.
trans.translator_hash['assign_bin_op'] = translate:(ctx, node)->
  ret = 0
  rvalue_list = []
  bin_op_list = []
  for v in node.value_array
    rvalue_list.push v if v.mx_hash.hash_key in ['lvalue', 'rvalue']
    bin_op_list.push v if v.mx_hash.hash_key == 'assign_bin_op'
  bin_op_node = bin_op_list[0]
  op = bin_op_node.value_view.replace '=', ''
  for v in rvalue_list
    # Fix: the change count from translating children was dropped here
    # (every sibling translator accumulates it); losing it can make the
    # fixpoint loop stop before inference has actually converged.
    ret += ctx.translate v
  # cases
  # no type detected - can build system that limits a, b and result
  # 1 type detected - can build system that limits second and result
  # 2 type detected - can validate and send result
  # LATER result defined + some args
  [a,b] = rvalue_list
  if !a.mx_hash.type? and !b.mx_hash.type?
    # case 1
    # not implemented
  else if a.mx_hash.type? and b.mx_hash.type?
    # case 3
    if op == ''
      ret += assert_pass_down_eq a, b, "assign_bin_op"
    else
      at = a.mx_hash.type
      bt = b.mx_hash.type
      key = "#{op},#{at},#{bt}"
      if !_ret = bin_op_type_table[key]
        throw new Error "can't find assign_bin_op=#{op} a=#{at} b=#{bt} node=#{node.value}"
      _ret_t = mk_type _ret
      if !_ret_t.eq at
        ### !pragma coverage-skip-block ###
        # NOTE: for now there is no operation where a.type != b.type
        throw new Error "assign_bin_op conflict '#{_ret_t}' != '#{at}'"
      # Fix: this change count was also being discarded
      ret += assert_pass_down node, _ret_t, 'assign_bin_op'
  else
    # case 2
    if b.mx_hash.type?
      if op == ''
        ret += assert_pass_down_eq a, b, 'assign_bin_op'
      # BYPASSING missing code coverage
      ret += assert_pass_down_eq node, b, 'assign_bin_op'
      # if !node.mx_hash.type?
      #   node.mx_hash.type = b.mx_hash.type
      #   ret++
      # else
      #   # UNIMPLEMENTED
    else # a.mx_hash.type?
      if op == ''
        ret += assert_pass_down_eq b, a, 'assign_bin_op'
      if !node.mx_hash.type?
        node.mx_hash.type = a.mx_hash.type
        ret++
      else
        # UNIMPLEMENTED
  ret
# ###################################################################################################
# pre_op
# ###################################################################################################
# Lookup table "op,operand_type" -> result type name for prefix operators.
pre_op_type_table = {}
def_pre = (op,at,ret)->
  key = "#{op},#{at}"
  pre_op_type_table[key] = ret
  return
def_pre "-", "int", "int"
def_pre "~", "int", "int"
def_pre "+", "string", "float" # unary plus parses a string into a number
def_pre "!", "bool", "bool"
def_pre "not", "int" , "int"
def_pre "not", "bool", "bool"
# Prefix operator: translate the operand; once its type is known, look up
# (op, type) in the table to type the whole expression.
trans.translator_hash['pre_op'] = translate:(ctx, node)->
  ret = 0
  rvalue_list = []
  pre_op_list = []
  for v in node.value_array
    rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
    pre_op_list.push v if v.mx_hash.hash_key == 'pre_op'
  pre_op_node = pre_op_list[0]
  op = pre_op_node.value_view
  for v in rvalue_list
    ret += ctx.translate v
  # cases
  # no type detected - can build system that limits a, b and result
  # 1 type detected - can validate and send result
  [a] = rvalue_list
  if !a.mx_hash.type?
    # case 1
    # not implemented
  else
    # case 2
    at = a.mx_hash.type
    key = "#{op},#{at}"
    if !_ret = pre_op_type_table[key]
      throw new Error "can't find pre_op=#{op} a=#{at} node=#{node.value}"
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type _ret
      ret++
    else
      # UNIMPLEMENTED
  ret
# LATER
# ###################################################################################################
# post_op
# ###################################################################################################
# Lookup table "op,operand_type" -> result type name for postfix operators.
post_op_type_table = {}
def_post = (op,at,ret)->
  key = "#{op},#{at}"
  post_op_type_table[key] = ret
  return
def_post "++", "int", "int"
def_post "--", "int", "int"
# NOTE 1++ is not valid, but passes gram and TI
# Postfix operator (x++ / x--): translates the operand and, once its type
# is known, looks the (op, operand-type) pair up in post_op_type_table to
# type the whole expression. Returns the number of type facts added.
trans.translator_hash['post_op'] = translate:(ctx, node)->
  ret = 0
  operand_list = (v for v in node.value_array when v.mx_hash.hash_key == 'rvalue')
  op_node = (v for v in node.value_array when v.mx_hash.hash_key == 'post_op')[0]
  op = op_node.value_view
  for operand in operand_list
    ret += ctx.translate operand
  [a] = operand_list
  if a.mx_hash.type?
    # operand type known: validate against the table and propagate
    at = a.mx_hash.type
    unless _ret = post_op_type_table["#{op},#{at}"]
      throw new Error "can't find post_op=#{op} a=#{at} node=#{node.value}"
    unless node.mx_hash.type?
      node.mx_hash.type = mk_type _ret
      ret++
  # else: operand type still unknown - nothing to conclude this pass
  ret
# # ###################################################################################################
# Ternary (cond ? vtrue : vfalse): condition must be bool; both branches
# must agree on one type, which becomes the expression's type.
trans.translator_hash["ternary"] = translate:(ctx, node)->
  ret = 0
  [cond, _s1, vtrue, _s2, vfalse] = node.value_array
  ret += ctx.translate cond
  ret += assert_pass_down cond, mk_type('bool'), 'ternary'
  ret += ctx.translate vtrue
  ret += ctx.translate vfalse
  # Fix: the diagnostics argument was missing here, so unification errors
  # raised from the branch comparison interpolated "undefined".
  ret += assert_pass_down_eq vtrue, vfalse, "ternary"
  ret += assert_pass_down_eq vtrue, node, "ternary"
  return ret
# ###################################################################################################
# Array literal: collects the element rvalues (the grammar nests them),
# makes all elements agree on one type, and types the literal array<T>
# (or array<*> while the element type is still unknown / list is empty).
trans.translator_hash["array"] = translate:(ctx, node)->
  ret = 0
  element_list = []
  walk = (node)->
    for sn in node.value_array
      if sn.mx_hash.hash_key == 'rvalue'
        element_list.push sn
      else
        walk sn
    return
  walk node
  for el in element_list
    ret += ctx.translate el
  ret += assert_pass_down_eq_list element_list, undefined, "array decl"
  if element_list[0]?.mx_hash.type?
    subtype = element_list[0].mx_hash.type
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type "array", [subtype]
      ret++
    else
      # UNIMPLEMENTED
  else
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type "array", [mk_type '*']
      ret++
    else
      # UNIMPLEMENTED
  return ret
# ###################################################################################################
# Hash/object literal: collects key/value pairs. Shorthand {a} and {a:v}
# forms produce an 'object' (named-tuple) type with per-field types;
# a computed [expr]:value key forces a homogeneous 'hash' type instead.
trans.translator_hash["hash"] = translate:(ctx, node)->
  ret = 0
  pair_list = []
  walk = (node)->
    for sn in node.value_array
      if sn.mx_hash.hash_key == 'pair'
        pair_list.push sn
      else
        walk sn
    return
  walk node
  element_list = []
  must_be_hash = false
  for el in pair_list
    rvalue_list = []
    id_list = []
    for sn in el.value_array
      if sn.mx_hash.hash_key == 'identifier'
        id_list.push sn
      else if sn.mx_hash.hash_key == 'rvalue'
        rvalue_list.push sn
        ret += ctx.translate sn
    if rvalue_list.length == 0
      # shorthand {a}: key and value are the same identifier
      key = id_list[0]
      value = id_list[0]
      scope_id_push value
    else if rvalue_list.length == 1
      # NOTE LATER can be missing
      # e.g. {a.b.c} => {c:a.b.c}
      key = id_list[0]
      value = rvalue_list[0]
    else # if rvalue_list.length == 2
      # computed key: both key and value are expressions
      [key, value] = rvalue_list
      must_be_hash = true
    # TODO check key castable to string # same as string iterpolated
    element_list.push {key, value}
  # REMOVE LATER
  if must_be_hash
    ret += assert_pass_down_eq_list element_list.map((t)->t.value), undefined, "hash decl"
  if element_list.length
    if must_be_hash
      subtype = element_list[0].value.mx_hash.type
      if !node.mx_hash.type?
        node.mx_hash.type = mk_type "hash", [subtype]
        ret++
    else
      if !node.mx_hash.type?
        node.mx_hash.type = mk_type "object", []
        ret++
      for kv in element_list
        {key, value} = kv
        node.mx_hash.type.field_hash[key.value] = value.mx_hash.type or mk_type '*'
  else
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type "hash", [mk_type '*']
      ret++
    else
      # UNIMPLEMENTED
  return ret
# ###################################################################################################
# access
# ###################################################################################################
# Indexing root[expr]. Supported roots: array<T> indexed by int -> T,
# hash<T> indexed by string -> T, string indexed by int -> string.
trans.translator_hash['array_access'] = translate:(ctx, node)->
  ret = 0
  [root, _skip, rvalue] = node.value_array
  ret += ctx.translate root
  ret += ctx.translate rvalue
  # cases
  # 1 array<T> [int ] -> T
  # 2 hash<T> [string] -> T
  if root.mx_hash.type
    subtype = root.mx_hash.type.nest[0]
    switch root.mx_hash.type.main
      when 'array'
        ret += assert_pass_down rvalue, mk_type("int"), "array_access array"
      when 'hash' # NOTE: hash == dictionary here; named tuples are the separate 'object' type
        ret += assert_pass_down rvalue, mk_type("string"), "array_access hash"
      when 'string'
        # Fix: the diagnostics label copied from the hash branch said
        # "array_access hash"; corrected to name the string branch.
        ret += assert_pass_down rvalue, mk_type("int"), "array_access string"
        subtype = mk_type 'string'
      # when '*' # can't pass as main type
      # OK
      else
        throw new Error "Trying to access array of not allowed type '#{root.mx_hash.type.main}'"
    if subtype and subtype.main != '*'
      ret += assert_pass_down node, subtype, "array_access"
  ret
# Field access root.id: arrays expose only .length; 'object' (named tuple)
# resolves the field in its field_hash; 'hash' passes through untyped.
trans.translator_hash['id_access'] = translate:(ctx, node)->
  [root, _skip, id] = node.value_array
  ret = ctx.translate root
  if root.mx_hash.type
    subtype = root.mx_hash.type.nest[0]
    switch root.mx_hash.type.main
      when 'array'
        # TODO later impl with field_hash
        if id.value == 'length'
          subtype = mk_type 'int'
        else
          throw new Error "Trying access field '#{id.value}' in array"
      when 'hash' # NOTE: hash == dictionary here; there is also the named-tuple 'object' type, where each field's type must be checked individually
        'OK'
      when 'object' # named tuple
        field_hash = root.mx_hash.type.field_hash
        if !subtype = field_hash[id.value]
          throw new Error "Trying access field '#{id.value}' in object with fields=#{Object.keys field_hash}"
      else
        throw new Error "Trying to access field '#{id.value}' of not allowed type '#{root.mx_hash.type.main}'"
    if subtype.main != '*' or node.mx_hash.type
      ret += assert_pass_down node, subtype, "id_access"
  ret
# Swizzle-style access (e.g. v.xy) on arrays: a multi-character field name
# keeps the whole array type; a single character falls through with the
# element subtype (nest[0]).
trans.translator_hash['opencl_access'] = translate:(ctx, node)->
  [root, _skip, id] = node.value_array
  ret = ctx.translate root
  if root.mx_hash.type
    subtype = root.mx_hash.type.nest[0]
    switch root.mx_hash.type.main
      when 'array'
        if id.value.length != 1
          subtype = root.mx_hash.type
      else
        throw new Error "Trying to access field '#{id.value}' of not allowed type '#{root.mx_hash.type.main}'"
    if subtype and subtype.main != '*'
      ret += assert_pass_down node, subtype, "opencl_access"
  ret
# ###################################################################################################
# function
# ###################################################################################################
# Converts a grammar 'type' node into a Type object.
type_ast_to_obj = (ast)->
  # NOTE WRONG. Need proper handle <> (generic parameters are dropped)
  mk_type ast.value_view
# Function declaration: opens the function's own scope, registers the
# parameters there, translates the body and default values, then crafts a
# function<ret, arg...> type from the declared/inferred parameter types
# and pushes it onto the declaration node.
trans.translator_hash['func_decl'] = translate:(ctx, node)->
  ret = 0
  function_body = null
  arg_list_node = null
  ret_type_node = null
  for v in node.value_array
    arg_list_node = v if v.mx_hash.hash_key == 'arg_list'
    function_body = v if v.mx_hash.hash_key == 'function_body'
    ret_type_node = v if v.mx_hash.hash_key == 'type'
  scope_push node
  # TODO translate arg default values
  arg_list = []
  if arg_list_node?
    walk = (node)->
      for v in node.value_array
        if v.mx_hash.hash_key == 'arg'
          arg_list.push v
        else
          walk v
      return
    walk arg_list_node
    for v in arg_list
      for sn in v.value_array
        if sn.mx_hash.hash_key == 'identifier'
          scope_id_push sn
        else if sn.mx_hash.hash_key == 'rvalue'
          ctx.translate sn
  if function_body?
    ret += ctx.translate function_body
  scope_pop()
  # nest[0] of a function type is the return type (declared, else void)
  arg_type_list = []
  if ret_type_node?
    arg_type_list.push type_ast_to_obj ret_type_node
  else
    arg_type_list.push mk_type 'void'
  for v in arg_list
    type = null
    rvalue = null
    if v.value_array.length == 3
      # arg has either an explicit type annotation or a default value
      [id,_skip,type_or_rvalue] = v.value_array
      type = type_or_rvalue if type_or_rvalue.mx_hash.hash_key == 'type'
      rvalue= type_or_rvalue if type_or_rvalue.mx_hash.hash_key == 'rvalue'
    else
      [id] = v.value_array
    if type?
      type_str = type_ast_to_obj type
      assert_pass_down id, type_str, "func arg '#{id.value}'"
    if rvalue?
      # default value: unify the parameter's type with the default's
      assert_pass_down_eq id, rvalue, "func arg '#{id.value}'"
    arg_type_list.push id.mx_hash.type or mk_type "*"
  craft_type = mk_type "function", arg_type_list
  assert_pass_down node, craft_type, "function"
  ret
# Function call: translates the callee and each argument, then filters the
# callee's (possibly 'either' of) function signatures down to those whose
# arity and argument types can match. If all surviving signatures agree on
# a return type, it is propagated to the call node. When the callee type is
# still unknown and func_call_unroll is enabled, a function type is crafted
# from the argument types and pushed onto the callee instead.
trans.translator_hash['func_call'] = translate:(ctx, node)->
  ret = 0
  rvalue = node.value_array[0]
  comma_rvalue_node = null
  for v in node.value_array
    if v.mx_hash.hash_key == 'comma_rvalue'
      comma_rvalue_node = v
  arg_list = []
  ret += ctx.translate rvalue
  if comma_rvalue_node
    walk = (node)->
      for v in node.value_array
        if v.mx_hash.hash_key == 'rvalue'
          arg_list.push v
        if v.mx_hash.hash_key == 'comma_rvalue'
          walk v
      # Fix: this helper accidentally ended with a stray `rvalue`
      # expression (implicitly returning the outer callee node); the
      # return value is unused, so make it an explicit bare return.
      return
    walk comma_rvalue_node
  for arg in arg_list
    ret += ctx.translate arg
  if rvalue.mx_hash.type
    check_list = mk_either_list rvalue.mx_hash.type
    allowed_signature_list = []
    for type in check_list
      unless type.main in ['function', '*']
        throw new Error "trying to call type='#{type}'"
      # default arg later
      continue if type.nest.length-1 != arg_list.length
      # nest[0] is the return type; nest[1..] are parameter types
      found = false
      for i in [1 ... type.nest.length] by 1
        expected_arg_type = type.nest[i]
        real_arg_type = arg_list[i-1].mx_hash.type
        if real_arg_type and !expected_arg_type.can_match real_arg_type
          found = true
          break
      if !found
        allowed_signature_list.push type
    if allowed_signature_list.length == 0
      throw new Error "can't find allowed_signature in '#{check_list.map((t)->t.toString())}'"
    # propagate the return type only if every surviving signature agrees
    candidate_type = allowed_signature_list[0].nest[0]
    found = false
    for v in allowed_signature_list
      if !v.nest[0].eq candidate_type
        found = true
        break
    if !found
      ret += assert_pass_down node, candidate_type, "func_call"
  else if ctx.func_call_unroll
    # try to detect function type based on argument call list
    # NOTE BUG. Default args will be mangled by this
    ret_type = node.mx_hash.type or mk_type '*'
    craft_type = mk_type 'function', [ret_type]
    for arg in arg_list
      craft_type.nest.push arg.mx_hash.type or mk_type '*'
    ret += assert_pass_down rvalue, craft_type, "func_call"
  ret
# ###################################################################################################
# macro
# ###################################################################################################
# Macro stub: translates the optional rvalue argument and then the body
# block, accumulating their change counts.
trans.translator_hash['macro_stub'] = translate:(ctx, node)->
  block_node = null
  rvalue_node = null
  for child in node.value_array
    switch child.mx_hash.hash_key
      when 'block'  then block_node  = child
      when 'rvalue' then rvalue_node = child
  ret = 0
  ret += ctx.translate rvalue_node if rvalue_node?
  ret + ctx.translate block_node
# ###################################################################################################
# string_interpolated
# ###################################################################################################
# Interpolated string: translate every embedded rvalue fragment.
trans.translator_hash['string_inter_pass'] = translate:(ctx, node)->
  ret = 0
  for child in node.value_array when child.mx_hash.hash_key == 'rvalue'
    ret += ctx.translate child
    # TODO check child.mx_hash.type castable to string
  ret
# ###################################################################################################
# scope id pass
# ###################################################################################################
# Phase 2: for every scope, unify the types of all AST nodes that refer to
# the same identifier. Returns the number of type facts added.
scope_id_pass = ()->
  total = 0
  for scope in scope_list
    for name, node_list of scope.id_map
      # p node_list.map((t)->t.mx_hash) # DEBUG
      total += assert_pass_down_eq_list node_list, undefined, "scope_id_pass '#{name}'"
  # p "scope_id_pass=#{total}" # DEBUG
  total
# ###################################################################################################
# Fixpoint driver: repeatedly re-translates the whole AST (phase 1) and
# unifies same-scope identifiers (phase 2) until no new type facts appear;
# throws if the iteration cap is reached without converging.
@__type_inference = (ast, opt={})->
  change_count = 0
  trans.func_call_unroll = opt.func_call_unroll
  for i in [0 .. 10] # MAGIC
    # phase 1 deep
    # found atoms of known types
    # found bigger atoms that can be constructed for lower ones with 1 pass
    # change_count = +trans.go ast # avoid sink point
    trans.reset()
    change_count = trans.translate ast
    # phase 2 same scope id lookup
    change_count += scope_id_pass()
    # p "change_count=#{change_count}" # DEBUG
    if change_count == 0
      return
  ### !pragma coverage-skip-block ###
  throw new Error "Type inference error. Out of lookup limit change_count(left)=#{change_count}"
# Two-stage entry: a conservative pass first, then a second pass with
# func_call_unroll enabled so callee types can be guessed from call sites.
@_type_inference = (ast, opt={})->
  scope_state_reset()
  module.__type_inference ast, opt
  opt2 = {
    func_call_unroll : true
  }
  obj_set opt2, opt
  module.__type_inference ast, opt2
  return
# Callback-style wrapper around _type_inference: invokes on_end(err) with
# any thrown error, or on_end(null) on success.
@type_inference = (ast, opt, on_end)->
  err = null
  try
    module._type_inference ast, opt
  catch e
    err = e
  on_end err
| 181127 | require 'fy'
require 'fy/lib/codegen'
{
Translator
} = require 'gram2'
module = @
# ###################################################################################################
# scope state
# ###################################################################################################
# MAY BE move to trans
# (Duplicate chunk of the same file.) Mutable inference state: id_map maps
# an identifier name to the AST nodes that mention it; scope_list records
# every scope; scope_stack mirrors lexical nesting.
current_scope =
  id_map : {} # id -> ast pos list
scope_list = [current_scope]
scope_stack = []
# Minimal AST-node stand-in carrying only a type; seeds built-ins.
fake_id = (type)->
  {
    mx_hash : {
      type
    }
  }
# Re-initializes scope state and pre-populates the root scope with the
# built-ins (stdin, stdout, Math) plus coverage-only objects.
scope_state_reset = ()->
  current_scope =
    id_map : {
      stdin : [
        fake_id mk_type 'Source', [mk_type 'string']
      ]
      stdout : [
        fake_id mk_type 'Sink', [mk_type 'string'],
      ]
      Math : [
        fake_id mk_type 'object', [], {
          abs : mk_type 'either', [
            mk_type 'function', [mk_type('int'), mk_type('int')]
            mk_type 'function', [mk_type('float'), mk_type('float')]
          ]
          round : mk_type 'function', [mk_type('int'), mk_type('float')]
        }
      ]
      Fail : [ # crafted object for more coverage
        fake_id mk_type 'object', [], {
          invalid_either : mk_type 'either', [
            mk_type 'function', [mk_type('int'), mk_type('int')]
            mk_type 'int'
          ]
        }
      ]
      Either_test : [ # crafted object for more coverage
        fake_id mk_type 'object', [], {
          int_float : mk_type 'either', [
            mk_type 'int'
            mk_type 'float'
          ]
          int_float_bool : mk_type 'either', [
            mk_type 'int'
            mk_type 'float'
            mk_type 'bool'
          ]
        }
      ]
    } # id -> ast pos list
  scope_list = [current_scope]
  scope_stack = []
  return
# Registers an identifier node in the current scope (upush: no duplicates).
scope_id_push = (node)->
  current_scope.id_map[node.value] ?= []
  current_scope.id_map[node.value].upush node
  return
# Returns the scope cached on the node, creating it on first visit.
_mk_scope = (node)->
  return scope if scope = node.__inject_scope
  scope = node.__inject_scope =
    id_map : {}
  scope_list.push scope
  scope
# Enters the scope attached to `node`, saving the current one.
scope_push = (node)->
  scope_stack.push current_scope
  current_scope = _mk_scope node
  return
# Leaves the innermost scope, restoring its parent.
scope_pop = ()->
  current_scope = scope_stack.pop()
  return
# ###################################################################################################
# Type
# ###################################################################################################
# (Duplicate chunk.) Structural type: `main` is the head name, `nest` the
# type parameters (functions: return type first), `field_hash` the named
# fields of object-like types; '*' is the unknown/wildcard type.
class @Type
  main : ''
  nest : []
  field_hash : {} # name -> type
  constructor:()->
    @field_hash = {}
  # Deep structural equality: same head, nest and field keys/types.
  eq : (t)->
    return false if @main != t.main
    return false if @nest.length != t.nest.length
    for v,k in @nest
      return false if !v.eq t.nest[k]
    for k,v1 of @field_hash
      v2 = t.field_hash[k]
      return false if !v2 or !v1.eq v2
    # just check keys
    for k,v2 of t.field_hash
      v1 = @field_hash[k]
      return false if !v1
    true
  # Renders e.g. "array<int>" or "object{a:int}".
  toString : ()->
    nest_part = ""
    if @nest.length
      list = []
      for v in @nest
        list.push v.toString()
      nest_part = "<#{list.join ','}>"
    object_part = ""
    if h_count @field_hash
      list = []
      for k,v of @field_hash
        list.push "#{k}:#{v}" # implicit to string
      object_part = "{#{list.join ','}}"
    "#{@main}#{nest_part}#{object_part}"
  # Like eq, but '*' matches anything (on either side).
  can_match : (t)->
    return true if @main == '*'
    return true if t.main == '*'
    return false if @main != t.main
    return false if @nest.length != t.nest.length
    for v,k in @nest
      v2 = t.nest[k]
      return false if !v.can_match v2
    for k,v1 of @field_hash
      v2 = t.field_hash[k]
      return false if !v2 or !v1.can_match v2
    # just check keys
    for k,v2 of t.field_hash
      v1 = @field_hash[k]
      return false if !v1
    true
  # Whichever side is '*' copies the other's head and nest; returns the
  # number of changes made for fixpoint progress detection.
  exchange_missing_info : (t)->
    if @main == '*' and t.main == '*'
      return 0 # nothing
    else if @main == '*'
      @main = t.main
      @nest = t.nest.clone()
      return 1
    else if t.main == '*'
      t.main = @main
      t.nest = @nest.clone()
      return 1
    ret = 0
    for v,k in @nest
      v2 = t.nest[k]
      ret += v.exchange_missing_info v2
    for k,v1 of @field_hash
      v1.exchange_missing_info t.field_hash[k]
    ret
  # In-place overwrite with another type's content.
  set : (t)->
    @main = t.main
    @nest = t.nest
    @field_hash = t.field_hash
    return
# Convenience constructor for Type.
mk_type = (str, nest=[], field_hash={})->
  ret = new module.Type
  ret.main = str
  ret.nest = nest
  ret.field_hash = field_hash
  ret
# ###################################################################################################
# Wrap a bare Type into a minimal pseudo-ast node (only mx_hash.type is set),
# so plain types can be fed to assert_pass_down_eq where an ast node is expected
type_wrap = (type)->
 { mx_hash : { type : type } }
# TODO flatten either (e.g. either<a,either<b,c> > -> either<a,b,c>)
# Normalize a type into its list of alternatives: an 'either' expands to its
# nested types, any other type becomes a single-element list.
mk_either_list = (t)->
 return t.nest if t.main == 'either'
 [t]
# TODO stop using this helper (deprecated one-way propagation path)
# Attach `type` to `ast` (when known), unwrapping rvalue wrappers and patching
# simple lvalue identifiers; returns the number of type-infos changed.
_assert_pass_down = (ast, type, diagnostics)->
 ret = 0
 # only attach concrete types; the '*' wildcard carries no information
 if type.main != '*'
  ast.mx_hash.type = type
  ret++
 # rvalue unwrap
 if ast.mx_hash.hash_key == 'rvalue'
  ast = ast.value_array[0]
 # lvalue patch
 if ast.mx_hash.hash_key == 'lvalue'
  # case @
  # case @id
  # case id
  # case lvalue[rvalue]
  # case lvalue.id
  # case lvalue.octal/decimal
  # LATER destructuring assignment
  # case id
  [a] = ast.value_array
  if ast.value_array.length == 1 and a.mx_hash.hash_key == '<KEY>'
   if !a.mx_hash.type?
    a.mx_hash.type = type
    ret++
   else
    # UNIMPLEMENTED
 return ret
# Unify ast's type with a plain Type value (wrapped into a pseudo node first)
assert_pass_down = (ast, type, diagnostics)->
 assert_pass_down_eq ast, type_wrap(type), diagnostics
# Core two-way unification of the types of two nodes; returns the number of
# changes made. Either-types are narrowed to the alternatives that can still
# match the other side; fully resolved pairs exchange missing ('*') info.
assert_pass_down_eq = (ast1, ast2, diagnostics)->
 ret = 0
 if ast1.mx_hash.type? and ast2.mx_hash.type?
  either_list1 = mk_either_list ast1.mx_hash.type
  either_list2 = mk_either_list ast2.mx_hash.type
  # BUG normalize
  # keep only alternative pairs that are mutually compatible
  pair_list = []
  for e1 in either_list1
   for e2 in either_list2
    if e1.can_match e2
     pair_list.push [e1,e2]
  if pair_list.length == 0
   perr either_list1
   perr either_list2
   throw new Error "assert pass up eq failed either list can't match"
  t1_possible_list = []
  t2_possible_list = []
  # BUG normalize
  for pair in pair_list
   t1_possible_list.upush pair[0]
   t2_possible_list.upush pair[1]
  # narrow side 1 in place when some alternatives were ruled out
  if either_list1.length != t1_possible_list.length
   if t1_possible_list.length == 1
    ast1.mx_hash.type.set t1_possible_list[0]
   else
    cut_type = mk_type 'either', t1_possible_list
    ast1.mx_hash.type.set cut_type
   ret++
  # narrow side 2 symmetrically
  if either_list2.length != t2_possible_list.length
   if t2_possible_list.length == 1
    ast2.mx_hash.type.set t2_possible_list[0]
   else
    cut_type = mk_type 'either', t2_possible_list
    ast2.mx_hash.type.set cut_type
   ret++
  # BUG. Can't exchange shared info (e.g. either<function<int,float>,function<int,int>> can't send return type )
  if either_list1.length == 1 and either_list2.length == 1
   t1 = either_list1[0]
   t2 = either_list2[0]
   if !t1.eq(t2)
    ret += t1.exchange_missing_info t2
 else if !ast1.mx_hash.type? and !ast2.mx_hash.type?
  # nothing
 else if !ast1.mx_hash.type?
  ret += _assert_pass_down ast1, ast2.mx_hash.type, "#{diagnostics} ast1 down"
 else #!ast2.mx_hash.type?
  ret += _assert_pass_down ast2, ast1.mx_hash.type, "#{diagnostics} ast2 down"
 return ret
# Unify all nodes in ast_list to one common type; returns the number of changes.
# When `type` is not supplied, it is seeded from the first typed element.
assert_pass_down_eq_list = (ast_list, type, diagnostics)->
 ret = 0
 # BUGFIX: only scan for a seed type when the caller did not supply one;
 # previously an explicitly-passed `type` was silently overwritten here
 # (the `if !type` guard existed only as a commented-out TODO)
 if !type
  for v in ast_list
   break if type = v.mx_hash.type
 if type
  for v, idx in ast_list
   ret += assert_pass_down v, type, "#{diagnostics} pos #{idx}"
 return ret
# ###################################################################################################
# Translator dispatch table: each handler returns the number of type-infos it changed
trans = new Translator
trans.key = '<KEY>'
# nodes that carry no type information at all
trans.translator_hash['skip'] = translate:(ctx, node)-> 0
# transparent wrapper: unify the wrapper's type with its single child
trans.translator_hash['pass'] = translate:(ctx, node)->
 child = node.value_array[0]
 ret = ctx.translate child
 ret += assert_pass_down_eq node, child, "pass rule=#{node?.rule?.signature}"
 ret
trans.translator_hash['block'] = translate:(ctx, node)->
 ctx.translate node.value_array[1]
# statement list: its type is the type of its last statement (implicit return)
trans.translator_hash['stmt_plus_last'] = translate:(ctx, node)->
 ret = 0
 for v in node.value_array
  continue if v.mx_hash.hash_key == 'eol'
  ret += ctx.translate v
 child = node.value_array.last()
 if child.mx_hash.type?
  if !node.mx_hash.type?
   node.mx_hash.type = child.mx_hash.type
   ret++
  else
   # UNIMPLEMENTED
 ret
# parenthesized expression: same type as its inner rvalue
trans.translator_hash['bracket'] = translate:(ctx, node)->
 child = node.value_array[1]
 ret = ctx.translate child
 ret += assert_pass_down_eq node, child, "bracket"
 ret
trans.translator_hash['this'] = translate:(ctx, node)->
 # LATER
 0
# identifier: 'true'/'false' are predefined bool constants; any other id is
# registered in the current scope so same-named occurrences can be unified later
trans.translator_hash['id'] = translate:(ctx, node)->
 ret = 0
 is_predefined_const = node.value_view in ['true', 'false'] # .toLowerCase() ??
 if is_predefined_const
  if !node.mx_hash.type?
   node.mx_hash.type = mk_type 'bool'
   ret++
  else
   # UNIMPLEMENTED
 else
  id_node = node.value_array[0]
  if (known_type = id_node.mx_hash.type)?
   if !node.mx_hash.type?
    node.mx_hash.type = known_type
    ret++
   else
    # UNIMPLEMENTED
  scope_id_push id_node
 ret
# const literal: its type must be pre-attached by the parser;
# plain string annotations are promoted to Type instances
trans.translator_hash['const'] = translate:(ctx, node)->
 type = node.mx_hash.type
 if !type?
  ### !pragma coverage-skip-block ###
  throw new Error "You forgot specify type at ti=const"
 if not (type instanceof module.Type)
  node.mx_hash.type = mk_type type
 return 0
# ###################################################################################################
# bin_op
# ###################################################################################################
# Result-type table for binary operators; keys encode (op, left type, right type)
bin_op_type_table = {}
do ()->
 # register result type `ret` for `op` applied to (at, bt); later defs override earlier ones
 def_bin = (op,at,bt,ret)->
  key = <KEY>
  bin_op_type_table[key] = ret
  return
 # mixed int/float arithmetic defaults to float ...
 for op in "+ - * % %%".split " "
  for at in "int float".split " "
   for bt in "int float".split " "
    def_bin op, at, bt, "float"
 # ... but pure-int arithmetic and bit shifts stay int (overrides the defaults above)
 for op in "+ - * % %% << >> >>>".split /\s+/
  def_bin op, "int", "int", "int"
 def_bin "+", "string", "string", "string"
 def_bin "*", "string", "int", "string"
 # true division and power always yield float
 for op in "/ **".split " "
  for at in "int float".split " "
   for bt in "int float".split " "
    def_bin op, at, bt, "float"
 # floor division always yields int
 for at in "int float".split " "
  for bt in "int float".split " "
   def_bin "//", at, bt, "int"
 for op in "and or xor".split /\s+/
  def_bin op, "bool", "bool", "bool"
  def_bin op, "int", "int", "int"
 # comparisons on numbers yield bool
 for type in "int float".split /\s+/
  for op in "< <= > >=".split /\s+/
   def_bin op, type,type, "bool"
# binary operator: resolve the result type from the operand types
trans.translator_hash['bin_op'] = translate:(ctx, node)->
 ret = 0
 rvalue_list = []
 bin_op_list = []
 for v in node.value_array
  rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
  bin_op_list.push v if v.mx_hash.hash_key == 'bin_op'
 bin_op_node = bin_op_list[0]
 op = bin_op_node.value_view
 for v in rvalue_list
  ret += ctx.translate v
 # cases
 # no type detected - can build system that limits a, b and result
 # 1 type detected - can build system that limits second and result
 # 2 type detected - can validate and send result
 # LATER result defined + some args
 [a,b] = rvalue_list
 # pipe is special: only validate the operand kinds, no result type is produced
 if op == '|'
  if a.mx_hash.type?
   unless a.mx_hash.type.main in (a_types = ["array", "Source"])
    pp a.mx_hash.type
    throw new Error "Pipe can't be used for left type #{a.mx_hash.type.main}. Supported types: #{a_types}"
  if b.mx_hash.type?
   unless b.mx_hash.type.main in (b_types = ["function", "array", "Sink"])
    throw new Error "Pipe can't be used for right type #{b.mx_hash.type.main}. Supported types: #{b_types}"
  return 0
 if !a.mx_hash.type? and !b.mx_hash.type?
  # case 1
  # not implemented
 else if a.mx_hash.type? and b.mx_hash.type?
  # case 3
  at = a.mx_hash.type
  bt = b.mx_hash.type
  # resolve result type: == / != unify both sides and yield bool; otherwise table lookup
  loop
   if op in ['==', '!=']
    ret += assert_pass_down_eq a, b, "bin_op eq"
    _ret = 'bool'
    break
   key = <KEY>
   if !_ret = bin_op_type_table[key]
    throw new Error "Type inference: can't find bin_op=#{op} a=#{at} b=#{bt} node=#{node.value}"
   break
  if !node.mx_hash.type?
   node.mx_hash.type = mk_type _ret
   ret++
  else
   # UNIMPLEMENTED
 else
  # case 2
  # not implemented
 return ret
# ###################################################################################################
# assign_bin_op
# ###################################################################################################
# assignment: plain (a = b, op == '') unifies both sides; compound (a op= b)
# validates the operand types against the bin_op result table
trans.translator_hash['assign_bin_op'] = translate:(ctx, node)->
 ret = 0
 rvalue_list = []
 bin_op_list = []
 for v in node.value_array
  rvalue_list.push v if v.mx_hash.hash_key in ['lvalue', 'rvalue']
  bin_op_list.push v if v.mx_hash.hash_key == '<KEY>'
 bin_op_node = bin_op_list[0]
 op = bin_op_node.value_view.replace '=', ''
 for v in rvalue_list
  ctx.translate v
 # cases
 # no type detected - can build system that limits a, b and result
 # 1 type detected - can build system that limits second and result
 # 2 type detected - can validate and send result
 # LATER result defined + some args
 [a,b] = rvalue_list
 if !a.mx_hash.type? and !b.mx_hash.type?
  # case 1
  # not implemented
 else if a.mx_hash.type? and b.mx_hash.type?
  # case 3
  if op == ''
   ret += assert_pass_down_eq a, b, "assign_bin_op"
  else
   at = a.mx_hash.type
   bt = b.mx_hash.type
   key = <KEY>
   if !_ret = bin_op_type_table[key]
    throw new Error "can't find assign_bin_op=#{op} a=#{at} b=#{bt} node=#{node.value}"
   _ret_t = mk_type _ret
   if !_ret_t.eq at
    ### !pragma coverage-skip-block ###
    # NOTE: currently there are no operations where a.type != b.type
    throw new Error "assign_bin_op conflict '#{_ret_t}' != '#{at}'"
   assert_pass_down node, _ret_t, 'assign_bin_op'
 else
  # case 2
  if b.mx_hash.type?
   if op == ''
    ret += assert_pass_down_eq a, b, 'assign_bin_op'
   # BYPASSSING missing code coverage
   ret += assert_pass_down_eq node, b, 'assign_bin_op'
   # if !node.mx_hash.type?
   #  node.mx_hash.type = b.mx_hash.type
   #  ret++
   # else
   #  # UNIMPLEMENTED
  else # a.mx_hash.type?
   if op == ''
    ret += assert_pass_down_eq b, a, 'assign_bin_op'
   if !node.mx_hash.type?
    node.mx_hash.type = a.mx_hash.type
    ret++
   else
    # UNIMPLEMENTED
 ret
# ###################################################################################################
# pre_op
# ###################################################################################################
# Result-type table for prefix operators; keys encode (op, operand type)
pre_op_type_table = {}
def_pre = (op,at,ret)->
 key = <KEY>
 pre_op_type_table[key] = ret
 return
def_pre "-", "int", "int"
def_pre "~", "int", "int"
def_pre "+", "string", "float"
def_pre "!", "bool", "bool"
def_pre "not", "int" , "int"
def_pre "not", "bool", "bool"
# prefix operator: result type is looked up from the operand type
trans.translator_hash['pre_op'] = translate:(ctx, node)->
 ret = 0
 rvalue_list = []
 pre_op_list = []
 for v in node.value_array
  rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
  pre_op_list.push v if v.mx_hash.hash_key == 'pre_op'
 pre_op_node = pre_op_list[0]
 op = pre_op_node.value_view
 for v in rvalue_list
  ret += ctx.translate v
 # cases
 # no type detected - can build system that limits a, b and result
 # 1 type detected - can validate and send result
 [a] = rvalue_list
 if !a.mx_hash.type?
  # case 1
  # not implemented
 else
  # case 2
  at = a.mx_hash.type
  key = <KEY>
  if !_ret = pre_op_type_table[key]
   throw new Error "can't find pre_op=#{op} a=#{at} node=#{node.value}"
  if !node.mx_hash.type?
   node.mx_hash.type = mk_type _ret
   ret++
  else
   # UNIMPLEMENTED
 ret
# LATER
# ###################################################################################################
# post_op
# ###################################################################################################
# Result-type table for postfix operators; keys encode (op, operand type)
post_op_type_table = {}
def_post = (op,at,ret)->
 key = <KEY>
 post_op_type_table[key] = ret
 return
def_post "++", "int", "int"
def_post "--", "int", "int"
# NOTE 1++ is not valid, but passes gram and TI
# postfix operator: result type is looked up from the operand type
trans.translator_hash['post_op'] = translate:(ctx, node)->
 ret = 0
 rvalue_list = []
 post_op_list = []
 for v in node.value_array
  rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
  post_op_list.push v if v.mx_hash.hash_key == 'post_op'
 post_op_node = post_op_list[0]
 op = post_op_node.value_view
 for v in rvalue_list
  ret += ctx.translate v
 # cases
 # no type detected - can build system that limits a, b and result
 # 1 type detected - can validate and send result
 [a] = rvalue_list
 if !a.mx_hash.type?
  # case 1
  # not implemented
 else
  # case 2
  at = a.mx_hash.type
  key = <KEY>
  if !_ret = post_op_type_table[key]
   throw new Error "can't find post_op=#{op} a=#{at} node=#{node.value}"
  if !node.mx_hash.type?
   node.mx_hash.type = mk_type _ret
   ret++
  else
   # UNIMPLEMENTED
 ret
# # ###################################################################################################
# ternary: condition must be bool; both branches and the node itself share one type
trans.translator_hash["ternary"] = translate:(ctx, node)->
 ret = 0
 [cond, _s1, vtrue, _s2, vfalse] = node.value_array
 ret += ctx.translate cond
 ret += assert_pass_down cond, mk_type('bool'), 'ternary'
 ret += ctx.translate vtrue
 ret += ctx.translate vfalse
 # BUGFIX: the diagnostics argument was missing here, so a unification failure
 # reported "undefined ..." instead of naming the ternary context
 ret += assert_pass_down_eq vtrue, vfalse, "ternary"
 ret += assert_pass_down_eq vtrue, node, "ternary"
 return ret
# ###################################################################################################
# array literal: all elements must share one type; the node becomes array<T>
trans.translator_hash["array"] = translate:(ctx, node)->
 ret = 0
 element_list = []
 # collect all rvalue elements, flattening comma/list wrapper nodes
 walk = (node)->
  for sn in node.value_array
   if sn.mx_hash.hash_key == 'rvalue'
    element_list.push sn
   else
    walk sn
  return
 walk node
 for el in element_list
  ret += ctx.translate el
 ret += assert_pass_down_eq_list element_list, undefined, "array decl"
 if element_list[0]?.mx_hash.type?
  subtype = element_list[0].mx_hash.type
  if !node.mx_hash.type?
   node.mx_hash.type = mk_type "array", [subtype]
   ret++
  else
   # UNIMPLEMENTED
 else
  # empty literal or element type still unknown: array<*>
  if !node.mx_hash.type?
   node.mx_hash.type = mk_type "array", [mk_type '*']
   ret++
  else
   # UNIMPLEMENTED
 return ret
# ###################################################################################################
# hash/object literal: computed keys force hash<T> (uniform value type);
# identifier keys build an 'object' type with a per-field type map
trans.translator_hash["hash"] = translate:(ctx, node)->
 ret = 0
 pair_list = []
 # collect all key/value pair nodes, flattening wrapper nodes
 walk = (node)->
  for sn in node.value_array
   if sn.mx_hash.hash_key == 'pair'
    pair_list.push sn
   else
    walk sn
  return
 walk node
 element_list = []
 must_be_hash = false
 for el in pair_list
  rvalue_list = []
  id_list = []
  for sn in el.value_array
   if sn.mx_hash.hash_key == 'identifier'
    id_list.push sn
   else if sn.mx_hash.hash_key == 'rvalue'
    rvalue_list.push sn
    ret += ctx.translate sn
  # shorthand {a} : key and value are the same identifier
  if rvalue_list.length == 0
   key = id_list[0]
   value = id_list[0]
   scope_id_push value
  else if rvalue_list.length == 1
   # NOTE LATER can be missing
   # e.g. {a.b.c} => {c:a.b.c}
   key = id_list[0]
   value = rvalue_list[0]
  else # if rvalue_list.length == 2
   [key, value] = rvalue_list
   must_be_hash = true
  # TODO check key castable to string # same as string iterpolated
  element_list.push {key, value}
 # REMOVE LATER
 if must_be_hash
  ret += assert_pass_down_eq_list element_list.map((t)->t.value), undefined, "hash decl"
 if element_list.length
  if must_be_hash
   subtype = element_list[0].value.mx_hash.type
   if !node.mx_hash.type?
    node.mx_hash.type = mk_type "hash", [subtype]
    ret++
  else
   # named-tuple style object: remember each field's type ('*' if unknown)
   if !node.mx_hash.type?
    node.mx_hash.type = mk_type "object", []
    ret++
   for kv in element_list
    {key, value} = kv
    node.mx_hash.type.field_hash[key.value] = value.mx_hash.type or mk_type '*'
 else
  # empty literal: hash<*>
  if !node.mx_hash.type?
   node.mx_hash.type = mk_type "hash", [mk_type '*']
   ret++
  else
   # UNIMPLEMENTED
 return ret
# ###################################################################################################
# access
# ###################################################################################################
# indexed access root[rvalue]: the index type is fixed by the root's kind and
# the node's type becomes the element type
trans.translator_hash['array_access'] = translate:(ctx, node)->
 ret = 0
 [root, _skip, rvalue] = node.value_array
 ret += ctx.translate root
 ret += ctx.translate rvalue
 # cases
 # 1 array<T> [int ] -> T
 # 2 hash<T> [string] -> T
 if root.mx_hash.type
  subtype = root.mx_hash.type.nest[0]
  switch root.mx_hash.type.main
   when 'array'
    ret += assert_pass_down rvalue, mk_type("int"), "array_access array"
   when 'hash' # NOTE: here hash == dictionary; there is also a named-tuple type
    ret += assert_pass_down rvalue, mk_type("string"), "array_access hash"
   when 'string'
    ret += assert_pass_down rvalue, mk_type("int"), "array_access hash"
    subtype = mk_type 'string'
   # when '*' # can't pass as main type
    # OK
   else
    throw new Error "Trying to access array of not allowed type '#{root.mx_hash.type.main}'"
  if subtype and subtype.main != '*'
   ret += assert_pass_down node, subtype, "array_access"
 ret
# field access root.id: resolve the field's type from the root's kind
trans.translator_hash['id_access'] = translate:(ctx, node)->
 [root, _skip, id] = node.value_array
 ret = ctx.translate root
 if root.mx_hash.type
  subtype = root.mx_hash.type.nest[0]
  switch root.mx_hash.type.main
   when 'array'
    # TODO later impl with field_hash
    if id.value == 'length'
     subtype = mk_type 'int'
    else
     throw new Error "Trying access field '#{id.value}' in array"
   when 'hash' # NOTE: here hash == dictionary; a named tuple would need a per-field type lookup
    'OK'
   when 'object' # named tuple
    field_hash = root.mx_hash.type.field_hash
    if !subtype = field_hash[id.value]
     throw new Error "Trying access field '#{id.value}' in object with fields=#{Object.keys field_hash}"
   else
    throw new Error "Trying to access field '#{id.value}' of not allowed type '#{root.mx_hash.type.main}'"
  if subtype.main != '*' or node.mx_hash.type
   ret += assert_pass_down node, subtype, "id_access"
 ret
# OpenCL-style swizzle access root.id on arrays; multi-character accessors
# keep the whole array type, single-character ones fall through to the element type
trans.translator_hash['opencl_access'] = translate:(ctx, node)->
 [root, _skip, id] = node.value_array
 ret = ctx.translate root
 if root.mx_hash.type
  subtype = root.mx_hash.type.nest[0]
  switch root.mx_hash.type.main
   when 'array'
    if id.value.length != 1
     subtype = root.mx_hash.type
   else
    throw new Error "Trying to access field '#{id.value}' of not allowed type '#{root.mx_hash.type.main}'"
  if subtype and subtype.main != '*'
   ret += assert_pass_down node, subtype, "opencl_access"
 ret
# ###################################################################################################
# function
# ###################################################################################################
# Convert a type-annotation ast node into a Type (by its textual value only)
type_ast_to_obj = (ast)->
 # NOTE WRONG. Need proper handle <>
 mk_type ast.value_view
# function declaration: open a scope for the args and body, then craft the
# function<ret, arg...> type (nest[0] is the return type, 'void' if unannotated)
trans.translator_hash['func_decl'] = translate:(ctx, node)->
 ret = 0
 function_body = null
 arg_list_node = null
 ret_type_node = null
 for v in node.value_array
  arg_list_node = v if v.mx_hash.hash_key == 'arg_list'
  function_body = v if v.mx_hash.hash_key == 'function_body'
  ret_type_node = v if v.mx_hash.hash_key == 'type'
 scope_push node
 # TODO translate arg default values
 arg_list = []
 if arg_list_node?
  # flatten wrapper nodes down to the individual 'arg' nodes
  walk = (node)->
   for v in node.value_array
    if v.mx_hash.hash_key == 'arg'
     arg_list.push v
    else
     walk v
   return
  walk arg_list_node
 # register argument identifiers in the function's own scope
 for v in arg_list
  for sn in v.value_array
   if sn.mx_hash.hash_key == 'identifier'
    scope_id_push sn
   else if sn.mx_hash.hash_key == 'rvalue'
    ctx.translate sn
 if function_body?
  ret += ctx.translate function_body
 scope_pop()
 arg_type_list = []
 if ret_type_node?
  arg_type_list.push type_ast_to_obj ret_type_node
 else
  arg_type_list.push mk_type 'void'
 for v in arg_list
  type = null
  rvalue = null
  # a 3-element arg is either `id : type` or `id = default_rvalue`
  if v.value_array.length == 3
   [id,_skip,type_or_rvalue] = v.value_array
   type = type_or_rvalue if type_or_rvalue.mx_hash.hash_key == 'type'
   rvalue= type_or_rvalue if type_or_rvalue.mx_hash.hash_key == 'rvalue'
  else
   [id] = v.value_array
  if type?
   type_str = type_ast_to_obj type
   assert_pass_down id, type_str, "func arg '#{id.value}'"
  if rvalue?
   assert_pass_down_eq id, rvalue, "func arg '#{id.value}'"
  arg_type_list.push id.mx_hash.type or mk_type "*"
 craft_type = mk_type "function", arg_type_list
 assert_pass_down node, craft_type, "function"
 ret
# function call: when the callee type is known, filter its either-alternatives
# by arity and argument types and propagate a unanimous return type; otherwise
# (with func_call_unroll) craft a function type from the call-site arguments
trans.translator_hash['func_call'] = translate:(ctx, node)->
 ret = 0
 rvalue = node.value_array[0]
 comma_rvalue_node = null
 for v in node.value_array
  if v.mx_hash.hash_key == '<KEY>'
   comma_rvalue_node = v
 arg_list = []
 ret += ctx.translate rvalue
 if comma_rvalue_node
  # flatten the comma-separated argument list
  walk = (node)->
   for v in node.value_array
    if v.mx_hash.hash_key == '<KEY>'
     arg_list.push v
    if v.mx_hash.hash_key == '<KEY>'
     walk v
   # NOTE(review): trailing 'rvalue' is walk's (unused) return value — looks like a leftover
   rvalue
  walk comma_rvalue_node
 for arg in arg_list
  ret += ctx.translate arg
 if rvalue.mx_hash.type
  check_list = mk_either_list rvalue.mx_hash.type
  allowed_signature_list = []
  for type in check_list
   unless type.main in ['function', '*']
    throw new Error "trying to call type='#{type}'"
   # default arg later
   continue if type.nest.length-1 != arg_list.length
   # `found` marks a mismatching argument -> signature is rejected
   found = false
   for i in [1 ... type.nest.length] by 1
    expected_arg_type = type.nest[i]
    real_arg_type = arg_list[i-1].mx_hash.type
    if real_arg_type and !expected_arg_type.can_match real_arg_type
     found = true
     break
   if !found
    allowed_signature_list.push type
  if allowed_signature_list.length == 0
   throw new Error "can't find allowed_signature in '#{check_list.map((t)->t.toString())}'"
  # propagate the return type only if all surviving signatures agree on it
  candidate_type = allowed_signature_list[0].nest[0]
  found = false
  for v in allowed_signature_list
   if !v.nest[0].eq candidate_type
    found = true
    break
  if !found
   ret += assert_pass_down node, candidate_type, "func_call"
 else if ctx.func_call_unroll
  # try to detect function type based on argument call list
  # NOTE BUG: default args are not handled correctly by this crafted signature
  ret_type = node.mx_hash.type or mk_type '*'
  craft_type = mk_type 'function', [ret_type]
  for arg in arg_list
   craft_type.nest.push arg.mx_hash.type or mk_type '*'
  ret += assert_pass_down rvalue, craft_type, "func_call"
 ret
# ###################################################################################################
# macro
# ###################################################################################################
# macro invocation stub: translate its rvalue (if present) and its body block
trans.translator_hash['macro_stub'] = translate:(ctx, node)->
 ret = 0
 block = null
 rvalue = null
 for v in node.value_array
  block = v if v.mx_hash.hash_key == 'block'
  rvalue = v if v.mx_hash.hash_key == 'r<KEY>'
 if rvalue?
  ret += ctx.translate rvalue
 ret += ctx.translate block
 ret
# ###################################################################################################
# string_interpolated
# ###################################################################################################
# interpolated string: translate each embedded rvalue (result is always a string)
trans.translator_hash['string_inter_pass'] = translate:(ctx, node)->
 ret = 0
 for v in node.value_array
  if v.mx_hash.hash_key == 'r<KEY>'
   ret += ctx.translate v
  # TODO check v.mx_hash.type castable to string
 ret
# ###################################################################################################
# scope id pass
# ###################################################################################################
# Unify types across every occurrence of the same identifier within each scope;
# returns the total number of type changes made
scope_id_pass = ()->
 change_count = 0
 for scope in scope_list
  for id,node_list of scope.id_map
   change_count += assert_pass_down_eq_list node_list, undefined, "scope_id_pass '#{id}'"
 return change_count
# ###################################################################################################
# Fixed-point driver: repeatedly run the ast translator plus per-scope identifier
# unification until a pass makes no changes; throws if the iteration limit is hit
@__type_inference = (ast, opt={})->
 change_count = 0
 trans.func_call_unroll = opt.func_call_unroll
 for i in [0 .. 10] # MAGIC
  # phase 1 deep
  # found atoms of known types
  # found bigger atoms that can be constructed for lower ones with 1 pass
  # change_count = +trans.go ast # avoid sink point
  trans.reset()
  change_count = trans.translate ast
  # phase 2 same scope id lookup
  change_count += scope_id_pass()
  # p "change_count=#{change_count}" # DEBUG
  if change_count == 0
   return
 ### !pragma coverage-skip-block ###
 throw new Error "Type inference error. Out of lookup limit change_count(left)=#{change_count}"
# Two-pass inference: a conservative pass first, then a second pass with
# function-call unrolling enabled (caller options still win via obj_set)
@_type_inference = (ast, opt={})->
 scope_state_reset()
 module.__type_inference ast, opt
 unroll_opt = {
  func_call_unroll : true
 }
 obj_set unroll_opt, opt
 module.__type_inference ast, unroll_opt
 return
# Callback-style entry point: invokes on_end(err), err is null on success
@type_inference = (ast, opt, on_end)->
 try
  module._type_inference ast, opt
 catch err
  return on_end err
 on_end null
require 'fy'
require 'fy/lib/codegen'
{
Translator
} = require 'gram2'
module = @
# ###################################################################################################
# scope state
# ###################################################################################################
# MAY BE move to trans
# each scope maps identifier name -> list of ast nodes where it occurs,
# so all occurrences can later be type-unified by scope_id_pass
current_scope =
 id_map : {} # id -> ast pos list
scope_list = [current_scope]
scope_stack = []
# minimal pseudo-ast node carrying only a type (used for builtin identifiers)
fake_id = (type)->
 {
  mx_hash : {
   type
  }
 }
# reset scope state and pre-seed the root scope with builtin identifiers
scope_state_reset = ()->
 current_scope =
  id_map : {
   stdin : [
    fake_id mk_type 'Source', [mk_type 'string']
   ]
   stdout : [
    fake_id mk_type 'Sink', [mk_type 'string'],
   ]
   Math : [
    fake_id mk_type 'object', [], {
     abs : mk_type 'either', [
      mk_type 'function', [mk_type('int'), mk_type('int')]
      mk_type 'function', [mk_type('float'), mk_type('float')]
     ]
     round : mk_type 'function', [mk_type('int'), mk_type('float')]
    }
   ]
   Fail : [ # crafted object for more coverage
    fake_id mk_type 'object', [], {
     invalid_either : mk_type 'either', [
      mk_type 'function', [mk_type('int'), mk_type('int')]
      mk_type 'int'
     ]
    }
   ]
   Either_test : [ # crafted object for more coverage
    fake_id mk_type 'object', [], {
     int_float : mk_type 'either', [
      mk_type 'int'
      mk_type 'float'
     ]
     int_float_bool : mk_type 'either', [
      mk_type 'int'
      mk_type 'float'
      mk_type 'bool'
     ]
    }
   ]
  } # id -> ast pos list
 scope_list = [current_scope]
 scope_stack = []
 return
# record an occurrence of identifier `node` in the current scope (deduplicated via upush)
scope_id_push = (node)->
 current_scope.id_map[node.value] ?= []
 current_scope.id_map[node.value].upush node
 return
# get-or-create the scope attached to an ast node (cached on the node itself)
_mk_scope = (node)->
 return scope if scope = node.__inject_scope
 scope = node.__inject_scope =
  id_map : {}
 scope_list.push scope
 scope
# enter the scope owned by `node`, remembering the previous one
scope_push = (node)->
 scope_stack.push current_scope
 current_scope = _mk_scope node
 return
scope_pop = ()->
 current_scope = scope_stack.pop()
 return
# ###################################################################################################
# Type
# ###################################################################################################
# Structural type used by the inference engine.
# main       : base type name ('int', 'bool', 'array', 'function', 'either', ...; '*' = unknown)
# nest       : type parameters (array<T> -> [T]; for 'function', nest[0] is the return type)
# field_hash : named fields for object types (field name -> Type)
class @Type
 main : ''
 nest : []
 field_hash : {} # name -> type
 constructor:()->
  # per-instance hash; the prototype-level {} above would be shared between instances
  @field_hash = {}
 # strict structural equality: same main, same nest arity and types, same field key sets and types
 eq : (t)->
  return false if @main != t.main
  return false if @nest.length != t.nest.length
  for v,k in @nest
   return false if !v.eq t.nest[k]
  for k,v1 of @field_hash
   v2 = t.field_hash[k]
   return false if !v2 or !v1.eq v2
  # just check keys
  for k,v2 of t.field_hash
   v1 = @field_hash[k]
   return false if !v1
  true
 # render as main<nest,...>{field:type,...}; empty parts are omitted
 toString : ()->
  nest_part = ""
  if @nest.length
   list = []
   for v in @nest
    list.push v.toString()
   nest_part = "<#{list.join ','}>"
  object_part = ""
  if h_count @field_hash
   list = []
   for k,v of @field_hash
    list.push "#{k}:#{v}" # implicit to string
   object_part = "{#{list.join ','}}"
  "#{@main}#{nest_part}#{object_part}"
 # like eq, but the unknown wildcard '*' (on either side) matches anything
 can_match : (t)->
  return true if @main == '*'
  return true if t.main == '*'
  return false if @main != t.main
  return false if @nest.length != t.nest.length
  for v,k in @nest
   v2 = t.nest[k]
   return false if !v.can_match v2
  for k,v1 of @field_hash
   v2 = t.field_hash[k]
   return false if !v2 or !v1.can_match v2
  # just check keys
  for k,v2 of t.field_hash
   v1 = @field_hash[k]
   return false if !v1
  true
 # mutual unification: copy the concrete side over a '*' side (either this or t
 # may be mutated); returns the number of changes made (0 = nothing new learned)
 exchange_missing_info : (t)->
  if @main == '*' and t.main == '*'
   return 0 # nothing
  else if @main == '*'
   @main = t.main
   @nest = t.nest.clone()
   return 1
  else if t.main == '*'
   t.main = @main
   t.nest = @nest.clone()
   return 1
  ret = 0
  for v,k in @nest
   v2 = t.nest[k]
   ret += v.exchange_missing_info v2
  for k,v1 of @field_hash
   v1.exchange_missing_info t.field_hash[k]
  ret
 # overwrite this type in place with t (references are shared, not deep-copied)
 set : (t)->
  @main = t.main
  @nest = t.nest
  @field_hash = t.field_hash
  return
# Construct a Type with the given base name, nested type params and named fields
mk_type = (str, nest=[], field_hash={})->
 t = new module.Type
 t.main = str
 t.nest = nest
 t.field_hash = field_hash
 t
# ###################################################################################################
# Wrap a bare Type into the minimal AST-node shape ({mx_hash: {type}}) so
# it can be handed to assert_pass_down_eq as a synthetic node.
type_wrap = (type)->
  return {mx_hash: {type: type}}
# TODO flatten either (e.g. either<a,either<b,c> > -> either<a,b,c>)
# Normalize a type into its list of alternatives: either<...> expands to
# its nested list, any other type becomes a single-element list.
mk_either_list = (t)->
  return if t.main == 'either' then t.nest else [t]
# TODO: stop using this helper (prefer assert_pass_down_eq)
# One-directional type push: force `type` onto `ast` and, for the simple
# single-identifier lvalue case, onto the identifier token as well.
# Returns the number of mx_hash.type slots that were filled in.
# BUGFIX: the identifier hash_key literal had been destroyed by
# anonymization; restored to 'identifier' to match its use elsewhere in
# this file (pair/arg handling).
_assert_pass_down = (ast, type, diagnostics)->
  ret = 0
  if type.main != '*'
    ast.mx_hash.type = type
    ret++
  # rvalue unwrap
  if ast.mx_hash.hash_key == 'rvalue'
    ast = ast.value_array[0]
  # lvalue patch
  if ast.mx_hash.hash_key == 'lvalue'
    # case @
    # case @id
    # case id
    # case lvalue[rvalue]
    # case lvalue.id
    # case lvalue.octal/decimal
    # LATER destructuring assignment
    # case id
    [a] = ast.value_array
    if ast.value_array.length == 1 and a.mx_hash.hash_key == 'identifier'
      if !a.mx_hash.type?
        a.mx_hash.type = type
        ret++
      else
        # UNIMPLEMENTED
  return ret
# Constrain an AST node to a concrete Type by wrapping the type into a
# synthetic node and delegating to the symmetric propagation below.
assert_pass_down = (ast, type, diagnostics)->
  assert_pass_down_eq ast, type_wrap(type), diagnostics
# Symmetric type-equality constraint between two AST nodes.
# Whichever side has a type pushes it to the other; when both sides have
# types, their either<...> alternative lists are intersected, each side is
# narrowed to the surviving alternatives, and missing nested info ('*')
# is exchanged. Returns the number of changes made.
assert_pass_down_eq = (ast1, ast2, diagnostics)->
  ret = 0
  if ast1.mx_hash.type? and ast2.mx_hash.type?
    either_list1 = mk_either_list ast1.mx_hash.type
    either_list2 = mk_either_list ast2.mx_hash.type
    # BUG normalize
    # cross product of alternatives that can unify
    pair_list = []
    for e1 in either_list1
      for e2 in either_list2
        if e1.can_match e2
          pair_list.push [e1,e2]
    if pair_list.length == 0
      perr either_list1
      perr either_list2
      throw new Error "assert pass up eq failed either list can't match"
    t1_possible_list = []
    t2_possible_list = []
    # BUG normalize
    for pair in pair_list
      t1_possible_list.upush pair[0]
      t2_possible_list.upush pair[1]
    # narrow side 1 to the alternatives that survived the intersection
    if either_list1.length != t1_possible_list.length
      if t1_possible_list.length == 1
        ast1.mx_hash.type.set t1_possible_list[0]
      else
        cut_type = mk_type 'either', t1_possible_list
        ast1.mx_hash.type.set cut_type
      ret++
    # narrow side 2 likewise
    if either_list2.length != t2_possible_list.length
      if t2_possible_list.length == 1
        ast2.mx_hash.type.set t2_possible_list[0]
      else
        cut_type = mk_type 'either', t2_possible_list
        ast2.mx_hash.type.set cut_type
      ret++
    # BUG. Can't exchange shared info (e.g. either<function<int,float>,function<int,int>> can't send return type )
    if either_list1.length == 1 and either_list2.length == 1
      t1 = either_list1[0]
      t2 = either_list2[0]
      if !t1.eq(t2)
        ret += t1.exchange_missing_info t2
  else if !ast1.mx_hash.type? and !ast2.mx_hash.type?
    # nothing
  else if !ast1.mx_hash.type?
    ret += _assert_pass_down ast1, ast2.mx_hash.type, "#{diagnostics} ast1 down"
  else #!ast2.mx_hash.type?
    ret += _assert_pass_down ast2, ast1.mx_hash.type, "#{diagnostics} ast2 down"
  return ret
# Constrain every node in ast_list to share one type. When `type` is not
# supplied, the first node that already carries a type seeds it.
assert_pass_down_eq_list = (ast_list, type, diagnostics)->
  ret = 0
  # if !type # coverage LATER
  for v in ast_list
    # NOTE: assignment inside the condition — grabs the first known type
    break if type = v.mx_hash.type
  if type
    for v, idx in ast_list
      ret += assert_pass_down v, type, "#{diagnostics} pos #{idx}"
  return ret
# ###################################################################################################
# Translator instance driving the bottom-up inference walk over the AST.
trans = new Translator
# NOTE(review): string literal destroyed by anonymization; judging by the
# "at ti=const" error text in the 'const' rule it was most likely 'ti' —
# confirm against upstream history.
trans.key = 'PI:KEY:<KEY>END_PI'
# 'skip': node carries no type information.
trans.translator_hash['skip'] = translate:(ctx, node)-> 0
# 'pass': single-child wrapper; node and child must agree on type.
trans.translator_hash['pass'] = translate:(ctx, node)->
  child = node.value_array[0]
  ret = ctx.translate child
  ret += assert_pass_down_eq node, child, "pass rule=#{node?.rule?.signature}"
  ret
# 'block': only the statement list (value_array[1]) matters for typing.
trans.translator_hash['block'] = translate:(ctx, node)->
  ctx.translate node.value_array[1]
# 'stmt_plus_last': a statement sequence types as its last statement
# (eol tokens are skipped).
trans.translator_hash['stmt_plus_last'] = translate:(ctx, node)->
  ret = 0
  for v in node.value_array
    continue if v.mx_hash.hash_key == 'eol'
    ret += ctx.translate v
  child = node.value_array.last()
  if child.mx_hash.type?
    if !node.mx_hash.type?
      node.mx_hash.type = child.mx_hash.type
      ret++
    else
      # UNIMPLEMENTED
  ret
# 'bracket': parenthesized expression shares its inner expression's type.
trans.translator_hash['bracket'] = translate:(ctx, node)->
  child = node.value_array[1]
  ret = ctx.translate child
  ret += assert_pass_down_eq node, child, "bracket"
  ret
# 'this': @ reference — typing not implemented yet.
trans.translator_hash['this'] = translate:(ctx, node)->
  # LATER
  0
# 'id' rule: the literals `true`/`false` get type bool; any other
# identifier may inherit the type already attached to its identifier
# token and is registered in the current scope for later unification.
trans.translator_hash['id'] = translate:(ctx, node)->
  change_count = 0
  is_bool_literal = node.value_view in ['true', 'false'] # .toLowerCase() ??
  if is_bool_literal
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type 'bool'
      change_count++
    else
      # UNIMPLEMENTED
  else
    id_node = node.value_array[0]
    inner_type = id_node.mx_hash.type
    if inner_type?
      if !node.mx_hash.type?
        node.mx_hash.type = inner_type
        change_count++
      else
        # UNIMPLEMENTED
    scope_id_push id_node
  change_count
# 'const': literals must arrive with a type already attached by the
# parser; a plain string annotation is promoted to a Type instance once.
trans.translator_hash['const'] = translate:(ctx, node)->
  if !node.mx_hash.type?
    ### !pragma coverage-skip-block ###
    throw new Error "You forgot specify type at ti=const"
  unless node.mx_hash.type instanceof module.Type
    node.mx_hash.type = mk_type node.mx_hash.type
  return 0
# ###################################################################################################
# bin_op
# ###################################################################################################
bin_op_type_table = {}
do ()->
  # Register the result type for (op, left type, right type).
  # NOTE(review): the key expression was destroyed by anonymization
  # ('PI:KEY...'); by symmetry with the lookups in the bin_op and
  # assign_bin_op translators it was presumably "#{op} #{at} #{bt}" —
  # confirm against upstream.
  def_bin = (op,at,bt,ret)->
    key = PI:KEY:<KEY>END_PI
    bin_op_type_table[key] = ret
    return
  # arithmetic on int/float mixes defaults to float ...
  for op in "+ - * % %%".split " "
    for at in "int float".split " "
      for bt in "int float".split " "
        def_bin op, at, bt, "float"
  # ... then the pure-int combinations are overridden back to int
  for op in "+ - * % %% << >> >>>".split /\s+/
    def_bin op, "int", "int", "int"
  def_bin "+", "string", "string", "string"
  def_bin "*", "string", "int", "string"
  # true division and exponentiation always yield float
  for op in "/ **".split " "
    for at in "int float".split " "
      for bt in "int float".split " "
        def_bin op, at, bt, "float"
  # floor division always yields int
  for at in "int float".split " "
    for bt in "int float".split " "
      def_bin "//", at, bt, "int"
  # logical ops: bool (logical) and int (bitwise) variants
  for op in "and or xor".split /\s+/
    def_bin op, "bool", "bool", "bool"
    def_bin op, "int", "int", "int"
  # comparisons produce bool
  for type in "int float".split /\s+/
    for op in "< <= > >=".split /\s+/
      def_bin op, type,type, "bool"
# 'bin_op': binary expression. Collects the two operand rvalues and the
# operator token, translates the operands, then derives/validates the
# result type via bin_op_type_table.
trans.translator_hash['bin_op'] = translate:(ctx, node)->
  ret = 0
  rvalue_list = []
  bin_op_list = []
  for v in node.value_array
    rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
    bin_op_list.push v if v.mx_hash.hash_key == 'bin_op'
  bin_op_node = bin_op_list[0]
  op = bin_op_node.value_view
  for v in rvalue_list
    ret += ctx.translate v
  # cases
  #   no type detected - can build system that limits a, b and result
  #   1 type detected - can build system that limits second and result
  #   2 types detected - can validate and send result
  # LATER result defined + some args
  [a,b] = rvalue_list
  # pipe is special-cased: only validates operand kinds, yields no type
  if op == '|'
    if a.mx_hash.type?
      unless a.mx_hash.type.main in (a_types = ["array", "Source"])
        pp a.mx_hash.type
        throw new Error "Pipe can't be used for left type #{a.mx_hash.type.main}. Supported types: #{a_types}"
    if b.mx_hash.type?
      unless b.mx_hash.type.main in (b_types = ["function", "array", "Sink"])
        throw new Error "Pipe can't be used for right type #{b.mx_hash.type.main}. Supported types: #{b_types}"
    return 0
  if !a.mx_hash.type? and !b.mx_hash.type?
    # case 1
    # not implemented
  else if a.mx_hash.type? and b.mx_hash.type?
    # case 3
    at = a.mx_hash.type
    bt = b.mx_hash.type
    loop
      # ==/!= only require both sides to unify; result is bool
      if op in ['==', '!=']
        ret += assert_pass_down_eq a, b, "bin_op eq"
        _ret = 'bool'
        break
      # NOTE(review): key expression lost to anonymization; presumably
      # "#{op} #{at} #{bt}" matching def_bin above — confirm upstream.
      key = PI:KEY:<KEY>END_PI
      if !_ret = bin_op_type_table[key]
        throw new Error "Type inference: can't find bin_op=#{op} a=#{at} b=#{bt} node=#{node.value}"
      break
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type _ret
      ret++
    else
      # UNIMPLEMENTED
  else
    # case 2
    # not implemented
  return ret
# ###################################################################################################
# assign_bin_op
# ###################################################################################################
# 'assign_bin_op': `lhs op= rhs` (plain `=` when op is empty after
# stripping the trailing '='). Propagates types between both sides and
# onto the node itself.
trans.translator_hash['assign_bin_op'] = translate:(ctx, node)->
  ret = 0
  rvalue_list = []
  bin_op_list = []
  for v in node.value_array
    rvalue_list.push v if v.mx_hash.hash_key in ['lvalue', 'rvalue']
    # NOTE(review): hash_key literal lost to anonymization; presumably the
    # operator-token rule of this production (e.g. 'assign_bin_op') —
    # confirm upstream.
    bin_op_list.push v if v.mx_hash.hash_key == 'PI:KEY:<KEY>END_PI'
  bin_op_node = bin_op_list[0]
  # strip the trailing '=' to get the underlying binary operator
  op = bin_op_node.value_view.replace '=', ''
  for v in rvalue_list
    ctx.translate v
  # cases
  #   no type detected - can build system that limits a, b and result
  #   1 type detected - can build system that limits second and result
  #   2 types detected - can validate and send result
  # LATER result defined + some args
  [a,b] = rvalue_list
  if !a.mx_hash.type? and !b.mx_hash.type?
    # case 1
    # not implemented
  else if a.mx_hash.type? and b.mx_hash.type?
    # case 3
    if op == ''
      ret += assert_pass_down_eq a, b, "assign_bin_op"
    else
      at = a.mx_hash.type
      bt = b.mx_hash.type
      # NOTE(review): key expression lost to anonymization; presumably
      # "#{op} #{at} #{bt}" matching def_bin — confirm upstream.
      key = PI:KEY:<KEY>END_PI
      if !_ret = bin_op_type_table[key]
        throw new Error "can't find assign_bin_op=#{op} a=#{at} b=#{bt} node=#{node.value}"
      _ret_t = mk_type _ret
      if !_ret_t.eq at
        ### !pragma coverage-skip-block ###
        # NOTE: for now there are no operations where a.type != b.type
        throw new Error "assign_bin_op conflict '#{_ret_t}' != '#{at}'"
      assert_pass_down node, _ret_t, 'assign_bin_op'
  else
    # case 2
    if b.mx_hash.type?
      if op == ''
        ret += assert_pass_down_eq a, b, 'assign_bin_op'
      # BYPASSSING missing code coverage
      ret += assert_pass_down_eq node, b, 'assign_bin_op'
      # if !node.mx_hash.type?
      #   node.mx_hash.type = b.mx_hash.type
      #   ret++
      # else
      #   # UNIMPLEMENTED
    else # a.mx_hash.type?
      if op == ''
        ret += assert_pass_down_eq b, a, 'assign_bin_op'
      if !node.mx_hash.type?
        node.mx_hash.type = a.mx_hash.type
        ret++
      else
        # UNIMPLEMENTED
  ret
# ###################################################################################################
# pre_op
# ###################################################################################################
pre_op_type_table = {}
# Register the result type for prefix (op, operand type).
# NOTE(review): key expression lost to anonymization; presumably
# "#{op} #{at}" matching the lookup in the pre_op translator below.
def_pre = (op,at,ret)->
  key = PI:KEY:<KEY>END_PI
  pre_op_type_table[key] = ret
  return
def_pre "-", "int", "int"
def_pre "~", "int", "int"
def_pre "+", "string", "float"
def_pre "!", "bool", "bool"
def_pre "not", "int" , "int"
def_pre "not", "bool", "bool"
# 'pre_op': prefix unary expression; once the operand type is known the
# result type is looked up in pre_op_type_table.
trans.translator_hash['pre_op'] = translate:(ctx, node)->
  ret = 0
  rvalue_list = []
  pre_op_list = []
  for v in node.value_array
    rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
    pre_op_list.push v if v.mx_hash.hash_key == 'pre_op'
  pre_op_node = pre_op_list[0]
  op = pre_op_node.value_view
  for v in rvalue_list
    ret += ctx.translate v
  # cases
  #   no type detected - can build system that limits a and result
  #   1 type detected - can validate and send result
  [a] = rvalue_list
  if !a.mx_hash.type?
    # case 1
    # not implemented
  else
    # case 2
    at = a.mx_hash.type
    # NOTE(review): key lost to anonymization; presumably "#{op} #{at}".
    key = PI:KEY:<KEY>END_PI
    if !_ret = pre_op_type_table[key]
      throw new Error "can't find pre_op=#{op} a=#{at} node=#{node.value}"
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type _ret
      ret++
    else
      # UNIMPLEMENTED
  ret
# LATER
# ###################################################################################################
# post_op
# ###################################################################################################
post_op_type_table = {}
# Register the result type for postfix (op, operand type).
# NOTE(review): key expression lost to anonymization; presumably
# "#{op} #{at}" matching the lookup in the post_op translator below.
def_post = (op,at,ret)->
  key = PI:KEY:<KEY>END_PI
  post_op_type_table[key] = ret
  return
def_post "++", "int", "int"
def_post "--", "int", "int"
# NOTE 1++ is not valid, but passes gram and TI
# 'post_op': postfix unary expression (++/--); mirrors the pre_op rule
# using post_op_type_table.
trans.translator_hash['post_op'] = translate:(ctx, node)->
  ret = 0
  rvalue_list = []
  post_op_list = []
  for v in node.value_array
    rvalue_list.push v if v.mx_hash.hash_key == 'rvalue'
    post_op_list.push v if v.mx_hash.hash_key == 'post_op'
  post_op_node = post_op_list[0]
  op = post_op_node.value_view
  for v in rvalue_list
    ret += ctx.translate v
  # cases
  #   no type detected - can build system that limits a and result
  #   1 type detected - can validate and send result
  [a] = rvalue_list
  if !a.mx_hash.type?
    # case 1
    # not implemented
  else
    # case 2
    at = a.mx_hash.type
    # NOTE(review): key lost to anonymization; presumably "#{op} #{at}".
    key = PI:KEY:<KEY>END_PI
    if !_ret = post_op_type_table[key]
      throw new Error "can't find post_op=#{op} a=#{at} node=#{node.value}"
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type _ret
      ret++
    else
      # UNIMPLEMENTED
  ret
# # ###################################################################################################
# 'ternary': the condition must be bool; the two branches and the whole
# expression are forced to share one type.
trans.translator_hash["ternary"] = translate:(ctx, node)->
  ret = 0
  [cond, _s1, vtrue, _s2, vfalse] = node.value_array
  ret += ctx.translate cond
  ret += assert_pass_down cond, mk_type('bool'), 'ternary'
  ret += ctx.translate vtrue
  ret += ctx.translate vfalse
  # BUGFIX: the diagnostics argument was missing here, so failures from
  # this constraint reported "undefined" instead of a rule name.
  ret += assert_pass_down_eq vtrue, vfalse, "ternary"
  ret += assert_pass_down_eq vtrue, node, "ternary"
  return ret
# ###################################################################################################
# 'array' literal: gather element rvalues (nested through separator
# nodes), force them all to one type, and type the node as array<T>
# (array<*> for an empty or still-untyped literal).
trans.translator_hash["array"] = translate:(ctx, node)->
  ret = 0
  element_list = []
  walk = (node)->
    for sn in node.value_array
      if sn.mx_hash.hash_key == 'rvalue'
        element_list.push sn
      else
        walk sn
    return
  walk node
  for el in element_list
    ret += ctx.translate el
  ret += assert_pass_down_eq_list element_list, undefined, "array decl"
  if element_list[0]?.mx_hash.type?
    subtype = element_list[0].mx_hash.type
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type "array", [subtype]
      ret++
    else
      # UNIMPLEMENTED
  else
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type "array", [mk_type '*']
      ret++
    else
      # UNIMPLEMENTED
  return ret
# ###################################################################################################
# 'hash' literal: collects key/value pairs. Shorthand {a} and {key: v}
# build an 'object' (named-tuple) type with a field map; a computed key
# (two rvalues in a pair) forces a homogeneous 'hash' type instead.
trans.translator_hash["hash"] = translate:(ctx, node)->
  ret = 0
  pair_list = []
  walk = (node)->
    for sn in node.value_array
      if sn.mx_hash.hash_key == 'pair'
        pair_list.push sn
      else
        walk sn
    return
  walk node
  element_list = []
  must_be_hash = false
  for el in pair_list
    rvalue_list = []
    id_list = []
    for sn in el.value_array
      if sn.mx_hash.hash_key == 'identifier'
        id_list.push sn
      else if sn.mx_hash.hash_key == 'rvalue'
        rvalue_list.push sn
        ret += ctx.translate sn
    if rvalue_list.length == 0
      # shorthand {a} — key and value are the same identifier
      key = id_list[0]
      value = id_list[0]
      scope_id_push value
    else if rvalue_list.length == 1
      # NOTE LATER can be missing
      # e.g. {a.b.c} => {c:a.b.c}
      key = id_list[0]
      value = rvalue_list[0]
    else # if rvalue_list.length == 2
      # computed key — cannot be modelled as a named tuple
      [key, value] = rvalue_list
      must_be_hash = true
    # TODO check key castable to string # same as string iterpolated
    element_list.push {key, value}
  # REMOVE LATER
  if must_be_hash
    ret += assert_pass_down_eq_list element_list.map((t)->t.value), undefined, "hash decl"
  if element_list.length
    if must_be_hash
      subtype = element_list[0].value.mx_hash.type
      if !node.mx_hash.type?
        node.mx_hash.type = mk_type "hash", [subtype]
        ret++
    else
      if !node.mx_hash.type?
        node.mx_hash.type = mk_type "object", []
        ret++
      for kv in element_list
        {key, value} = kv
        node.mx_hash.type.field_hash[key.value] = value.mx_hash.type or mk_type '*'
  else
    if !node.mx_hash.type?
      node.mx_hash.type = mk_type "hash", [mk_type '*']
      ret++
    else
      # UNIMPLEMENTED
  return ret
# ###################################################################################################
# access
# ###################################################################################################
# 'array_access': `root[expr]`. The index expression is constrained by
# the container kind (array -> int, hash -> string, string -> int) and
# the node receives the container's element type.
trans.translator_hash['array_access'] = translate:(ctx, node)->
  ret = 0
  [root, _skip, rvalue] = node.value_array
  ret += ctx.translate root
  ret += ctx.translate rvalue
  # cases
  # 1 array<T> [int ] -> T
  # 2 hash<T> [string] -> T
  if root.mx_hash.type
    subtype = root.mx_hash.type.nest[0]
    switch root.mx_hash.type.main
      when 'array'
        ret += assert_pass_down rvalue, mk_type("int"), "array_access array"
      when 'hash' # NOTE: here hash == dictionary; named tuples are a separate type
        ret += assert_pass_down rvalue, mk_type("string"), "array_access hash"
      when 'string'
        # BUGFIX: diagnostics label said "array_access hash" (copy-paste)
        ret += assert_pass_down rvalue, mk_type("int"), "array_access string"
        # indexing a string yields a string
        subtype = mk_type 'string'
      # when '*' # can't pass as main type
        # OK
      else
        throw new Error "Trying to access array of not allowed type '#{root.mx_hash.type.main}'"
  if subtype and subtype.main != '*'
    ret += assert_pass_down node, subtype, "array_access"
  ret
# 'id_access': `root.field`. Resolves the field's type from the container
# kind (array.length, hash element type, object field map).
trans.translator_hash['id_access'] = translate:(ctx, node)->
  [root, _skip, id] = node.value_array
  ret = ctx.translate root
  if root.mx_hash.type
    subtype = root.mx_hash.type.nest[0]
    switch root.mx_hash.type.main
      when 'array'
        # TODO later impl with field_hash
        if id.value == 'length'
          subtype = mk_type 'int'
        else
          throw new Error "Trying access field '#{id.value}' in array"
      when 'hash' # NOTE: here hash == dictionary; for the named-tuple type each field's own type must be inspected
        'OK'
      when 'object' # named tuple
        field_hash = root.mx_hash.type.field_hash
        if !subtype = field_hash[id.value]
          throw new Error "Trying access field '#{id.value}' in object with fields=#{Object.keys field_hash}"
      else
        throw new Error "Trying to access field '#{id.value}' of not allowed type '#{root.mx_hash.type.main}'"
  # NOTE(review): if root has no type yet `subtype` is undefined here and
  # `.main` would throw — presumably root is always typed first; confirm.
  if subtype.main != '*' or node.mx_hash.type
    ret += assert_pass_down node, subtype, "id_access"
  ret
# 'opencl_access': OpenCL-style component access on arrays; a
# multi-character field keeps the whole array type (presumably a swizzle
# like .xy — confirm), while other cases keep the element type.
trans.translator_hash['opencl_access'] = translate:(ctx, node)->
  [root, _skip, id] = node.value_array
  ret = ctx.translate root
  if root.mx_hash.type
    subtype = root.mx_hash.type.nest[0]
    switch root.mx_hash.type.main
      when 'array'
        if id.value.length != 1
          subtype = root.mx_hash.type
      else
        throw new Error "Trying to access field '#{id.value}' of not allowed type '#{root.mx_hash.type.main}'"
  if subtype and subtype.main != '*'
    ret += assert_pass_down node, subtype, "opencl_access"
  ret
# ###################################################################################################
# function
# ###################################################################################################
# Convert a 'type' annotation AST node into a Type object.
type_ast_to_obj = (ast)->
  # NOTE WRONG. Need proper handle <>
  mk_type ast.value_view
# 'func_decl': function literal. Translates argument defaults and the
# body inside a fresh scope, then crafts function<ret, arg1, ...> for
# the node (nest[0] is the return type, void when unannotated).
trans.translator_hash['func_decl'] = translate:(ctx, node)->
  ret = 0
  function_body = null
  arg_list_node = null
  ret_type_node = null
  for v in node.value_array
    arg_list_node = v if v.mx_hash.hash_key == 'arg_list'
    function_body = v if v.mx_hash.hash_key == 'function_body'
    ret_type_node = v if v.mx_hash.hash_key == 'type'
  # arguments live in the function's own scope
  scope_push node
  # TODO translate arg default values
  arg_list = []
  if arg_list_node?
    walk = (node)->
      for v in node.value_array
        if v.mx_hash.hash_key == 'arg'
          arg_list.push v
        else
          walk v
      return
    walk arg_list_node
    for v in arg_list
      for sn in v.value_array
        if sn.mx_hash.hash_key == 'identifier'
          scope_id_push sn
        else if sn.mx_hash.hash_key == 'rvalue'
          ctx.translate sn
  if function_body?
    ret += ctx.translate function_body
  scope_pop()
  arg_type_list = []
  if ret_type_node?
    arg_type_list.push type_ast_to_obj ret_type_node
  else
    arg_type_list.push mk_type 'void'
  for v in arg_list
    type = null
    rvalue = null
    # arg node is either [id] or [id, skip, type-or-default]
    if v.value_array.length == 3
      [id,_skip,type_or_rvalue] = v.value_array
      type = type_or_rvalue if type_or_rvalue.mx_hash.hash_key == 'type'
      rvalue= type_or_rvalue if type_or_rvalue.mx_hash.hash_key == 'rvalue'
    else
      [id] = v.value_array
    if type?
      type_str = type_ast_to_obj type
      assert_pass_down id, type_str, "func arg '#{id.value}'"
    if rvalue?
      # argument default value constrains the argument's type
      assert_pass_down_eq id, rvalue, "func arg '#{id.value}'"
    arg_type_list.push id.mx_hash.type or mk_type "*"
  craft_type = mk_type "function", arg_type_list
  assert_pass_down node, craft_type, "function"
  ret
# 'func_call': callee rvalue plus comma-separated argument list. Filters
# the callee's either<function<...>> alternatives by arity and argument
# types; with ctx.func_call_unroll set it also reconstructs the callee's
# function type from the call site.
trans.translator_hash['func_call'] = translate:(ctx, node)->
  ret = 0
  rvalue = node.value_array[0]
  comma_rvalue_node = null
  for v in node.value_array
    # NOTE(review): hash_key literal lost to anonymization; presumably
    # the comma-separated argument-list rule — confirm upstream.
    if v.mx_hash.hash_key == 'PI:KEY:<KEY>END_PI'
      comma_rvalue_node = v
  arg_list = []
  ret += ctx.translate rvalue
  if comma_rvalue_node
    walk = (node)->
      for v in node.value_array
        # NOTE(review): two more anonymized hash_key literals below;
        # presumably 'rvalue' (collect) and the list rule (recurse).
        if v.mx_hash.hash_key == 'PI:KEY:<KEY>END_PI'
          arg_list.push v
        if v.mx_hash.hash_key == 'PI:KEY:<KEY>END_PI'
          walk v
      # NOTE(review): stray trailing expression — walk's return value is
      # never used; looks like a leftover.
      rvalue
    walk comma_rvalue_node
  for arg in arg_list
    ret += ctx.translate arg
  if rvalue.mx_hash.type
    check_list = mk_either_list rvalue.mx_hash.type
    allowed_signature_list = []
    for type in check_list
      unless type.main in ['function', '*']
        throw new Error "trying to call type='#{type}'"
      # default arg later
      continue if type.nest.length-1 != arg_list.length
      found = false
      for i in [1 ... type.nest.length] by 1
        expected_arg_type = type.nest[i]
        real_arg_type = arg_list[i-1].mx_hash.type
        if real_arg_type and !expected_arg_type.can_match real_arg_type
          found = true
          break
      if !found
        allowed_signature_list.push type
    if allowed_signature_list.length == 0
      throw new Error "can't find allowed_signature in '#{check_list.map((t)->t.toString())}'"
    # if every surviving signature agrees on the return type, adopt it
    candidate_type = allowed_signature_list[0].nest[0]
    found = false
    for v in allowed_signature_list
      if !v.nest[0].eq candidate_type
        found = true
        break
    if !found
      ret += assert_pass_down node, candidate_type, "func_call"
  else if ctx.func_call_unroll
    # try to detect function type based on argument call list
    # NOTE BUG. Default args will be FUCKed
    ret_type = node.mx_hash.type or mk_type '*'
    craft_type = mk_type 'function', [ret_type]
    for arg in arg_list
      craft_type.nest.push arg.mx_hash.type or mk_type '*'
    ret += assert_pass_down rvalue, craft_type, "func_call"
  ret
# ###################################################################################################
# macro
# ###################################################################################################
# 'macro_stub': macro with an optional rvalue argument and a block body;
# both are translated, the node itself stays untyped.
trans.translator_hash['macro_stub'] = translate:(ctx, node)->
  ret = 0
  block = null
  rvalue = null
  for v in node.value_array
    block = v if v.mx_hash.hash_key == 'block'
    # NOTE(review): anonymization-damaged literal starting with 'r';
    # presumably 'rvalue' — confirm upstream.
    rvalue = v if v.mx_hash.hash_key == 'rPI:KEY:<KEY>END_PI'
  if rvalue?
    ret += ctx.translate rvalue
  ret += ctx.translate block
  ret
# ###################################################################################################
# string_interpolated
# ###################################################################################################
# 'string_inter_pass': interpolated string; translates each embedded
# expression (the string's own type is left alone).
trans.translator_hash['string_inter_pass'] = translate:(ctx, node)->
  ret = 0
  for v in node.value_array
    # NOTE(review): anonymization-damaged literal; presumably 'rvalue'.
    if v.mx_hash.hash_key == 'rPI:KEY:<KEY>END_PI'
      ret += ctx.translate v
      # TODO check v.mx_hash.type castable to string
  ret
# ###################################################################################################
# scope id pass
# ###################################################################################################
# Phase 2: for every scope, force all AST nodes that refer to the same
# identifier to share one type. Returns the number of changes made.
scope_id_pass = ()->
  ret = 0
  for scope in scope_list
    for k,list of scope.id_map
      # p list.map((t)->t.mx_hash) # DEBUG
      ret += assert_pass_down_eq_list list, undefined, "scope_id_pass '#{k}'"
  # p "scope_id_pass=#{ret}" # DEBUG
  return ret
# ###################################################################################################
# Fixed-point driver: repeats tree walk + scope unification until a full
# pass makes no change; throws if the hard iteration cap is exhausted
# (i.e. inference did not converge).
@__type_inference = (ast, opt={})->
  change_count = 0
  trans.func_call_unroll = opt.func_call_unroll
  for i in [0 .. 10] # MAGIC
    # phase 1 deep
    #   found atoms of known types
    #   found bigger atoms that can be constructed for lower ones with 1 pass
    # change_count = +trans.go ast # avoid sink point
    trans.reset()
    change_count = trans.translate ast
    # phase 2 same scope id lookup
    change_count += scope_id_pass()
    # p "change_count=#{change_count}" # DEBUG
    if change_count == 0
      return
  ### !pragma coverage-skip-block ###
  throw new Error "Type inference error. Out of lookup limit change_count(left)=#{change_count}"
# Two-stage entry point: a conservative run first, then a second run with
# func_call_unroll enabled to recover callee types from call sites.
@_type_inference = (ast, opt={})->
  scope_state_reset()
  module.__type_inference ast, opt
  opt2 = {
    func_call_unroll : true
  }
  obj_set opt2, opt
  module.__type_inference ast, opt2
  return
# Callback-style wrapper: runs inference and reports the outcome through
# on_end (error object on failure, null on success).
@type_inference = (ast, opt, on_end)->
  err = null
  try
    module._type_inference ast, opt
  catch e
    err = e
  on_end err
# =============================================================================
# (dataset-extraction artifact removed; boundary to the next file:
#  src/tests/rules/operator-assignment.coffee,
#  repo danielbayley/eslint-plugin-coffee)
# =============================================================================
###*
# @fileoverview Tests for operator-assignment rule.
# @author Brandon Mills
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/operator-assignment'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
# the parser option points three directories up (the repository root)
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
# shared expected-error fixtures for the cases below
EXPECTED_OPERATOR_ASSIGNMENT = [
  message: 'Assignment can be replaced with operator assignment.'
  type: 'AssignmentExpression'
]
UNEXPECTED_OPERATOR_ASSIGNMENT = [
  message: 'Unexpected operator assignment shorthand.'
  type: 'AssignmentExpression'
]
# Fixture matrix for the operator-assignment rule: `valid` cases must
# produce no report; each `invalid` case lists the expected report and,
# when safe, the autofix result (`output: null` marks cases deliberately
# left unfixed, e.g. possible getter/valueOf side effects).
ruleTester.run 'operator-assignment', rule,
  valid: [
    'x = y'
    'x = y + x'
    'x += x + y'
    'x = (x + y) - z'
    'x -= y'
    'x = y - x'
    'x *= x'
    'x = y * z'
    'x = (x * y) * z'
    'x = y / x'
    'x /= y'
    'x %= y'
    'x <<= y'
    'x >>= x >> y'
    'x >>>= y'
    'x &= y'
    'x **= y'
    'x ^= y ^ z'
    'x |= x | y'
    'x = x < y'
    'x = x > y'
    'x = x <= y'
    'x = x >= y'
    'x = x instanceof y'
    'x = x in y'
    'x = x == y'
    'x = x != y'
    'x = x is y'
    'x = x isnt y'
    "x[y] = x['y'] + z"
    "x.y = x['y'] / z"
    'x.y = z + x.y'
    'x[fn()] = x[fn()] + y'
  ,
    code: 'x += x + y'
    options: ['always']
  ,
    code: 'x = x + y'
    options: ['never']
  ,
    code: 'x = x ** y'
    options: ['never']
  ,
    'x = y ** x'
    'x = x * y + z'
  ]
  invalid: [
    code: 'x = x + y'
    output: 'x += y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x - y'
    output: 'x -= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x * y'
    output: 'x *= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = y * x'
    output: null # not fixed (possible change in behavior if y and x have valueOf() functions)
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = (y * z) * x'
    output: null # not fixed (possible change in behavior if y/z and x have valueOf() functions)
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x / y'
    output: 'x /= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x % y'
    output: 'x %= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x << y'
    output: 'x <<= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x >> y'
    output: 'x >>= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x >>> y'
    output: 'x >>>= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x & y'
    output: 'x &= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x ^ y'
    output: 'x ^= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x | y'
    output: 'x |= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: '''
      x = 1
      x = x && y
    '''
    output: '''
      x = 1
      x &&= y
    '''
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: '''
      x = 1
      x = x || y
    '''
    output: '''
      x = 1
      x ||= y
    '''
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: '''
      x = 1
      x = x ? y
    '''
    output: '''
      x = 1
      x ?= y
    '''
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x[0] = x[0] - y'
    output: 'x[0] -= y'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: "x.y[z['a']][0].b = x.y[z['a']][0].b * 2"
    output: null # not fixed; might activate getters more than before
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x + y'
    output: 'x += y'
    options: ['always']
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = (x + y)'
    output: 'x += y'
    options: ['always']
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x = x + (y)'
    output: 'x += (y)'
    options: ['always']
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x += (y)'
    output: 'x = x + (y)'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'x += y'
    output: 'x = x + y'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo.bar = foo.bar + baz'
    output: 'foo.bar += baz'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo.bar += baz'
    output: 'foo.bar = foo.bar + baz'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo.bar.baz = foo.bar.baz + qux'
    output: null # not fixed; fixing would cause a foo.bar getter to activate once rather than twice
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo.bar.baz += qux'
    output: null # not fixed; fixing would cause a foo.bar getter to activate twice rather than once
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo[bar] = foo[bar] + baz'
    output: null # not fixed; fixing would cause bar.toString() to get called once instead of twice
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo[bar] >>>= baz'
    output: null # not fixed; fixing would cause bar.toString() to get called twice instead of once
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo[5] = foo[5] / baz'
    output: 'foo[5] /= baz' # this is ok because 5 is a literal, so toString won't get called
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo.bar ^= ((((((((((((((((baz))))))))))))))))'
    output: 'foo.bar = foo.bar ^ ((((((((((((((((baz))))))))))))))))'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo = foo ** bar'
    output: 'foo **= bar'
    errors: EXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo **= bar'
    output: 'foo = foo ** bar'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo *= bar + 1'
    output: 'foo = foo * (bar + 1)'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo -= bar - baz'
    output: 'foo = foo - (bar - baz)'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo += bar + baz'
    output: 'foo = foo + (bar + baz)' # addition is not associative in JS, e.g. (1 + 2) + '3' !== 1 + (2 + '3')
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo += bar = 1'
    output: 'foo = foo + (bar = 1)'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: '''
      foo = 1
      foo ||= bar ? 1
    '''
    output: '''
      foo = 1
      foo = foo || (bar ? 1)
    '''
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ,
    code: 'foo *= (bar + 1)'
    output: 'foo = foo * (bar + 1)'
    options: ['never']
    errors: UNEXPECTED_OPERATOR_ASSIGNMENT
  ]
| 92769 | ###*
# @fileoverview Tests for operator-assignment rule.
# @author <NAME>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/operator-assignment'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
EXPECTED_OPERATOR_ASSIGNMENT = [
message: 'Assignment can be replaced with operator assignment.'
type: 'AssignmentExpression'
]
UNEXPECTED_OPERATOR_ASSIGNMENT = [
message: 'Unexpected operator assignment shorthand.'
type: 'AssignmentExpression'
]
ruleTester.run 'operator-assignment', rule,
valid: [
'x = y'
'x = y + x'
'x += x + y'
'x = (x + y) - z'
'x -= y'
'x = y - x'
'x *= x'
'x = y * z'
'x = (x * y) * z'
'x = y / x'
'x /= y'
'x %= y'
'x <<= y'
'x >>= x >> y'
'x >>>= y'
'x &= y'
'x **= y'
'x ^= y ^ z'
'x |= x | y'
'x = x < y'
'x = x > y'
'x = x <= y'
'x = x >= y'
'x = x instanceof y'
'x = x in y'
'x = x == y'
'x = x != y'
'x = x is y'
'x = x isnt y'
"x[y] = x['y'] + z"
"x.y = x['y'] / z"
'x.y = z + x.y'
'x[fn()] = x[fn()] + y'
,
code: 'x += x + y'
options: ['always']
,
code: 'x = x + y'
options: ['never']
,
code: 'x = x ** y'
options: ['never']
,
'x = y ** x'
'x = x * y + z'
]
invalid: [
code: 'x = x + y'
output: 'x += y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x - y'
output: 'x -= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x * y'
output: 'x *= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = y * x'
output: null # not fixed (possible change in behavior if y and x have valueOf() functions)
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = (y * z) * x'
output: null # not fixed (possible change in behavior if y/z and x have valueOf() functions)
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x / y'
output: 'x /= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x % y'
output: 'x %= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x << y'
output: 'x <<= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x >> y'
output: 'x >>= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x >>> y'
output: 'x >>>= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x & y'
output: 'x &= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x ^ y'
output: 'x ^= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x | y'
output: 'x |= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
x = 1
x = x && y
'''
output: '''
x = 1
x &&= y
'''
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
x = 1
x = x || y
'''
output: '''
x = 1
x ||= y
'''
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
x = 1
x = x ? y
'''
output: '''
x = 1
x ?= y
'''
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x[0] = x[0] - y'
output: 'x[0] -= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: "x.y[z['a']][0].b = x.y[z['a']][0].b * 2"
output: null # not fixed; might activate getters more than before
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x + y'
output: 'x += y'
options: ['always']
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = (x + y)'
output: 'x += y'
options: ['always']
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x + (y)'
output: 'x += (y)'
options: ['always']
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x += (y)'
output: 'x = x + (y)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x += y'
output: 'x = x + y'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar = foo.bar + baz'
output: 'foo.bar += baz'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar += baz'
output: 'foo.bar = foo.bar + baz'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar.baz = foo.bar.baz + qux'
output: null # not fixed; fixing would cause a foo.bar getter to activate once rather than twice
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar.baz += qux'
output: null # not fixed; fixing would cause a foo.bar getter to activate twice rather than once
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo[bar] = foo[bar] + baz'
output: null # not fixed; fixing would cause bar.toString() to get called once instead of twice
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo[bar] >>>= baz'
output: null # not fixed; fixing would cause bar.toString() to get called twice instead of once
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo[5] = foo[5] / baz'
output: 'foo[5] /= baz' # this is ok because 5 is a literal, so toString won't get called
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar ^= ((((((((((((((((baz))))))))))))))))'
output: 'foo.bar = foo.bar ^ ((((((((((((((((baz))))))))))))))))'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo = foo ** bar'
output: 'foo **= bar'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo **= bar'
output: 'foo = foo ** bar'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo *= bar + 1'
output: 'foo = foo * (bar + 1)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo -= bar - baz'
output: 'foo = foo - (bar - baz)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo += bar + baz'
output: 'foo = foo + (bar + baz)' # addition is not associative in JS, e.g. (1 + 2) + '3' !== 1 + (2 + '3')
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo += bar = 1'
output: 'foo = foo + (bar = 1)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
foo = 1
foo ||= bar ? 1
'''
output: '''
foo = 1
foo = foo || (bar ? 1)
'''
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo *= (bar + 1)'
output: 'foo = foo * (bar + 1)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
]
| true | ###*
# @fileoverview Tests for operator-assignment rule.
# @author PI:NAME:<NAME>END_PI
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require '../../rules/operator-assignment'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
EXPECTED_OPERATOR_ASSIGNMENT = [
message: 'Assignment can be replaced with operator assignment.'
type: 'AssignmentExpression'
]
UNEXPECTED_OPERATOR_ASSIGNMENT = [
message: 'Unexpected operator assignment shorthand.'
type: 'AssignmentExpression'
]
ruleTester.run 'operator-assignment', rule,
valid: [
'x = y'
'x = y + x'
'x += x + y'
'x = (x + y) - z'
'x -= y'
'x = y - x'
'x *= x'
'x = y * z'
'x = (x * y) * z'
'x = y / x'
'x /= y'
'x %= y'
'x <<= y'
'x >>= x >> y'
'x >>>= y'
'x &= y'
'x **= y'
'x ^= y ^ z'
'x |= x | y'
'x = x < y'
'x = x > y'
'x = x <= y'
'x = x >= y'
'x = x instanceof y'
'x = x in y'
'x = x == y'
'x = x != y'
'x = x is y'
'x = x isnt y'
"x[y] = x['y'] + z"
"x.y = x['y'] / z"
'x.y = z + x.y'
'x[fn()] = x[fn()] + y'
,
code: 'x += x + y'
options: ['always']
,
code: 'x = x + y'
options: ['never']
,
code: 'x = x ** y'
options: ['never']
,
'x = y ** x'
'x = x * y + z'
]
invalid: [
code: 'x = x + y'
output: 'x += y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x - y'
output: 'x -= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x * y'
output: 'x *= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = y * x'
output: null # not fixed (possible change in behavior if y and x have valueOf() functions)
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = (y * z) * x'
output: null # not fixed (possible change in behavior if y/z and x have valueOf() functions)
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x / y'
output: 'x /= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x % y'
output: 'x %= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x << y'
output: 'x <<= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x >> y'
output: 'x >>= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x >>> y'
output: 'x >>>= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x & y'
output: 'x &= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x ^ y'
output: 'x ^= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x | y'
output: 'x |= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
x = 1
x = x && y
'''
output: '''
x = 1
x &&= y
'''
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
x = 1
x = x || y
'''
output: '''
x = 1
x ||= y
'''
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
x = 1
x = x ? y
'''
output: '''
x = 1
x ?= y
'''
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x[0] = x[0] - y'
output: 'x[0] -= y'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: "x.y[z['a']][0].b = x.y[z['a']][0].b * 2"
output: null # not fixed; might activate getters more than before
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x + y'
output: 'x += y'
options: ['always']
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = (x + y)'
output: 'x += y'
options: ['always']
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x = x + (y)'
output: 'x += (y)'
options: ['always']
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x += (y)'
output: 'x = x + (y)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'x += y'
output: 'x = x + y'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar = foo.bar + baz'
output: 'foo.bar += baz'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar += baz'
output: 'foo.bar = foo.bar + baz'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar.baz = foo.bar.baz + qux'
output: null # not fixed; fixing would cause a foo.bar getter to activate once rather than twice
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar.baz += qux'
output: null # not fixed; fixing would cause a foo.bar getter to activate twice rather than once
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo[bar] = foo[bar] + baz'
output: null # not fixed; fixing would cause bar.toString() to get called once instead of twice
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo[bar] >>>= baz'
output: null # not fixed; fixing would cause bar.toString() to get called twice instead of once
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo[5] = foo[5] / baz'
output: 'foo[5] /= baz' # this is ok because 5 is a literal, so toString won't get called
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo.bar ^= ((((((((((((((((baz))))))))))))))))'
output: 'foo.bar = foo.bar ^ ((((((((((((((((baz))))))))))))))))'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo = foo ** bar'
output: 'foo **= bar'
errors: EXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo **= bar'
output: 'foo = foo ** bar'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo *= bar + 1'
output: 'foo = foo * (bar + 1)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo -= bar - baz'
output: 'foo = foo - (bar - baz)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo += bar + baz'
output: 'foo = foo + (bar + baz)' # addition is not associative in JS, e.g. (1 + 2) + '3' !== 1 + (2 + '3')
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo += bar = 1'
output: 'foo = foo + (bar = 1)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: '''
foo = 1
foo ||= bar ? 1
'''
output: '''
foo = 1
foo = foo || (bar ? 1)
'''
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
,
code: 'foo *= (bar + 1)'
output: 'foo = foo * (bar + 1)'
options: ['never']
errors: UNEXPECTED_OPERATOR_ASSIGNMENT
]
|
[
{
"context": "rldFromVariable', ->\n assert.equal @person, \"david\"\n assert.equal @body, \"hello david\"\n \n ",
"end": 449,
"score": 0.99652099609375,
"start": 444,
"tag": "NAME",
"value": "david"
},
{
"context": " @person, \"david\"\n assert.equal @body, \"hel... | test/cases/controller/renderTemplateTest.coffee | ludicast/tower | 1 | require '../../config'
controller = null
user = null
router = null
describe 'Tower.Controller.Rendering', ->
test 'renderCoffeeKupFromTemplate', ->
Tower.get 'renderCoffeeKupFromTemplate', ->
assert.equal @body, "<h1>Hello World</h1>\n"
assert.equal @headers["Content-Type"], "text/html"
test 'renderHelloWorldFromVariable', ->
Tower.get 'renderHelloWorldFromVariable', ->
assert.equal @person, "david"
assert.equal @body, "hello david"
test 'renderWithExplicitStringTemplateAsAction', ->
Tower.get 'renderWithExplicitStringTemplateAsAction', ->
assert.equal @body, "<h1>Hello World!!!</h1>\n"
#test 'helloWorldFile', ->
# Tower.get 'helloWorldFile', ->
# assert.equal @body, "Hello world!"
#
#test 'renderHelloWorld', ->
# Tower.get 'renderHelloWorld', ->
# assert.equal @body, "Hello world!"
#
#test 'renderHelloWorldWithForwardSlash', ->
# Tower.get 'renderHelloWorldWithForwardSlash', ->
# assert.equal @body, "Hello world!"
#
#test 'renderActionHelloWorld', ->
# Tower.get 'renderActionHelloWorld', ->
# assert.equal @body, "Hello world!"
#
test 'renderActionUpcasedHelloWorld', ->
Tower.get 'renderActionUpcasedHelloWorld', ->
assert.equal @body, "<h1>renderActionUpcasedHelloWorld</h1>\n"
test 'renderActionUpcasedHelloWorldAsString', ->
Tower.get 'renderActionUpcasedHelloWorldAsString', ->
assert.equal @body, "<h1>renderActionUpcasedHelloWorld</h1>\n"
#test 'renderActionHelloWorldAsString', ->
# Tower.get 'renderActionUpcasedHelloWorld', ->
# assert.equal @body, "Hello world!"
#
#test 'renderTextHelloWorld', ->
# Tower.get 'renderActionUpcasedHelloWorld', ->
# assert.equal @body, "hello world"
#
#test 'renderTextHelloWorldWithLayout'
#
#test 'helloWorldWithLayoutFalse'
#test 'renderFileWithInstanceVariables'
#test 'renderFileAsStringWithInstanceVariables'
#test 'renderFileNotUsingFullPath'
#test 'renderFileNotUsingFullPathWithDotInPath'
#test 'renderFileUsingPathname'
#test 'renderFileFromTemplate'
#test 'renderFileWithLocals'
#test 'renderFileAsStringWithLocals'
#
#test 'accessingRequestInTemplate'
#test 'accessingLoggerInTemplate'
#test 'accessingActionNameInTemplate'
#test 'accessingControllerNameInTemplate'
#
#test 'renderCustomCode'
#test 'renderTextWithNull'
#test 'renderTextWithFalse'
#test 'renderTextWithResource'
#test 'renderNothingWithAppendix'
#
#test 'heading'
#
#test 'blankResponse'
#
#test 'layoutTest'
#
#test 'accessingParamsInTemplate'
#test 'accessingLocalAssignsInInlineTemplate'
test 'renderJsonHelloWorld', ->
Tower.get 'renderJsonHelloWorld', ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
test 'renderJsonHelloWorldWithParams', ->
Tower.get 'renderJsonHelloWorldWithParams', hello: "world", ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
assert.equal @status, 200
test 'renderJsonHelloWorldWithStatus', ->
Tower.get 'renderJsonHelloWorldWithStatus', ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
assert.equal @status, 401
| 82667 | require '../../config'
controller = null
user = null
router = null
describe 'Tower.Controller.Rendering', ->
test 'renderCoffeeKupFromTemplate', ->
Tower.get 'renderCoffeeKupFromTemplate', ->
assert.equal @body, "<h1>Hello World</h1>\n"
assert.equal @headers["Content-Type"], "text/html"
test 'renderHelloWorldFromVariable', ->
Tower.get 'renderHelloWorldFromVariable', ->
assert.equal @person, "<NAME>"
assert.equal @body, "hello <NAME>"
test 'renderWithExplicitStringTemplateAsAction', ->
Tower.get 'renderWithExplicitStringTemplateAsAction', ->
assert.equal @body, "<h1>Hello World!!!</h1>\n"
#test 'helloWorldFile', ->
# Tower.get 'helloWorldFile', ->
# assert.equal @body, "Hello world!"
#
#test 'renderHelloWorld', ->
# Tower.get 'renderHelloWorld', ->
# assert.equal @body, "Hello world!"
#
#test 'renderHelloWorldWithForwardSlash', ->
# Tower.get 'renderHelloWorldWithForwardSlash', ->
# assert.equal @body, "Hello world!"
#
#test 'renderActionHelloWorld', ->
# Tower.get 'renderActionHelloWorld', ->
# assert.equal @body, "Hello world!"
#
test 'renderActionUpcasedHelloWorld', ->
Tower.get 'renderActionUpcasedHelloWorld', ->
assert.equal @body, "<h1>renderActionUpcasedHelloWorld</h1>\n"
test 'renderActionUpcasedHelloWorldAsString', ->
Tower.get 'renderActionUpcasedHelloWorldAsString', ->
assert.equal @body, "<h1>renderActionUpcasedHelloWorld</h1>\n"
#test 'renderActionHelloWorldAsString', ->
# Tower.get 'renderActionUpcasedHelloWorld', ->
# assert.equal @body, "Hello world!"
#
#test 'renderTextHelloWorld', ->
# Tower.get 'renderActionUpcasedHelloWorld', ->
# assert.equal @body, "hello world"
#
#test 'renderTextHelloWorldWithLayout'
#
#test 'helloWorldWithLayoutFalse'
#test 'renderFileWithInstanceVariables'
#test 'renderFileAsStringWithInstanceVariables'
#test 'renderFileNotUsingFullPath'
#test 'renderFileNotUsingFullPathWithDotInPath'
#test 'renderFileUsingPathname'
#test 'renderFileFromTemplate'
#test 'renderFileWithLocals'
#test 'renderFileAsStringWithLocals'
#
#test 'accessingRequestInTemplate'
#test 'accessingLoggerInTemplate'
#test 'accessingActionNameInTemplate'
#test 'accessingControllerNameInTemplate'
#
#test 'renderCustomCode'
#test 'renderTextWithNull'
#test 'renderTextWithFalse'
#test 'renderTextWithResource'
#test 'renderNothingWithAppendix'
#
#test 'heading'
#
#test 'blankResponse'
#
#test 'layoutTest'
#
#test 'accessingParamsInTemplate'
#test 'accessingLocalAssignsInInlineTemplate'
test 'renderJsonHelloWorld', ->
Tower.get 'renderJsonHelloWorld', ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
test 'renderJsonHelloWorldWithParams', ->
Tower.get 'renderJsonHelloWorldWithParams', hello: "world", ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
assert.equal @status, 200
test 'renderJsonHelloWorldWithStatus', ->
Tower.get 'renderJsonHelloWorldWithStatus', ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
assert.equal @status, 401
| true | require '../../config'
controller = null
user = null
router = null
describe 'Tower.Controller.Rendering', ->
test 'renderCoffeeKupFromTemplate', ->
Tower.get 'renderCoffeeKupFromTemplate', ->
assert.equal @body, "<h1>Hello World</h1>\n"
assert.equal @headers["Content-Type"], "text/html"
test 'renderHelloWorldFromVariable', ->
Tower.get 'renderHelloWorldFromVariable', ->
assert.equal @person, "PI:NAME:<NAME>END_PI"
assert.equal @body, "hello PI:NAME:<NAME>END_PI"
test 'renderWithExplicitStringTemplateAsAction', ->
Tower.get 'renderWithExplicitStringTemplateAsAction', ->
assert.equal @body, "<h1>Hello World!!!</h1>\n"
#test 'helloWorldFile', ->
# Tower.get 'helloWorldFile', ->
# assert.equal @body, "Hello world!"
#
#test 'renderHelloWorld', ->
# Tower.get 'renderHelloWorld', ->
# assert.equal @body, "Hello world!"
#
#test 'renderHelloWorldWithForwardSlash', ->
# Tower.get 'renderHelloWorldWithForwardSlash', ->
# assert.equal @body, "Hello world!"
#
#test 'renderActionHelloWorld', ->
# Tower.get 'renderActionHelloWorld', ->
# assert.equal @body, "Hello world!"
#
test 'renderActionUpcasedHelloWorld', ->
Tower.get 'renderActionUpcasedHelloWorld', ->
assert.equal @body, "<h1>renderActionUpcasedHelloWorld</h1>\n"
test 'renderActionUpcasedHelloWorldAsString', ->
Tower.get 'renderActionUpcasedHelloWorldAsString', ->
assert.equal @body, "<h1>renderActionUpcasedHelloWorld</h1>\n"
#test 'renderActionHelloWorldAsString', ->
# Tower.get 'renderActionUpcasedHelloWorld', ->
# assert.equal @body, "Hello world!"
#
#test 'renderTextHelloWorld', ->
# Tower.get 'renderActionUpcasedHelloWorld', ->
# assert.equal @body, "hello world"
#
#test 'renderTextHelloWorldWithLayout'
#
#test 'helloWorldWithLayoutFalse'
#test 'renderFileWithInstanceVariables'
#test 'renderFileAsStringWithInstanceVariables'
#test 'renderFileNotUsingFullPath'
#test 'renderFileNotUsingFullPathWithDotInPath'
#test 'renderFileUsingPathname'
#test 'renderFileFromTemplate'
#test 'renderFileWithLocals'
#test 'renderFileAsStringWithLocals'
#
#test 'accessingRequestInTemplate'
#test 'accessingLoggerInTemplate'
#test 'accessingActionNameInTemplate'
#test 'accessingControllerNameInTemplate'
#
#test 'renderCustomCode'
#test 'renderTextWithNull'
#test 'renderTextWithFalse'
#test 'renderTextWithResource'
#test 'renderNothingWithAppendix'
#
#test 'heading'
#
#test 'blankResponse'
#
#test 'layoutTest'
#
#test 'accessingParamsInTemplate'
#test 'accessingLocalAssignsInInlineTemplate'
test 'renderJsonHelloWorld', ->
Tower.get 'renderJsonHelloWorld', ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
test 'renderJsonHelloWorldWithParams', ->
Tower.get 'renderJsonHelloWorldWithParams', hello: "world", ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
assert.equal @status, 200
test 'renderJsonHelloWorldWithStatus', ->
Tower.get 'renderJsonHelloWorldWithStatus', ->
assert.equal @body, JSON.stringify(hello: "world")
assert.equal @headers["Content-Type"], "application/json"
assert.equal @status, 401
|
[
{
"context": "nvert', ->\n beforeEach ->\n room.user.say 'jim', 'hubot convert 1318781876'\n\n it 'should echo",
"end": 784,
"score": 0.7107774019241333,
"start": 781,
"tag": "NAME",
"value": "jim"
}
] | tests/timestamp.coffee | tigefabot/test | 0 | Helper = require('hubot-test-helper')
expect = require('chai').expect
assert = require('chai').assert
sinon = require('sinon')
# helper loads a specific script if it's a file
helper = new Helper('./../scripts/timestamp.coffee')
describe 'timestamp', ->
room = null
moment = null
momentUnixStub = null
momentUnixToStringStub = null
beforeEach ->
moment = require('moment')
momentUnixToStringStub = sinon.stub()
momentUnixToStringStub.returns("Sun Oct 16 2011 16:17:56 GMT+0000")
momentUnixStub = sinon.stub moment, "unix", () ->
return {toString: momentUnixToStringStub}
room = helper.createRoom()
afterEach ->
moment.unix.restore()
room.destroy()
context 'user asks hubot to convert', ->
beforeEach ->
room.user.say 'jim', 'hubot convert 1318781876'
it 'should echo message back', ->
expect(room.messages).to.eql [
['jim', 'hubot convert 1318781876']
['hubot', 'Sun Oct 16 2011 16:17:56 GMT+0000']
]
it 'should have called toString', ->
expect(momentUnixToStringStub.callCount).to.eql 1
it 'should have called unix() with the correct parameters', ->
expect(momentUnixStub.args[0]).to.eql [ '1318781876' ]
| 220791 | Helper = require('hubot-test-helper')
expect = require('chai').expect
assert = require('chai').assert
sinon = require('sinon')
# helper loads a specific script if it's a file
helper = new Helper('./../scripts/timestamp.coffee')
describe 'timestamp', ->
room = null
moment = null
momentUnixStub = null
momentUnixToStringStub = null
beforeEach ->
moment = require('moment')
momentUnixToStringStub = sinon.stub()
momentUnixToStringStub.returns("Sun Oct 16 2011 16:17:56 GMT+0000")
momentUnixStub = sinon.stub moment, "unix", () ->
return {toString: momentUnixToStringStub}
room = helper.createRoom()
afterEach ->
moment.unix.restore()
room.destroy()
context 'user asks hubot to convert', ->
beforeEach ->
room.user.say '<NAME>', 'hubot convert 1318781876'
it 'should echo message back', ->
expect(room.messages).to.eql [
['jim', 'hubot convert 1318781876']
['hubot', 'Sun Oct 16 2011 16:17:56 GMT+0000']
]
it 'should have called toString', ->
expect(momentUnixToStringStub.callCount).to.eql 1
it 'should have called unix() with the correct parameters', ->
expect(momentUnixStub.args[0]).to.eql [ '1318781876' ]
| true | Helper = require('hubot-test-helper')
expect = require('chai').expect
assert = require('chai').assert
sinon = require('sinon')
# helper loads a specific script if it's a file
helper = new Helper('./../scripts/timestamp.coffee')
describe 'timestamp', ->
room = null
moment = null
momentUnixStub = null
momentUnixToStringStub = null
beforeEach ->
moment = require('moment')
momentUnixToStringStub = sinon.stub()
momentUnixToStringStub.returns("Sun Oct 16 2011 16:17:56 GMT+0000")
momentUnixStub = sinon.stub moment, "unix", () ->
return {toString: momentUnixToStringStub}
room = helper.createRoom()
afterEach ->
moment.unix.restore()
room.destroy()
context 'user asks hubot to convert', ->
beforeEach ->
room.user.say 'PI:NAME:<NAME>END_PI', 'hubot convert 1318781876'
it 'should echo message back', ->
expect(room.messages).to.eql [
['jim', 'hubot convert 1318781876']
['hubot', 'Sun Oct 16 2011 16:17:56 GMT+0000']
]
it 'should have called toString', ->
expect(momentUnixToStringStub.callCount).to.eql 1
it 'should have called unix() with the correct parameters', ->
expect(momentUnixStub.args[0]).to.eql [ '1318781876' ]
|
[
{
"context": "e\n email: email.toLowerCase()\n password: password\n firstName: req.body.firstName\n lastNam",
"end": 1048,
"score": 0.9991434216499329,
"start": 1040,
"tag": "PASSWORD",
"value": "password"
}
] | config/passport.coffee | webzepter/cleveroad_test_task | 2 | LocalStrategy = require('passport-local').Strategy
module.exports = (userModel, passport) ->
serialize = (user, done) ->
done(null, user.id)
deserialize = (id, done) ->
userModel
.findOneById(id)
.populate('role')
.exec (err, user) ->
done(err, user)
passport.serializeUser(serialize)
passport.deserializeUser(deserialize)
loginParams =
usernameField: 'email'
localLogin = (email, password, done) ->
userModel
.findOneByEmail(email.toLowerCase())
.then (user) ->
user.comparePasswords password, (err, matched) ->
if matched
done(null, user)
else
done(null, false, message: 'Invalid credentials')
.catch ->
done(null, false, message: 'Invalid credentials')
passport.use('local-login', new LocalStrategy(loginParams, localLogin))
signupParams =
usernameField: 'email'
passReqToCallback: true
localSignUp = (req, email, password, done) ->
userModel
.create
email: email.toLowerCase()
password: password
firstName: req.body.firstName
lastName: req.body.lastName
city: req.body.city
role: req.body.role
.exec (err, user) ->
done(err, user)
passport.use('local-signup', new LocalStrategy(signupParams, localSignUp)) | 119498 | LocalStrategy = require('passport-local').Strategy
module.exports = (userModel, passport) ->
serialize = (user, done) ->
done(null, user.id)
deserialize = (id, done) ->
userModel
.findOneById(id)
.populate('role')
.exec (err, user) ->
done(err, user)
passport.serializeUser(serialize)
passport.deserializeUser(deserialize)
loginParams =
usernameField: 'email'
localLogin = (email, password, done) ->
userModel
.findOneByEmail(email.toLowerCase())
.then (user) ->
user.comparePasswords password, (err, matched) ->
if matched
done(null, user)
else
done(null, false, message: 'Invalid credentials')
.catch ->
done(null, false, message: 'Invalid credentials')
passport.use('local-login', new LocalStrategy(loginParams, localLogin))
signupParams =
usernameField: 'email'
passReqToCallback: true
localSignUp = (req, email, password, done) ->
userModel
.create
email: email.toLowerCase()
password: <PASSWORD>
firstName: req.body.firstName
lastName: req.body.lastName
city: req.body.city
role: req.body.role
.exec (err, user) ->
done(err, user)
passport.use('local-signup', new LocalStrategy(signupParams, localSignUp)) | true | LocalStrategy = require('passport-local').Strategy
module.exports = (userModel, passport) ->
serialize = (user, done) ->
done(null, user.id)
deserialize = (id, done) ->
userModel
.findOneById(id)
.populate('role')
.exec (err, user) ->
done(err, user)
passport.serializeUser(serialize)
passport.deserializeUser(deserialize)
loginParams =
usernameField: 'email'
localLogin = (email, password, done) ->
userModel
.findOneByEmail(email.toLowerCase())
.then (user) ->
user.comparePasswords password, (err, matched) ->
if matched
done(null, user)
else
done(null, false, message: 'Invalid credentials')
.catch ->
done(null, false, message: 'Invalid credentials')
passport.use('local-login', new LocalStrategy(loginParams, localLogin))
signupParams =
usernameField: 'email'
passReqToCallback: true
localSignUp = (req, email, password, done) ->
userModel
.create
email: email.toLowerCase()
password: PI:PASSWORD:<PASSWORD>END_PI
firstName: req.body.firstName
lastName: req.body.lastName
city: req.body.city
role: req.body.role
.exec (err, user) ->
done(err, user)
passport.use('local-signup', new LocalStrategy(signupParams, localSignUp)) |
[
{
"context": "\n 'id': 1\n }\n {\n 'name': 'Ontario'\n 'id': 2\n }\n {\n 'name': ",
"end": 949,
"score": 0.9959821701049805,
"start": 942,
"tag": "NAME",
"value": "Ontario"
}
] | client/views/UI/form/customerForm.coffee | boogieprod/pacsim | 0 | Template.customerForm.rendered = ->
$('.ui.dropdown').dropdown()
$('#progress-bar').progress
percent: 100
Template.customerForm.helpers
subtotal: ->
total = 0
CartItems.find().map (item) ->
total += item.productPrice
return total
canadiantaxes: ->
total = 0
CartItems.find().map (item) ->
total += Math.round(item.productPrice * GST)
return total
provincialtaxes: ->
total = 0
CartItems.find().map (item) ->
total += Math.round(item.productPrice * QST)
return total
totalaftertax: ->
total = 0
CartItems.find().map (item) ->
cantaxamount = Math.round(item.productPrice * GST)
provtaxamount = Math.round(item.productPrice * QST)
total += Math.round(item.productPrice + provtaxamount + cantaxamount)
return total
'provinces': ->
[
{
'name': ->
TAPi18n.__ 'qc'
'id': 1
}
{
'name': 'Ontario'
'id': 2
}
{
'name': ->
TAPi18n.__ 'bc'
'id': 3
}
]
Template.customerForm.events
"click .make-payment": (evt, tmpl) ->
evt.preventDefault()
$form = $('.ui.form')
fname = $form.form('get value', 'fname')
lname = $form.form('get value', 'lname')
email = $form.form('get value', 'email')
street = $form.form('get value', 'street')
postal = $form.form('get value', 'postal')
prov = $form.form('get value', 'province')
areac = $form.form('get value', 'areacode')
agree = $form.form('get value', 'agreement')
if agree != true or fname[0].length < 1 or lname[0].length < 1 or email[0].length < 1 or areac[0] == ""
if agree != true and fname[0].length < 1 and lname[0].length < 1 and email[0].length < 1 and areac[0] == ""
sAlert.error(TAPi18n.__ 'fields_with_asterix_mandatory')
$('#agreement-field').addClass('error')
$('#fname-field').addClass('error')
$('#lname-field').addClass('error')
$('#email-field').addClass('error')
$('#areacode-field').addClass('error')
else if agree != true
sAlert.error(TAPi18n.__ 'must_review_terms')
$('#agreement-field').addClass('error')
else if fname[0].length < 1
sAlert.error(TAPi18n.__ 'fname_mandatory')
$('#fname-field').addClass('error')
else if lname[0].length < 1
sAlert.error(TAPi18n.__ 'lname_mandatory')
$('#lname-field').addClass('error')
else if email[0].length < 1
sAlert.error(TAPi18n.__ 'email_mandatory')
$('#email-field').addClass('error')
else if areac[0] == ""
sAlert.error(TAPi18n.__ 'areac_mandatory')
$('#areacode-field').addClass('error')
else
console.log "Everything's filled"
"change #agreement-field": (evt, tmpl) ->
$('#agreement-field').removeClass('error')
"focus #fname-field": (evt, tmpl) ->
$('#fname-field').removeClass('error')
"focus #lname-field": (evt, tmpl) ->
$('#lname-field').removeClass('error')
"focus #email-field": (evt, tmpl) ->
$('#email-field').removeClass('error')
"focus #areacode-field": (evt, tmpl) ->
$('#areacode-field').removeClass('error')
"click #reset-button": (evt, tmpl) ->
evt.preventDefault()
$('form#customer-form').form('clear')
| 110804 | Template.customerForm.rendered = ->
$('.ui.dropdown').dropdown()
$('#progress-bar').progress
percent: 100
Template.customerForm.helpers
subtotal: ->
total = 0
CartItems.find().map (item) ->
total += item.productPrice
return total
canadiantaxes: ->
total = 0
CartItems.find().map (item) ->
total += Math.round(item.productPrice * GST)
return total
provincialtaxes: ->
total = 0
CartItems.find().map (item) ->
total += Math.round(item.productPrice * QST)
return total
totalaftertax: ->
total = 0
CartItems.find().map (item) ->
cantaxamount = Math.round(item.productPrice * GST)
provtaxamount = Math.round(item.productPrice * QST)
total += Math.round(item.productPrice + provtaxamount + cantaxamount)
return total
'provinces': ->
[
{
'name': ->
TAPi18n.__ 'qc'
'id': 1
}
{
'name': '<NAME>'
'id': 2
}
{
'name': ->
TAPi18n.__ 'bc'
'id': 3
}
]
Template.customerForm.events
"click .make-payment": (evt, tmpl) ->
evt.preventDefault()
$form = $('.ui.form')
fname = $form.form('get value', 'fname')
lname = $form.form('get value', 'lname')
email = $form.form('get value', 'email')
street = $form.form('get value', 'street')
postal = $form.form('get value', 'postal')
prov = $form.form('get value', 'province')
areac = $form.form('get value', 'areacode')
agree = $form.form('get value', 'agreement')
if agree != true or fname[0].length < 1 or lname[0].length < 1 or email[0].length < 1 or areac[0] == ""
if agree != true and fname[0].length < 1 and lname[0].length < 1 and email[0].length < 1 and areac[0] == ""
sAlert.error(TAPi18n.__ 'fields_with_asterix_mandatory')
$('#agreement-field').addClass('error')
$('#fname-field').addClass('error')
$('#lname-field').addClass('error')
$('#email-field').addClass('error')
$('#areacode-field').addClass('error')
else if agree != true
sAlert.error(TAPi18n.__ 'must_review_terms')
$('#agreement-field').addClass('error')
else if fname[0].length < 1
sAlert.error(TAPi18n.__ 'fname_mandatory')
$('#fname-field').addClass('error')
else if lname[0].length < 1
sAlert.error(TAPi18n.__ 'lname_mandatory')
$('#lname-field').addClass('error')
else if email[0].length < 1
sAlert.error(TAPi18n.__ 'email_mandatory')
$('#email-field').addClass('error')
else if areac[0] == ""
sAlert.error(TAPi18n.__ 'areac_mandatory')
$('#areacode-field').addClass('error')
else
console.log "Everything's filled"
"change #agreement-field": (evt, tmpl) ->
$('#agreement-field').removeClass('error')
"focus #fname-field": (evt, tmpl) ->
$('#fname-field').removeClass('error')
"focus #lname-field": (evt, tmpl) ->
$('#lname-field').removeClass('error')
"focus #email-field": (evt, tmpl) ->
$('#email-field').removeClass('error')
"focus #areacode-field": (evt, tmpl) ->
$('#areacode-field').removeClass('error')
"click #reset-button": (evt, tmpl) ->
evt.preventDefault()
$('form#customer-form').form('clear')
| true | Template.customerForm.rendered = ->
$('.ui.dropdown').dropdown()
$('#progress-bar').progress
percent: 100
Template.customerForm.helpers
subtotal: ->
total = 0
CartItems.find().map (item) ->
total += item.productPrice
return total
canadiantaxes: ->
total = 0
CartItems.find().map (item) ->
total += Math.round(item.productPrice * GST)
return total
provincialtaxes: ->
total = 0
CartItems.find().map (item) ->
total += Math.round(item.productPrice * QST)
return total
totalaftertax: ->
total = 0
CartItems.find().map (item) ->
cantaxamount = Math.round(item.productPrice * GST)
provtaxamount = Math.round(item.productPrice * QST)
total += Math.round(item.productPrice + provtaxamount + cantaxamount)
return total
'provinces': ->
[
{
'name': ->
TAPi18n.__ 'qc'
'id': 1
}
{
'name': 'PI:NAME:<NAME>END_PI'
'id': 2
}
{
'name': ->
TAPi18n.__ 'bc'
'id': 3
}
]
Template.customerForm.events
"click .make-payment": (evt, tmpl) ->
evt.preventDefault()
$form = $('.ui.form')
fname = $form.form('get value', 'fname')
lname = $form.form('get value', 'lname')
email = $form.form('get value', 'email')
street = $form.form('get value', 'street')
postal = $form.form('get value', 'postal')
prov = $form.form('get value', 'province')
areac = $form.form('get value', 'areacode')
agree = $form.form('get value', 'agreement')
if agree != true or fname[0].length < 1 or lname[0].length < 1 or email[0].length < 1 or areac[0] == ""
if agree != true and fname[0].length < 1 and lname[0].length < 1 and email[0].length < 1 and areac[0] == ""
sAlert.error(TAPi18n.__ 'fields_with_asterix_mandatory')
$('#agreement-field').addClass('error')
$('#fname-field').addClass('error')
$('#lname-field').addClass('error')
$('#email-field').addClass('error')
$('#areacode-field').addClass('error')
else if agree != true
sAlert.error(TAPi18n.__ 'must_review_terms')
$('#agreement-field').addClass('error')
else if fname[0].length < 1
sAlert.error(TAPi18n.__ 'fname_mandatory')
$('#fname-field').addClass('error')
else if lname[0].length < 1
sAlert.error(TAPi18n.__ 'lname_mandatory')
$('#lname-field').addClass('error')
else if email[0].length < 1
sAlert.error(TAPi18n.__ 'email_mandatory')
$('#email-field').addClass('error')
else if areac[0] == ""
sAlert.error(TAPi18n.__ 'areac_mandatory')
$('#areacode-field').addClass('error')
else
console.log "Everything's filled"
"change #agreement-field": (evt, tmpl) ->
$('#agreement-field').removeClass('error')
"focus #fname-field": (evt, tmpl) ->
$('#fname-field').removeClass('error')
"focus #lname-field": (evt, tmpl) ->
$('#lname-field').removeClass('error')
"focus #email-field": (evt, tmpl) ->
$('#email-field').removeClass('error')
"focus #areacode-field": (evt, tmpl) ->
$('#areacode-field').removeClass('error')
"click #reset-button": (evt, tmpl) ->
evt.preventDefault()
$('form#customer-form').form('clear')
|
[
{
"context": "\n return callback url if parser.hostname in ['127.0.0.1', 'dev.kodi.ng']\n\n # check if running under pr",
"end": 581,
"score": 0.9997518062591553,
"start": 572,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " # build our new url, example:\n # ol... | client/app/lib/util/proxifier.coffee | ezgikaysi/koding | 1 | globals = require 'globals'
doXhrRequest = require 'app/util/doXhrRequest'
module.exports = class Proxifier
@proxify = (options, callback) ->
# take options for url
{ url, checkAlternatives } = options
checkAlternatives ?= yes
# parse url
parser = global.document.createElement 'a'
parser.href = url
# if url is already proxyfied return it as is
baseDomain = "#{globals.config.domains.base}".replace '.', '\\.'
return callback url if ///p\.#{baseDomain}///.test url
return callback url if parser.hostname in ['127.0.0.1', 'dev.kodi.ng']
# check if running under production environment
isInProduction = @isInProduction()
# get the current protocol
protocol = @getProtocol()
# build our new url, example:
# old: http://54.164.174.218:3000/kite
# new: https://p.koding.com/-/prodproxy/54.164.243.111/kite
# or
# http://localhost:8090/-/prodproxy/54.164.243.111/kite
# subdomain is for different proxy environments
# one for development the other for production
subdomain = if isInProduction then 'p' else 'dev-p2'
# create the base url
baseURL = "#{protocol}//#{subdomain}.#{globals.config.domains.base}/-"
# if it's a tunnel given domain we need to do one more check
# for tunnels since production tunnel proxy is different
if (/\.koding\.me$/.test host = parser.hostname)
# for tunneled connections default tunnel is `devtunnel`
proxy = if isInProduction then 'prodtunnel' else 'devtunnel'
baseURL = "#{baseURL}/#{proxy}/#{host}"
current = "#{baseURL}#{parser.pathname}"
return callback current unless checkAlternatives
Proxifier.checkAlternative protocol, baseURL, (err, res) ->
if err
console.warn '[tunnel] failed to look for alternatives:', err
return callback current
# walk over alternatives for local and send
# it back if found a match with the protocol
for alt in res
if ///^#{alt.protocol}///.test(protocol) and alt.local
return callback "#{protocol}//#{alt.addr}/kite"
callback current
# proxy support for not tunneled direct connections for each environment
else
proxy = if isInProduction then 'prodproxy' else 'devproxy'
# generated proxyfied url for connecting to kite
callback "#{baseURL}/#{proxy}/#{host}#{parser.pathname}"
@checkAlternative = (protocol, baseURL, callback) ->
endPoint = "#{baseURL}/-/discover/kite"
type = 'GET'
timeout = 2000
doXhrRequest { endPoint, type, timeout }, callback
@getProtocol = ->
{ protocol } = global.document.location
return protocol
@isInProduction = ->
return globals.config.environment is 'production'
| 22004 | globals = require 'globals'
doXhrRequest = require 'app/util/doXhrRequest'
module.exports = class Proxifier
@proxify = (options, callback) ->
# take options for url
{ url, checkAlternatives } = options
checkAlternatives ?= yes
# parse url
parser = global.document.createElement 'a'
parser.href = url
# if url is already proxyfied return it as is
baseDomain = "#{globals.config.domains.base}".replace '.', '\\.'
return callback url if ///p\.#{baseDomain}///.test url
return callback url if parser.hostname in ['127.0.0.1', 'dev.kodi.ng']
# check if running under production environment
isInProduction = @isInProduction()
# get the current protocol
protocol = @getProtocol()
# build our new url, example:
# old: http://192.168.3.11:3000/kite
# new: https://p.koding.com/-/prodproxy/54.164.243.111/kite
# or
# http://localhost:8090/-/prodproxy/54.164.243.111/kite
# subdomain is for different proxy environments
# one for development the other for production
subdomain = if isInProduction then 'p' else 'dev-p2'
# create the base url
baseURL = "#{protocol}//#{subdomain}.#{globals.config.domains.base}/-"
# if it's a tunnel given domain we need to do one more check
# for tunnels since production tunnel proxy is different
if (/\.koding\.me$/.test host = parser.hostname)
# for tunneled connections default tunnel is `devtunnel`
proxy = if isInProduction then 'prodtunnel' else 'devtunnel'
baseURL = "#{baseURL}/#{proxy}/#{host}"
current = "#{baseURL}#{parser.pathname}"
return callback current unless checkAlternatives
Proxifier.checkAlternative protocol, baseURL, (err, res) ->
if err
console.warn '[tunnel] failed to look for alternatives:', err
return callback current
# walk over alternatives for local and send
# it back if found a match with the protocol
for alt in res
if ///^#{alt.protocol}///.test(protocol) and alt.local
return callback "#{protocol}//#{alt.addr}/kite"
callback current
# proxy support for not tunneled direct connections for each environment
else
proxy = if isInProduction then 'prodproxy' else 'devproxy'
# generated proxyfied url for connecting to kite
callback "#{baseURL}/#{proxy}/#{host}#{parser.pathname}"
@checkAlternative = (protocol, baseURL, callback) ->
endPoint = "#{baseURL}/-/discover/kite"
type = 'GET'
timeout = 2000
doXhrRequest { endPoint, type, timeout }, callback
@getProtocol = ->
{ protocol } = global.document.location
return protocol
@isInProduction = ->
return globals.config.environment is 'production'
| true | globals = require 'globals'
doXhrRequest = require 'app/util/doXhrRequest'
module.exports = class Proxifier
@proxify = (options, callback) ->
# take options for url
{ url, checkAlternatives } = options
checkAlternatives ?= yes
# parse url
parser = global.document.createElement 'a'
parser.href = url
# if url is already proxyfied return it as is
baseDomain = "#{globals.config.domains.base}".replace '.', '\\.'
return callback url if ///p\.#{baseDomain}///.test url
return callback url if parser.hostname in ['127.0.0.1', 'dev.kodi.ng']
# check if running under production environment
isInProduction = @isInProduction()
# get the current protocol
protocol = @getProtocol()
# build our new url, example:
# old: http://PI:IP_ADDRESS:192.168.3.11END_PI:3000/kite
# new: https://p.koding.com/-/prodproxy/54.164.243.111/kite
# or
# http://localhost:8090/-/prodproxy/54.164.243.111/kite
# subdomain is for different proxy environments
# one for development the other for production
subdomain = if isInProduction then 'p' else 'dev-p2'
# create the base url
baseURL = "#{protocol}//#{subdomain}.#{globals.config.domains.base}/-"
# if it's a tunnel given domain we need to do one more check
# for tunnels since production tunnel proxy is different
if (/\.koding\.me$/.test host = parser.hostname)
# for tunneled connections default tunnel is `devtunnel`
proxy = if isInProduction then 'prodtunnel' else 'devtunnel'
baseURL = "#{baseURL}/#{proxy}/#{host}"
current = "#{baseURL}#{parser.pathname}"
return callback current unless checkAlternatives
Proxifier.checkAlternative protocol, baseURL, (err, res) ->
if err
console.warn '[tunnel] failed to look for alternatives:', err
return callback current
# walk over alternatives for local and send
# it back if found a match with the protocol
for alt in res
if ///^#{alt.protocol}///.test(protocol) and alt.local
return callback "#{protocol}//#{alt.addr}/kite"
callback current
# proxy support for not tunneled direct connections for each environment
else
proxy = if isInProduction then 'prodproxy' else 'devproxy'
# generated proxyfied url for connecting to kite
callback "#{baseURL}/#{proxy}/#{host}#{parser.pathname}"
@checkAlternative = (protocol, baseURL, callback) ->
endPoint = "#{baseURL}/-/discover/kite"
type = 'GET'
timeout = 2000
doXhrRequest { endPoint, type, timeout }, callback
@getProtocol = ->
{ protocol } = global.document.location
return protocol
@isInProduction = ->
return globals.config.environment is 'production'
|
[
{
"context": " expect(stringCapitalizeFilter('jon')).to.equal 'Jon'\n expect(stringCapitalizeFilter('JP')).to.",
"end": 223,
"score": 0.8760522603988647,
"start": 220,
"tag": "NAME",
"value": "Jon"
}
] | spec/string_filters.spec.coffee | goodeggs/ng-string | 5 | describe 'string filters', ->
beforeEach module 'string'
describe 'capitalize', ->
it 'capitalizes a string', ->
inject (stringCapitalizeFilter) ->
expect(stringCapitalizeFilter('jon')).to.equal 'Jon'
expect(stringCapitalizeFilter('JP')).to.equal 'Jp'
# TODO add basic tests for all the other filters.
| 169315 | describe 'string filters', ->
beforeEach module 'string'
describe 'capitalize', ->
it 'capitalizes a string', ->
inject (stringCapitalizeFilter) ->
expect(stringCapitalizeFilter('jon')).to.equal '<NAME>'
expect(stringCapitalizeFilter('JP')).to.equal 'Jp'
# TODO add basic tests for all the other filters.
| true | describe 'string filters', ->
beforeEach module 'string'
describe 'capitalize', ->
it 'capitalizes a string', ->
inject (stringCapitalizeFilter) ->
expect(stringCapitalizeFilter('jon')).to.equal 'PI:NAME:<NAME>END_PI'
expect(stringCapitalizeFilter('JP')).to.equal 'Jp'
# TODO add basic tests for all the other filters.
|
[
{
"context": "r/discover/etc.)', ->\n <CreditCard\n name='Frank Latte'\n number='4242424242424242'\n month='04'",
"end": 283,
"score": 0.9998475313186646,
"start": 272,
"tag": "NAME",
"value": "Frank Latte"
},
{
"context": "260px'}}>\n <CreditCard\n ... | client/component-lab/CreditCard/CreditCard.story.coffee | lionheart1022/koding | 0 | React = require 'react'
{ storiesOf, action } = require '@kadira/storybook'
CreditCard = require './CreditCard'
storiesOf 'CreditCard', module
.add 'empty card', ->
<CreditCard />
.add 'regular card (visa/master/discover/etc.)', ->
<CreditCard
name='Frank Latte'
number='4242424242424242'
month='04'
year='19'
brand='visa' />
.add 'half regular card (visa/master/discover/etc.)', ->
children = [ 'visa', 'master-card', 'american-express', 'diners-club'
'discover', 'jcb', 'maestro' ].map (brand, index) ->
number = if 'american-express' is brand
then '•••••••••••4242'
else '••••••••••••4242'
<div key={brand} style={{margin: '0 20px 20px 0', width: '260px'}}>
<CreditCard
name='Fahrettin Tasdelen'
number={number}
month='04'
year='19'
brand={brand} />
</div>
<div style={{width: '560px', margin: '0 auto', display: 'flex', flexWrap: 'wrap'}}>
{children}
</div>
.add 'amex card', ->
<CreditCard title='Credit Card Number' brand='amex' />
| 55597 | React = require 'react'
{ storiesOf, action } = require '@kadira/storybook'
CreditCard = require './CreditCard'
storiesOf 'CreditCard', module
.add 'empty card', ->
<CreditCard />
.add 'regular card (visa/master/discover/etc.)', ->
<CreditCard
name='<NAME>'
number='4242424242424242'
month='04'
year='19'
brand='visa' />
.add 'half regular card (visa/master/discover/etc.)', ->
children = [ 'visa', 'master-card', 'american-express', 'diners-club'
'discover', 'jcb', 'maestro' ].map (brand, index) ->
number = if 'american-express' is brand
then '•••••••••••4242'
else '••••••••••••4242'
<div key={brand} style={{margin: '0 20px 20px 0', width: '260px'}}>
<CreditCard
name='<NAME>'
number={number}
month='04'
year='19'
brand={brand} />
</div>
<div style={{width: '560px', margin: '0 auto', display: 'flex', flexWrap: 'wrap'}}>
{children}
</div>
.add 'amex card', ->
<CreditCard title='Credit Card Number' brand='amex' />
| true | React = require 'react'
{ storiesOf, action } = require '@kadira/storybook'
CreditCard = require './CreditCard'
storiesOf 'CreditCard', module
.add 'empty card', ->
<CreditCard />
.add 'regular card (visa/master/discover/etc.)', ->
<CreditCard
name='PI:NAME:<NAME>END_PI'
number='4242424242424242'
month='04'
year='19'
brand='visa' />
.add 'half regular card (visa/master/discover/etc.)', ->
children = [ 'visa', 'master-card', 'american-express', 'diners-club'
'discover', 'jcb', 'maestro' ].map (brand, index) ->
number = if 'american-express' is brand
then '•••••••••••4242'
else '••••••••••••4242'
<div key={brand} style={{margin: '0 20px 20px 0', width: '260px'}}>
<CreditCard
name='PI:NAME:<NAME>END_PI'
number={number}
month='04'
year='19'
brand={brand} />
</div>
<div style={{width: '560px', margin: '0 auto', display: 'flex', flexWrap: 'wrap'}}>
{children}
</div>
.add 'amex card', ->
<CreditCard title='Credit Card Number' brand='amex' />
|
[
{
"context": "# **Author:** Peter Urbak<br/>\n# **Version:** 2013-01-29\n\n# Initialize name",
"end": 25,
"score": 0.9998606443405151,
"start": 14,
"tag": "NAME",
"value": "Peter Urbak"
}
] | client_src/util.coffee | dragonwasrobot/gesture-recognition | 2 | # **Author:** Peter Urbak<br/>
# **Version:** 2013-01-29
# Initialize namespace
root = exports ? window
root.App = {}
# Add `first` and `last` functions to the `Array` class.
Array::first = () -> @[0]
Array::last = () -> @[@.length-1]
# Add a `length` function to the `Object` class.
#
# Returns the number of properties on the object minus length itself.
# Note: not sure if this is bad style.
ObjectLength = () ->
length = 0
for key, value of @
if key isnt length
length += 1
return length
# Logging
App.log = (string) -> console.log string
| 53512 | # **Author:** <NAME><br/>
# **Version:** 2013-01-29
# Initialize namespace
root = exports ? window
root.App = {}
# Add `first` and `last` functions to the `Array` class.
Array::first = () -> @[0]
Array::last = () -> @[@.length-1]
# Add a `length` function to the `Object` class.
#
# Returns the number of properties on the object minus length itself.
# Note: not sure if this is bad style.
ObjectLength = () ->
length = 0
for key, value of @
if key isnt length
length += 1
return length
# Logging
App.log = (string) -> console.log string
| true | # **Author:** PI:NAME:<NAME>END_PI<br/>
# **Version:** 2013-01-29
# Initialize namespace
root = exports ? window
root.App = {}
# Add `first` and `last` functions to the `Array` class.
Array::first = () -> @[0]
Array::last = () -> @[@.length-1]
# Add a `length` function to the `Object` class.
#
# Returns the number of properties on the object minus length itself.
# Note: not sure if this is bad style.
ObjectLength = () ->
length = 0
for key, value of @
if key isnt length
length += 1
return length
# Logging
App.log = (string) -> console.log string
|
[
{
"context": "ubmitData(\"/user/newUser\", {\n username: username\n password: password\n }).then (r",
"end": 2483,
"score": 0.9985024929046631,
"start": 2475,
"tag": "USERNAME",
"value": "username"
},
{
"context": " username: username\n ... | public/javascripts/form.coffee | aXises/DNUI-Project | 3 | submitData = (route, data) =>
$.ajax
url: route
type: 'POST'
data: data
$(document).ready =>
$(".form-container .close, .form-container .clickable-bg").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
showPageOpen = false;
$(".form-container .dynamic-form.login-form .register").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.register-form").addClass("form-visible")
$(".form-container .dynamic-form.register-form .login").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.login-form").addClass("form-visible")
$("#review .buttons .add-review").click =>
submitData("/user/getSessionId", {}).then (res) =>
if res
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.add-review-form").addClass("form-visible")
else
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.login-form").addClass("form-visible")
$(".form-container .register-form .submit").click =>
username = $(".form-container .register-form .username").val()
password = $(".form-container .register-form .password").val()
cPassword = $(".form-container .register-form .cpassword").val()
if not username then return $(".form-container .register-form .error").text("Username cannot be empty.")
if not password then return $(".form-container .register-form .error").text("Password cannot be empty.")
if not cPassword then return $(".form-container .register-form .error").text("Password cannot be empty.")
if username.length < 5 then return $(".form-container .register-form .error").text("Username must be at least 5 letters.")
if password.length < 5 then return $(".form-container .register-form .error").text("Password be at least 5 letters.")
if password != cPassword then return $(".form-container .register-form .error").text("Password must match.")
$(".form-container .register-form .error").text(" ")
submitData("/user/newUser", {
username: username
password: password
}).then (res) =>
if not JSON.parse(res).status
$(".form-container .register-form .error").text(JSON.parse(res).message)
else
location.reload()
$(".form-container .login-form .submit").click =>
username = $(".form-container .login-form .username").val()
password = $(".form-container .login-form .password").val()
submitData("/user/login", {
username: username
password: password
}).then (res) =>
if not JSON.parse(res).status
$(".form-container .login-form .error").text(JSON.parse(res).message)
else
location.reload()
$(".form-container .add-review-form .submit").click =>
$(".add-review-form .review-text").css("border", "1px solid transparent")
$(".add-review-form .category .cat").css("border", "1px solid transparent")
$(".add-review-form .category .rating").css("border", "1px solid transparent")
$(".add-review-form .overall").css("border", "1px solid transparent")
$(".add-review-form .title").css("border", "1px solid transparent")
if not $(".add-review-form .title").val()
$(".add-review-form .title").css("border", "1px solid black")
if not $(".add-review-form .review-text").val()
$(".add-review-form .review-text").css("border", "1px solid black")
if not $(".add-review-form .overall").val() or isNaN($(".add-review-form .overall").val())
$(".add-review-form .overall").css("border", "1px solid black")
allcategory = []
for section in $(".add-review-form .category")
category = $(section).find(".cat")
rating = $(section).find(".rating")
console.log(category.val(), rating.val())
if not category.val()
category.css("border", "1px solid black")
if not rating.val() or isNaN(rating.val())
rating.css("border", "1px solid black")
if allcategory.indexOf(category.val()) != -1
category.css("border", "1px solid black")
if category.val() and rating.val() and not isNaN(rating.val()) and allcategory.indexOf(category.val()) == -1
console.log(category, rating)
allcategory.push([category.val().toLowerCase(), rating.val()])
if $(".add-review-form .review-text").val() and $(".add-review-form .overall").val() and not isNaN($(".add-review-form .overall").val()) and $(".add-review-form .title").val()
submitData("/user/getSessionId", {}).then (userId) =>
submitData("/review/addReview", {
title: $(".add-review-form .title").val()
reviewText: $(".add-review-form .review-text").val()
category: JSON.stringify(allcategory)
overall: $(".add-review-form .overall").val()
pId: $("#review").attr("pid")
userId: userId
type: $("#review").attr("type")
}).then (res) =>
location.reload()
allcategory = []
| 94549 | submitData = (route, data) =>
$.ajax
url: route
type: 'POST'
data: data
$(document).ready =>
$(".form-container .close, .form-container .clickable-bg").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
showPageOpen = false;
$(".form-container .dynamic-form.login-form .register").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.register-form").addClass("form-visible")
$(".form-container .dynamic-form.register-form .login").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.login-form").addClass("form-visible")
$("#review .buttons .add-review").click =>
submitData("/user/getSessionId", {}).then (res) =>
if res
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.add-review-form").addClass("form-visible")
else
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.login-form").addClass("form-visible")
$(".form-container .register-form .submit").click =>
username = $(".form-container .register-form .username").val()
password = $(".form-container .register-form .password").val()
cPassword = $(".form-container .register-form .cpassword").val()
if not username then return $(".form-container .register-form .error").text("Username cannot be empty.")
if not password then return $(".form-container .register-form .error").text("Password cannot be empty.")
if not cPassword then return $(".form-container .register-form .error").text("Password cannot be empty.")
if username.length < 5 then return $(".form-container .register-form .error").text("Username must be at least 5 letters.")
if password.length < 5 then return $(".form-container .register-form .error").text("Password be at least 5 letters.")
if password != cPassword then return $(".form-container .register-form .error").text("Password must match.")
$(".form-container .register-form .error").text(" ")
submitData("/user/newUser", {
username: username
password: <PASSWORD>
}).then (res) =>
if not JSON.parse(res).status
$(".form-container .register-form .error").text(JSON.parse(res).message)
else
location.reload()
$(".form-container .login-form .submit").click =>
username = $(".form-container .login-form .username").val()
password = $(".form-container .login-form .password").val()
submitData("/user/login", {
username: username
password: <PASSWORD>
}).then (res) =>
if not JSON.parse(res).status
$(".form-container .login-form .error").text(JSON.parse(res).message)
else
location.reload()
$(".form-container .add-review-form .submit").click =>
$(".add-review-form .review-text").css("border", "1px solid transparent")
$(".add-review-form .category .cat").css("border", "1px solid transparent")
$(".add-review-form .category .rating").css("border", "1px solid transparent")
$(".add-review-form .overall").css("border", "1px solid transparent")
$(".add-review-form .title").css("border", "1px solid transparent")
if not $(".add-review-form .title").val()
$(".add-review-form .title").css("border", "1px solid black")
if not $(".add-review-form .review-text").val()
$(".add-review-form .review-text").css("border", "1px solid black")
if not $(".add-review-form .overall").val() or isNaN($(".add-review-form .overall").val())
$(".add-review-form .overall").css("border", "1px solid black")
allcategory = []
for section in $(".add-review-form .category")
category = $(section).find(".cat")
rating = $(section).find(".rating")
console.log(category.val(), rating.val())
if not category.val()
category.css("border", "1px solid black")
if not rating.val() or isNaN(rating.val())
rating.css("border", "1px solid black")
if allcategory.indexOf(category.val()) != -1
category.css("border", "1px solid black")
if category.val() and rating.val() and not isNaN(rating.val()) and allcategory.indexOf(category.val()) == -1
console.log(category, rating)
allcategory.push([category.val().toLowerCase(), rating.val()])
if $(".add-review-form .review-text").val() and $(".add-review-form .overall").val() and not isNaN($(".add-review-form .overall").val()) and $(".add-review-form .title").val()
submitData("/user/getSessionId", {}).then (userId) =>
submitData("/review/addReview", {
title: $(".add-review-form .title").val()
reviewText: $(".add-review-form .review-text").val()
category: JSON.stringify(allcategory)
overall: $(".add-review-form .overall").val()
pId: $("#review").attr("pid")
userId: userId
type: $("#review").attr("type")
}).then (res) =>
location.reload()
allcategory = []
| true | submitData = (route, data) =>
$.ajax
url: route
type: 'POST'
data: data
$(document).ready =>
$(".form-container .close, .form-container .clickable-bg").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
showPageOpen = false;
$(".form-container .dynamic-form.login-form .register").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.register-form").addClass("form-visible")
$(".form-container .dynamic-form.register-form .login").click =>
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.login-form").addClass("form-visible")
$("#review .buttons .add-review").click =>
submitData("/user/getSessionId", {}).then (res) =>
if res
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.add-review-form").addClass("form-visible")
else
$(".form-container, .form-container .dynamic-form").removeClass("form-visible")
$(".form-container, .form-container .dynamic-form.login-form").addClass("form-visible")
$(".form-container .register-form .submit").click =>
username = $(".form-container .register-form .username").val()
password = $(".form-container .register-form .password").val()
cPassword = $(".form-container .register-form .cpassword").val()
if not username then return $(".form-container .register-form .error").text("Username cannot be empty.")
if not password then return $(".form-container .register-form .error").text("Password cannot be empty.")
if not cPassword then return $(".form-container .register-form .error").text("Password cannot be empty.")
if username.length < 5 then return $(".form-container .register-form .error").text("Username must be at least 5 letters.")
if password.length < 5 then return $(".form-container .register-form .error").text("Password be at least 5 letters.")
if password != cPassword then return $(".form-container .register-form .error").text("Password must match.")
$(".form-container .register-form .error").text(" ")
submitData("/user/newUser", {
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
}).then (res) =>
if not JSON.parse(res).status
$(".form-container .register-form .error").text(JSON.parse(res).message)
else
location.reload()
$(".form-container .login-form .submit").click =>
username = $(".form-container .login-form .username").val()
password = $(".form-container .login-form .password").val()
submitData("/user/login", {
username: username
password: PI:PASSWORD:<PASSWORD>END_PI
}).then (res) =>
if not JSON.parse(res).status
$(".form-container .login-form .error").text(JSON.parse(res).message)
else
location.reload()
$(".form-container .add-review-form .submit").click =>
$(".add-review-form .review-text").css("border", "1px solid transparent")
$(".add-review-form .category .cat").css("border", "1px solid transparent")
$(".add-review-form .category .rating").css("border", "1px solid transparent")
$(".add-review-form .overall").css("border", "1px solid transparent")
$(".add-review-form .title").css("border", "1px solid transparent")
if not $(".add-review-form .title").val()
$(".add-review-form .title").css("border", "1px solid black")
if not $(".add-review-form .review-text").val()
$(".add-review-form .review-text").css("border", "1px solid black")
if not $(".add-review-form .overall").val() or isNaN($(".add-review-form .overall").val())
$(".add-review-form .overall").css("border", "1px solid black")
allcategory = []
for section in $(".add-review-form .category")
category = $(section).find(".cat")
rating = $(section).find(".rating")
console.log(category.val(), rating.val())
if not category.val()
category.css("border", "1px solid black")
if not rating.val() or isNaN(rating.val())
rating.css("border", "1px solid black")
if allcategory.indexOf(category.val()) != -1
category.css("border", "1px solid black")
if category.val() and rating.val() and not isNaN(rating.val()) and allcategory.indexOf(category.val()) == -1
console.log(category, rating)
allcategory.push([category.val().toLowerCase(), rating.val()])
if $(".add-review-form .review-text").val() and $(".add-review-form .overall").val() and not isNaN($(".add-review-form .overall").val()) and $(".add-review-form .title").val()
submitData("/user/getSessionId", {}).then (userId) =>
submitData("/review/addReview", {
title: $(".add-review-form .title").val()
reviewText: $(".add-review-form .review-text").val()
category: JSON.stringify(allcategory)
overall: $(".add-review-form .overall").val()
pId: $("#review").attr("pid")
userId: userId
type: $("#review").attr("type")
}).then (res) =>
location.reload()
allcategory = []
|
[
{
"context": "# Author: Josh Bass\n\nReact = require(\"react\");\n\nCustomerEdit = requir",
"end": 19,
"score": 0.9998804330825806,
"start": 10,
"tag": "NAME",
"value": "Josh Bass"
}
] | src/client/components/customers/CustomerCreate.coffee | jbass86/Aroma | 0 | # Author: Josh Bass
React = require("react");
CustomerEdit = require("./CustomerEdit.coffee");
Moment = require("moment");
module.exports = React.createClass
getInitialState: ->
@default_state = {show_create_customer: false, first_name: "", middle_name: "", address: "", birthday: Moment(), \
customer_alert: "", customer_success: false};
render: ->
<div className="create-item">
<button type="button" className="mod-button btn btn-primary" onClick={@showCreateInventory}>Add Customer</button>
<div className="clear-both"></div>
<div className={@getCreateInventoryClasses()}>
<CustomerEdit updateCustomer={@updateCustomer} handleFinish={@handleClose}/>
</div>
</div>
getCreateInventoryClasses: ->
classes = "collapsible"
if (@state.show_create_customer)
classes += " full-height-medium";
else
classes += " no-height";
classes
showCreateInventory: (ev) ->
@setState({show_create_customer: true});
handleClose: () ->
@setState({show_create_customer: false});
window.setTimeout(() =>
@setState(@default_state);
, 1000);
updateCustomer: () ->
@props.customerUpdate();
| 107904 | # Author: <NAME>
React = require("react");
CustomerEdit = require("./CustomerEdit.coffee");
Moment = require("moment");
module.exports = React.createClass
getInitialState: ->
@default_state = {show_create_customer: false, first_name: "", middle_name: "", address: "", birthday: Moment(), \
customer_alert: "", customer_success: false};
render: ->
<div className="create-item">
<button type="button" className="mod-button btn btn-primary" onClick={@showCreateInventory}>Add Customer</button>
<div className="clear-both"></div>
<div className={@getCreateInventoryClasses()}>
<CustomerEdit updateCustomer={@updateCustomer} handleFinish={@handleClose}/>
</div>
</div>
getCreateInventoryClasses: ->
classes = "collapsible"
if (@state.show_create_customer)
classes += " full-height-medium";
else
classes += " no-height";
classes
showCreateInventory: (ev) ->
@setState({show_create_customer: true});
handleClose: () ->
@setState({show_create_customer: false});
window.setTimeout(() =>
@setState(@default_state);
, 1000);
updateCustomer: () ->
@props.customerUpdate();
| true | # Author: PI:NAME:<NAME>END_PI
React = require("react");
CustomerEdit = require("./CustomerEdit.coffee");
Moment = require("moment");
module.exports = React.createClass
getInitialState: ->
@default_state = {show_create_customer: false, first_name: "", middle_name: "", address: "", birthday: Moment(), \
customer_alert: "", customer_success: false};
render: ->
<div className="create-item">
<button type="button" className="mod-button btn btn-primary" onClick={@showCreateInventory}>Add Customer</button>
<div className="clear-both"></div>
<div className={@getCreateInventoryClasses()}>
<CustomerEdit updateCustomer={@updateCustomer} handleFinish={@handleClose}/>
</div>
</div>
getCreateInventoryClasses: ->
classes = "collapsible"
if (@state.show_create_customer)
classes += " full-height-medium";
else
classes += " no-height";
classes
showCreateInventory: (ev) ->
@setState({show_create_customer: true});
handleClose: () ->
@setState({show_create_customer: false});
window.setTimeout(() =>
@setState(@default_state);
, 1000);
updateCustomer: () ->
@props.customerUpdate();
|
[
{
"context": "ble', ->\n config1 = config2 =\n host: '127.0.0.1',\n username: undefined,\n private_ke",
"end": 406,
"score": 0.9996308088302612,
"start": 397,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "ig2 =\n host: '127.0.0.1',\n ... | packages/core/test/utils/ssh.coffee | wdavidw/node-mecano | 0 |
{merge} = require 'mixme'
nikita = require '../../src'
utils = require '../../src/utils'
{tags, config} = require '../test'
# All test are executed with an ssh connection passed as an argument
they = require('mocha-they')(config.filter ({ssh}) -> !!ssh)
describe 'utils.ssh', ->
return unless tags.api
describe 'compare', ->
it 'immutable', ->
config1 = config2 =
host: '127.0.0.1',
username: undefined,
private_key_path: '~/.ssh/id_rsa'
original = merge config1
utils.ssh.compare(config1,config2)
config1.should.eql original
they 'compare two null', ({ssh}) ->
utils.ssh.compare(null, null).should.be.true()
utils.ssh.compare(null, false).should.be.true()
they 'compare identical configs', ({ssh}) ->
utils.ssh.compare(ssh, ssh).should.be.true()
they 'compare identical connections', ({ssh}) ->
try
{ssh: conn1} = await nikita.ssh.open ssh
{ssh: conn2} = await nikita.ssh.open ssh
utils.ssh.compare(conn1, conn2).should.be.true()
finally
nikita.ssh.close ssh: conn1
nikita.ssh.close ssh: conn2
they 'compare a connection with a config', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.compare(conn, ssh).should.be.true()
finally
nikita.ssh.close ssh: conn
they 'compare a config with a connection', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.compare(ssh, conn).should.be.true()
finally
nikita.ssh.close ssh: conn
describe 'is', ->
it 'undefined', ->
utils.ssh.is(undefined).should.be.false()
they 'connection', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.is(conn).should.be.true()
finally
nikita.ssh.close ssh: conn
| 39935 |
{merge} = require 'mixme'
nikita = require '../../src'
utils = require '../../src/utils'
{tags, config} = require '../test'
# All test are executed with an ssh connection passed as an argument
they = require('mocha-they')(config.filter ({ssh}) -> !!ssh)
describe 'utils.ssh', ->
return unless tags.api
describe 'compare', ->
it 'immutable', ->
config1 = config2 =
host: '127.0.0.1',
username: undefined,
private_key_path: <KEY>'
original = merge config1
utils.ssh.compare(config1,config2)
config1.should.eql original
they 'compare two null', ({ssh}) ->
utils.ssh.compare(null, null).should.be.true()
utils.ssh.compare(null, false).should.be.true()
they 'compare identical configs', ({ssh}) ->
utils.ssh.compare(ssh, ssh).should.be.true()
they 'compare identical connections', ({ssh}) ->
try
{ssh: conn1} = await nikita.ssh.open ssh
{ssh: conn2} = await nikita.ssh.open ssh
utils.ssh.compare(conn1, conn2).should.be.true()
finally
nikita.ssh.close ssh: conn1
nikita.ssh.close ssh: conn2
they 'compare a connection with a config', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.compare(conn, ssh).should.be.true()
finally
nikita.ssh.close ssh: conn
they 'compare a config with a connection', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.compare(ssh, conn).should.be.true()
finally
nikita.ssh.close ssh: conn
describe 'is', ->
it 'undefined', ->
utils.ssh.is(undefined).should.be.false()
they 'connection', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.is(conn).should.be.true()
finally
nikita.ssh.close ssh: conn
| true |
{merge} = require 'mixme'
nikita = require '../../src'
utils = require '../../src/utils'
{tags, config} = require '../test'
# All test are executed with an ssh connection passed as an argument
they = require('mocha-they')(config.filter ({ssh}) -> !!ssh)
describe 'utils.ssh', ->
return unless tags.api
describe 'compare', ->
it 'immutable', ->
config1 = config2 =
host: '127.0.0.1',
username: undefined,
private_key_path: PI:KEY:<KEY>END_PI'
original = merge config1
utils.ssh.compare(config1,config2)
config1.should.eql original
they 'compare two null', ({ssh}) ->
utils.ssh.compare(null, null).should.be.true()
utils.ssh.compare(null, false).should.be.true()
they 'compare identical configs', ({ssh}) ->
utils.ssh.compare(ssh, ssh).should.be.true()
they 'compare identical connections', ({ssh}) ->
try
{ssh: conn1} = await nikita.ssh.open ssh
{ssh: conn2} = await nikita.ssh.open ssh
utils.ssh.compare(conn1, conn2).should.be.true()
finally
nikita.ssh.close ssh: conn1
nikita.ssh.close ssh: conn2
they 'compare a connection with a config', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.compare(conn, ssh).should.be.true()
finally
nikita.ssh.close ssh: conn
they 'compare a config with a connection', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.compare(ssh, conn).should.be.true()
finally
nikita.ssh.close ssh: conn
describe 'is', ->
it 'undefined', ->
utils.ssh.is(undefined).should.be.false()
they 'connection', ({ssh}) ->
try
{ssh: conn} = await nikita.ssh.open ssh
utils.ssh.is(conn).should.be.true()
finally
nikita.ssh.close ssh: conn
|
[
{
"context": " Coffeescript implementation of PHP version from David Grudl\n http://latrine.dgx.cz/jak-overit-platne-ic-a-",
"end": 87,
"score": 0.9139158129692078,
"start": 76,
"tag": "NAME",
"value": "David Grudl"
}
] | src/Validators.coffee | ezo5/Node-AresData | 5 | class Validators
###
Coffeescript implementation of PHP version from David Grudl
http://latrine.dgx.cz/jak-overit-platne-ic-a-rodne-cislo
###
@companyIdentification: (identification) ->
identification += ''
identification = identification.replace(/\s+/g, '')
if identification.match(/^\d{8}$/) == null
return false
a = 0
for i in [0..6]
a += identification.charAt(i) * (8 - i)
a = a % 11
switch a
when 0, 10 then c = 1
when 1 then c = 0
else c = 11 - a
return parseInt(identification.charAt(7)) == c
module.exports = Validators
| 223878 | class Validators
###
Coffeescript implementation of PHP version from <NAME>
http://latrine.dgx.cz/jak-overit-platne-ic-a-rodne-cislo
###
@companyIdentification: (identification) ->
identification += ''
identification = identification.replace(/\s+/g, '')
if identification.match(/^\d{8}$/) == null
return false
a = 0
for i in [0..6]
a += identification.charAt(i) * (8 - i)
a = a % 11
switch a
when 0, 10 then c = 1
when 1 then c = 0
else c = 11 - a
return parseInt(identification.charAt(7)) == c
module.exports = Validators
| true | class Validators
###
Coffeescript implementation of PHP version from PI:NAME:<NAME>END_PI
http://latrine.dgx.cz/jak-overit-platne-ic-a-rodne-cislo
###
@companyIdentification: (identification) ->
identification += ''
identification = identification.replace(/\s+/g, '')
if identification.match(/^\d{8}$/) == null
return false
a = 0
for i in [0..6]
a += identification.charAt(i) * (8 - i)
a = a % 11
switch a
when 0, 10 then c = 1
when 1 then c = 0
else c = 11 - a
return parseInt(identification.charAt(7)) == c
module.exports = Validators
|
[
{
"context": " yield utils.initAdmin()\n yield utils.loginUser(@admin)\n yield request.postAsync(getURL('/db/article'",
"end": 614,
"score": 0.9904212951660156,
"start": 608,
"tag": "USERNAME",
"value": "@admin"
},
{
"context": "s', utils.wrap (done) ->\n yield utils.loginUs... | spec/server/functional/article.spec.coffee | l34kr/codecombat | 2 | require '../common'
utils = require '../utils'
_ = require 'lodash'
Promise = require 'bluebird'
request = require '../request'
requestAsync = Promise.promisify(request, {multiArgs: true})
Article = require '../../../server/models/Article'
User = require '../../../server/models/User'
describe 'GET /db/article', ->
articleData1 = { name: 'Article 1', body: 'Article 1 body cow', i18nCoverage: [] }
articleData2 = { name: 'Article 2', body: 'Article 2 body moo' }
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin()
yield utils.loginUser(@admin)
yield request.postAsync(getURL('/db/article'), { json: articleData1 })
yield request.postAsync(getURL('/db/article'), { json: articleData2 })
yield utils.becomeAnonymous()
done()
it 'returns an array of Article objects', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL('/db/article'), json: true }
expect(body.length).toBe(2)
done()
it 'accepts a limit parameter', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?limit=1'), json: true}
expect(body.length).toBe(1)
done()
it 'returns 422 for an invalid limit parameter', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?limit=word'), json: true}
expect(res.statusCode).toBe(422)
done()
it 'accepts a skip parameter', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?skip=1'), json: true}
expect(body.length).toBe(1)
[res, body] = yield request.getAsync {uri: getURL('/db/article?skip=2'), json: true}
expect(body.length).toBe(0)
done()
it 'returns 422 for an invalid skip parameter', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?skip=???'), json: true}
expect(res.statusCode).toBe(422)
done()
it 'accepts a custom project parameter', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?project=name,body'), json: true}
expect(body.length).toBe(2)
for doc in body
expect(_.size(_.xor(_.keys(doc), ['_id', 'name', 'body']))).toBe(0)
done()
it 'returns a default projection if project is "true"', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?project=true'), json: true}
expect(res.statusCode).toBe(200)
expect(body.length).toBe(2)
expect(body[0].body).toBeUndefined()
expect(body[0].version).toBeDefined()
done()
it 'accepts custom filter parameters', utils.wrap (done) ->
yield utils.loginUser(@admin)
[res, body] = yield request.getAsync {uri: getURL('/db/article?filter[slug]="article-1"'), json: true}
expect(body.length).toBe(1)
done()
it 'ignores custom filter parameters for non-admins', utils.wrap (done) ->
user = yield utils.initUser()
yield utils.loginUser(user)
[res, body] = yield request.getAsync {uri: getURL('/db/article?filter[slug]="article-1"'), json: true}
expect(body.length).toBe(2)
done()
it 'accepts custom condition parameters', utils.wrap (done) ->
yield utils.loginUser(@admin)
[res, body] = yield request.getAsync {uri: getURL('/db/article?conditions[select]="slug body"'), json: true}
expect(body.length).toBe(2)
for doc in body
expect(_.size(_.xor(_.keys(doc), ['_id', 'slug', 'body']))).toBe(0)
done()
it 'ignores custom condition parameters for non-admins', utils.wrap (done) ->
user = yield utils.initUser()
yield utils.loginUser(user)
[res, body] = yield request.getAsync {uri: getURL('/db/article?conditions[select]="slug body"'), json: true}
expect(body.length).toBe(2)
for doc in body
expect(doc.name).toBeDefined()
done()
it 'allows non-admins to view by i18n-coverage', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?view=i18n-coverage'), json: true}
expect(body.length).toBe(1)
expect(body[0].slug).toBe('article-1')
done()
it 'allows non-admins to search by text', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL('/db/article?term=moo'), json: true}
expect(body.length).toBe(1)
expect(body[0].slug).toBe('article-2')
done()
describe 'POST /db/article', ->
articleData = { name: 'Article', body: 'Article', otherProp: 'not getting set' }
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[@res, @body] = yield request.postAsync {
uri: getURL('/db/article'), json: articleData
}
done()
it 'creates a new Article, returning 201', utils.wrap (done) ->
expect(@res.statusCode).toBe(201)
article = yield Article.findById(@body._id).exec()
expect(article).toBeDefined()
done()
it 'sets creator to the user who created it', ->
expect(@res.body.creator).toBe(@admin.id)
it 'sets original to _id', ->
body = @res.body
expect(body.original).toBe(body._id)
it 'allows you to set Article\'s editableProperties', ->
expect(@body.name).toBe('Article')
it 'ignores properties not included in editableProperties', ->
expect(@body.otherProp).toBeUndefined()
it 'returns 422 when properties do not pass validation', utils.wrap (done) ->
[res, body] = yield request.postAsync {
uri: getURL('/db/article'), json: { i18nCoverage: 9001 }
}
expect(res.statusCode).toBe(422)
expect(body.validationErrors).toBeDefined()
done()
it 'allows admins to create Articles', -> # handled in beforeEach
it 'allows artisans to create Articles', utils.wrap (done) ->
yield utils.clearModels([Article])
artisan = yield utils.initArtisan({})
yield utils.loginUser(artisan)
[res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
expect(res.statusCode).toBe(201)
done()
it 'does not allow normal users to create Articles', utils.wrap (done) ->
yield utils.clearModels([Article])
user = yield utils.initUser({})
yield utils.loginUser(user)
[res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
expect(res.statusCode).toBe(403)
done()
it 'does not allow anonymous users to create Articles', utils.wrap (done) ->
yield utils.clearModels([Article])
yield utils.becomeAnonymous()
[res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
expect(res.statusCode).toBe(401)
done()
it 'does not allow creating Articles with reserved words', utils.wrap (done) ->
[res, body] = yield request.postAsync { uri: getURL('/db/article'), json: { name: 'Names' } }
expect(res.statusCode).toBe(422)
done()
it 'does not allow creating a second article of the same name', utils.wrap (done) ->
[res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(409)
done()
describe 'GET /db/article/:handle', ->
articleData = { name: 'Some Name', body: 'Article' }
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[@res, @body] = yield request.postAsync {
uri: getURL('/db/article'), json: articleData
}
done()
it 'returns Article by id', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL("/db/article/#{@body._id}"), json: true}
expect(res.statusCode).toBe(200)
expect(_.isObject(body)).toBe(true)
done()
it 'returns Article by slug', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL("/db/article/some-name"), json: true}
expect(res.statusCode).toBe(200)
expect(_.isObject(body)).toBe(true)
done()
it 'returns not found if handle does not exist in the db', utils.wrap (done) ->
[res, body] = yield request.getAsync {uri: getURL("/db/article/dne"), json: true}
expect(res.statusCode).toBe(404)
done()
putTests = (method='PUT') ->
articleData = { name: 'Some Name', body: 'Article' }
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[@res, @body] = yield request.postAsync {
uri: getURL('/db/article'), json: articleData
}
done()
it 'edits editable Article properties', utils.wrap (done) ->
[res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: { body: 'New body' }}
expect(body.body).toBe('New body')
done()
it 'updates the slug when the name is changed', utils.wrap (done) ->
[res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: json = { name: 'New name' }}
expect(body.name).toBe('New name')
expect(body.slug).toBe('new-name')
done()
it 'does not allow normal artisan, non-admins to make changes', utils.wrap (done) ->
artisan = yield utils.initArtisan({})
yield utils.loginUser(artisan)
[res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: { name: 'Another name' }}
expect(res.statusCode).toBe(403)
done()
describe 'PUT /db/article/:handle', -> putTests('PUT')
describe 'PATCH /db/article/:handle', -> putTests('PATCH')
describe 'POST /db/article/:handle/new-version', ->
articleData = { name: 'Article name', body: 'Article body', i18n: {} }
articleID = null
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
articleID = body._id
done()
postNewVersion = Promise.promisify (json, expectedStatus=201, done) ->
if _.isFunction(expectedStatus)
done = expectedStatus
expectedStatus = 201
url = getURL("/db/article/#{articleID}/new-version")
request.post { uri: url, json: json }, (err, res, body) ->
expect(res.statusCode).toBe(expectedStatus)
done(err)
testArrayEqual = (given, expected) ->
expect(_.isEqual(given, expected)).toBe(true)
it 'creates a new major version, updating model and version properties', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', body: 'New body' })
yield postNewVersion({ name: 'New name', body: 'New new body' })
articles = yield Article.find()
expect(articles.length).toBe(3)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 1, 2])
testArrayEqual(_.pluck(versions, 'minor'), [0, 0, 0])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false, true])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [true, true, true])
testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'New name'])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, undefined, 'new-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, undefined, true])
done()
it 'works if there is no document with the appropriate version settings (new major)', utils.wrap (done) ->
article = yield Article.findById(articleID)
article.set({ 'version.isLatestMajor': false, 'version.isLatestMinor': false })
yield article.save()
yield postNewVersion({ name: 'Article name', body: 'New body' })
articles = yield Article.find()
expect(articles.length).toBe(2)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 1])
testArrayEqual(_.pluck(versions, 'minor'), [0, 0])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, true])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true]) # does not fix the old version's value
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, true])
done()
it 'creates a new minor version if version.major is included', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', body: 'New body', version: { major: 0 } })
yield postNewVersion({ name: 'Article name', body: 'New new body', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(3)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 0, 0])
testArrayEqual(_.pluck(versions, 'minor'), [0, 1, 2])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false, true])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, false, true])
testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'Article name'])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, undefined, 'article-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, undefined, true])
done()
it 'works if there is no document with the appropriate version settings (new minor)', utils.wrap (done) ->
article = yield Article.findById(articleID)
article.set({ 'version.isLatestMajor': false, 'version.isLatestMinor': false })
yield article.save()
yield postNewVersion({ name: 'Article name', body: 'New body', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(2)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 0])
testArrayEqual(_.pluck(versions, 'minor'), [0, 1])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, true])
done()
it 'allows adding new minor versions to old major versions', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', body: 'New body' })
yield postNewVersion({ name: 'Article name', body: 'New new body', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(3)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 1, 0])
testArrayEqual(_.pluck(versions, 'minor'), [0, 0, 1])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, true, false])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true, true])
testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'Article name'])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name', undefined])
testArrayEqual(_.pluck(articles, 'index'), [undefined, true, undefined])
done()
it 'unsets properties which are not included in the request', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(2)
expect(articles[1].get('body')).toBeUndefined()
done()
it 'works for artisans', utils.wrap (done) ->
yield utils.logout()
artisan = yield utils.initArtisan()
yield utils.loginUser(artisan)
yield postNewVersion({ name: 'Article name', body: 'New body' })
articles = yield Article.find()
expect(articles.length).toBe(2)
done()
it 'works for normal users submitting translations', utils.wrap (done) ->
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
yield postNewVersion({ name: 'Article name', body: 'Article body', i18n: { fr: { name: 'Le Article' }}}, 201)
articles = yield Article.find()
expect(articles.length).toBe(2)
done()
it 'does not work for normal users', utils.wrap (done) ->
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
yield postNewVersion({ name: 'Article name', body: 'New body' }, 403)
articles = yield Article.find()
expect(articles.length).toBe(1)
done()
it 'does not work for anonymous users', utils.wrap (done) ->
yield utils.becomeAnonymous()
yield postNewVersion({ name: 'Article name', body: 'New body' }, 401)
articles = yield Article.find()
expect(articles.length).toBe(1)
done()
it 'notifies watchers of changes', utils.wrap (done) ->
sendwithus = require '../../../server/sendwithus'
spyOn(sendwithus.api, 'send').and.callFake (context, cb) ->
expect(context.email_id).toBe(sendwithus.templates.change_made_notify_watcher)
expect(context.recipient.address).toBe('test@gmail.com')
done()
user = yield User({email: 'test@gmail.com', name: 'a user'}).save()
article = yield Article.findById(articleID)
article.set('watchers', article.get('watchers').concat([user.get('_id')]))
yield article.save()
yield postNewVersion({ name: 'Article name', body: 'New body', commitMessage: 'Commit message' })
it 'sends a notification to artisan and main Slack channels', utils.wrap (done) ->
slack = require '../../../server/slack'
spyOn(slack, 'sendSlackMessage')
yield postNewVersion({ name: 'Article name', body: 'New body' })
expect(slack.sendSlackMessage).toHaveBeenCalled()
done()
describe 'version fetching endpoints', ->
articleData = { name: 'Original version', body: 'Article body' }
articleOriginal = null
postNewVersion = Promise.promisify (json, expectedStatus=201, done) ->
if _.isFunction(expectedStatus)
done = expectedStatus
expectedStatus = 201
url = getURL("/db/article/#{articleOriginal}/new-version")
request.post { uri: url, json: json }, (err, res) ->
expect(res.statusCode).toBe(expectedStatus)
done(err)
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
articleOriginal = body._id
yield postNewVersion({ name: 'Latest minor version', body: 'New body', version: {major: 0} })
yield postNewVersion({ name: 'Latest major version', body: 'New new body' })
done()
describe 'GET /db/article/:handle/version/:version', ->
it 'returns the latest version for the given original article when :version is empty', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version"), json: true }
expect(body.name).toBe('Latest major version')
done()
it 'returns the latest of a given major version when :version is X', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version/0"), json: true }
expect(body.name).toBe('Latest minor version')
done()
it 'returns a specific version when :version is X.Y', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version/0.0"), json: true }
expect(body.name).toBe('Original version')
done()
it 'returns 422 when the original value is invalid', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL('/db/article/dne/version'), json: true }
expect(res.statusCode).toBe(422)
done()
it 'returns 404 when the original value cannot be found', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL('/db/article/012345678901234567890123/version'), json: true }
expect(res.statusCode).toBe(404)
done()
describe 'GET /db/article/:handle/versions', ->
it 'returns an array of versions sorted by creation for the given original article', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/versions"), json: true }
expect(body.length).toBe(3)
expect(body[0].name).toBe('Latest major version')
expect(body[1].name).toBe('Latest minor version')
expect(body[2].name).toBe('Original version')
done()
it 'projects most properties by default', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/versions"), json: true }
expect(body[0].body).toBeUndefined()
done()
describe 'GET /db/article/:handle/files', ->
it 'returns an array of file metadata for the given original article', utils.wrap (done) ->
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
[res, body] = yield request.postAsync(getURL('/file'), { json: {
url: getURL('/assets/main.html')
filename: 'test.html'
path: 'db/article/'+article.original
mimetype: 'text/html'
}})
[res, body] = yield request.getAsync(getURL('/db/article/'+article.original+'/files'), {json: true})
expect(body.length).toBe(1)
expect(body[0].filename).toBe('test.html')
expect(body[0].metadata.path).toBe('db/article/'+article.original)
done()
describe 'GET and POST /db/article/:handle/names', ->
articleData1 = { name: 'Article 1', body: 'Article 1 body' }
articleData2 = { name: 'Article 2', body: 'Article 2 body' }
it 'returns an object mapping ids to names', utils.wrap (done) ->
yield utils.clearModels([Article])
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article1] = yield request.postAsync(getURL('/db/article'), { json: articleData1 })
[res, article2] = yield request.postAsync(getURL('/db/article'), { json: articleData2 })
yield utils.becomeAnonymous()
[res, body] = yield request.getAsync { uri: getURL('/db/article/names?ids='+[article1._id, article2._id].join(',')), json: true }
expect(body.length).toBe(2)
expect(body[0].name).toBe('Article 1')
[res, body] = yield request.postAsync { uri: getURL('/db/article/names?ids='+[article1._id, article2._id].join(',')), json: true }
expect(body.length).toBe(2)
expect(body[0].name).toBe('Article 1')
done()
describe 'GET /db/article/:handle/patches', ->
it 'returns pending patches for the given original article', utils.wrap (done) ->
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
[res, patch] = yield request.postAsync { uri: getURL('/db/patch'), json: {
delta: {name:['test']}
commitMessage: 'Test commit'
target: {
collection: 'article'
id: article._id
}
}}
expect(res.statusCode).toBe(201)
[res, patches] = yield request.getAsync getURL("/db/article/#{article._id}/patches"), { json: true }
expect(res.statusCode).toBe(200)
expect(patches.length).toBe(1)
expect(patches[0]._id).toBe(patch._id)
done()
it 'returns 404 for handles that do not exist', utils.wrap (done) ->
[res, body] = yield request.getAsync getURL("/db/article/dne/patches"), { json: true }
expect(res.statusCode).toBe(404)
done()
describe 'POST /db/article/:handle/watchers', ->
it 'adds self to the list of watchers, and is idempotent', utils.wrap (done) ->
# create article
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
# add new user as watcher
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
[res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(_.contains(article.watchers, user.id)).toBe(true)
# check idempotence, db
numWatchers = article.watchers.length
[res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(numWatchers).toBe(article.watchers.length)
article = yield Article.findById(article._id)
expect(_.last(article.get('watchers')).toString()).toBe(user.id)
done()
describe 'DELETE /db/article/:handle/watchers', ->
it 'removes self from the list of watchers, and is idempotent', utils.wrap (done) ->
# create article
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
# add new user as watcher
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
[res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(_.contains(article.watchers, user.id)).toBe(true)
# remove user as watcher
[res, article] = yield request.delAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(_.contains(article.watchers, user.id)).toBe(false)
# check idempotence, db
numWatchers = article.watchers.length
[res, article] = yield request.delAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(numWatchers).toBe(article.watchers.length)
article = yield Article.findById(article._id)
ids = (id.toString() for id in article.get('watchers'))
expect(_.contains(ids, user.id)).toBe(false)
done()
describe 'POST /db/article/:handle/patch', ->
it 'creates a new version if the changes are translation only', utils.wrap (done) ->
# create article
yield utils.clearModels([Article])
admin = yield utils.initAdmin()
yield utils.loginUser(admin)
article = yield utils.makeArticle()
# submit a translation patch
user = yield utils.initUser()
yield utils.loginUser(user)
originalArticle = article.toObject()
changedArticle = _.extend({}, originalArticle, {i18n: {de: {name: 'Name in German'}}})
json = {
delta: jsondiffpatch.diff(originalArticle, changedArticle)
commitMessage: 'Server test commit'
target: {
collection: 'article'
id: article.id
}
}
url = utils.getURL("/db/article/#{article.id}/patch")
[res, body] = yield request.postAsync({ url, json })
expect(res.statusCode).toBe(201)
[firstArticle, secondArticle] = yield Article.find().sort('_id')
expectedVersion = { isLatestMinor: false, isLatestMajor: false, minor: 0, major: 0 }
expect(_.isEqual(firstArticle.get('version'), expectedVersion)).toBe(true)
expectedVersion = { isLatestMinor: true, isLatestMajor: true, minor: 1, major: 0 }
expect(_.isEqual(secondArticle.get('version'), expectedVersion)).toBe(true)
expect(firstArticle.get('i18n.de.name')).toBe(undefined)
expect(secondArticle.get('i18n.de.name')).toBe('Name in German')
expect(firstArticle.get('creator').equals(admin._id)).toBe(true)
expect(secondArticle.get('creator').equals(user._id)).toBe(true)
expect(firstArticle.get('commitMessage')).toBeUndefined()
expect(secondArticle.get('commitMessage')).toBe('Server test commit')
done()
| 15878 | require '../common'
utils = require '../utils'
_ = require 'lodash'
Promise = require 'bluebird'
request = require '../request'
requestAsync = Promise.promisify(request, {multiArgs: true})
Article = require '../../../server/models/Article'
User = require '../../../server/models/User'
# Listing endpoint: pagination (limit/skip), projections, admin-only filter
# and condition query params, i18n-coverage view, and text search.
describe 'GET /db/article', ->
  # i18nCoverage: [] marks article 1 as "has translation coverage" for the
  # view=i18n-coverage test; the bodies contain distinct terms for text search.
  articleData1 = { name: 'Article 1', body: 'Article 1 body cow', i18nCoverage: [] }
  articleData2 = { name: 'Article 2', body: 'Article 2 body moo' }
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    yield request.postAsync(getURL('/db/article'), { json: articleData1 })
    yield request.postAsync(getURL('/db/article'), { json: articleData2 })
    # requests below run unauthenticated unless a test logs in explicitly
    yield utils.becomeAnonymous()
    done()
  it 'returns an array of Article objects', utils.wrap (done) ->
    [res, body] = yield request.getAsync { uri: getURL('/db/article'), json: true }
    expect(body.length).toBe(2)
    done()
  it 'accepts a limit parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?limit=1'), json: true}
    expect(body.length).toBe(1)
    done()
  it 'returns 422 for an invalid limit parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?limit=word'), json: true}
    expect(res.statusCode).toBe(422)
    done()
  it 'accepts a skip parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?skip=1'), json: true}
    expect(body.length).toBe(1)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?skip=2'), json: true}
    expect(body.length).toBe(0)
    done()
  it 'returns 422 for an invalid skip parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?skip=???'), json: true}
    expect(res.statusCode).toBe(422)
    done()
  it 'accepts a custom project parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?project=name,body'), json: true}
    expect(body.length).toBe(2)
    # each returned doc contains exactly _id plus the projected fields
    for doc in body
      expect(_.size(_.xor(_.keys(doc), ['_id', 'name', 'body']))).toBe(0)
    done()
  it 'returns a default projection if project is "true"', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?project=true'), json: true}
    expect(res.statusCode).toBe(200)
    expect(body.length).toBe(2)
    # default projection excludes the (potentially large) body field
    expect(body[0].body).toBeUndefined()
    expect(body[0].version).toBeDefined()
    done()
  it 'accepts custom filter parameters', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?filter[slug]="article-1"'), json: true}
    expect(body.length).toBe(1)
    done()
  it 'ignores custom filter parameters for non-admins', utils.wrap (done) ->
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?filter[slug]="article-1"'), json: true}
    expect(body.length).toBe(2)
    done()
  it 'accepts custom condition parameters', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?conditions[select]="slug body"'), json: true}
    expect(body.length).toBe(2)
    for doc in body
      expect(_.size(_.xor(_.keys(doc), ['_id', 'slug', 'body']))).toBe(0)
    done()
  it 'ignores custom condition parameters for non-admins', utils.wrap (done) ->
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?conditions[select]="slug body"'), json: true}
    expect(body.length).toBe(2)
    for doc in body
      expect(doc.name).toBeDefined()
    done()
  it 'allows non-admins to view by i18n-coverage', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?view=i18n-coverage'), json: true}
    expect(body.length).toBe(1)
    expect(body[0].slug).toBe('article-1')
    done()
  it 'allows non-admins to search by text', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?term=moo'), json: true}
    expect(body.length).toBe(1)
    expect(body[0].slug).toBe('article-2')
    done()
# Creation endpoint: permissions (admin/artisan yes, normal/anonymous no),
# editable-property whitelisting, validation errors, reserved names,
# and name-uniqueness conflicts.
describe 'POST /db/article', ->
  # otherProp is not in the schema's editableProperties, so it must be dropped
  articleData = { name: 'Article', body: 'Article', otherProp: 'not getting set' }
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin({})
    yield utils.loginUser(@admin)
    [@res, @body] = yield request.postAsync {
      uri: getURL('/db/article'), json: articleData
    }
    done()
  it 'creates a new Article, returning 201', utils.wrap (done) ->
    expect(@res.statusCode).toBe(201)
    article = yield Article.findById(@body._id).exec()
    expect(article).toBeDefined()
    done()
  it 'sets creator to the user who created it', ->
    expect(@res.body.creator).toBe(@admin.id)
  it 'sets original to _id', ->
    # first version of a versioned doc points at itself as the original
    body = @res.body
    expect(body.original).toBe(body._id)
  it 'allows you to set Article\'s editableProperties', ->
    expect(@body.name).toBe('Article')
  it 'ignores properties not included in editableProperties', ->
    expect(@body.otherProp).toBeUndefined()
  it 'returns 422 when properties do not pass validation', utils.wrap (done) ->
    [res, body] = yield request.postAsync {
      uri: getURL('/db/article'), json: { i18nCoverage: 9001 }
    }
    expect(res.statusCode).toBe(422)
    expect(body.validationErrors).toBeDefined()
    done()
  it 'allows admins to create Articles', -> # handled in beforeEach
  it 'allows artisans to create Articles', utils.wrap (done) ->
    yield utils.clearModels([Article])
    artisan = yield utils.initArtisan({})
    yield utils.loginUser(artisan)
    [res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
    expect(res.statusCode).toBe(201)
    done()
  it 'does not allow normal users to create Articles', utils.wrap (done) ->
    yield utils.clearModels([Article])
    user = yield utils.initUser({})
    yield utils.loginUser(user)
    [res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
    expect(res.statusCode).toBe(403)
    done()
  it 'does not allow anonymous users to create Articles', utils.wrap (done) ->
    yield utils.clearModels([Article])
    yield utils.becomeAnonymous()
    [res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
    expect(res.statusCode).toBe(401)
    done()
  it 'does not allow creating Articles with reserved words', utils.wrap (done) ->
    [res, body] = yield request.postAsync { uri: getURL('/db/article'), json: { name: 'Names' } }
    expect(res.statusCode).toBe(422)
    done()
  it 'does not allow creating a second article of the same name', utils.wrap (done) ->
    # the beforeEach already created an article with this name
    [res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(409)
    done()
# Fetch-by-handle endpoint: a handle may be either an ObjectId or a slug.
# The fixture is named 'Some Name' so its generated slug is 'some-name',
# which the slug-lookup test below depends on.
describe 'GET /db/article/:handle', ->
  articleData = { name: 'Some Name', body: 'Article' }
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin({})
    yield utils.loginUser(@admin)
    [@res, @body] = yield request.postAsync {
      uri: getURL('/db/article'), json: articleData
    }
    done()
  it 'returns Article by id', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL("/db/article/#{@body._id}"), json: true}
    expect(res.statusCode).toBe(200)
    expect(_.isObject(body)).toBe(true)
    done()
  it 'returns Article by slug', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL("/db/article/some-name"), json: true}
    expect(res.statusCode).toBe(200)
    expect(_.isObject(body)).toBe(true)
    done()
  it 'returns not found if handle does not exist in the db', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL("/db/article/dne"), json: true}
    expect(res.statusCode).toBe(404)
    done()
# Shared spec body for PUT and PATCH /db/article/:handle, since both verbs
# must behave identically: edit editable properties, regenerate the slug on
# rename, and reject non-admin editors.
putTests = (method='PUT') ->
  articleData = { name: 'Some Name', body: 'Article' }
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin({})
    yield utils.loginUser(@admin)
    [@res, @body] = yield request.postAsync {
      uri: getURL('/db/article'), json: articleData
    }
    done()
  it 'edits editable Article properties', utils.wrap (done) ->
    [res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: { body: 'New body' }}
    expect(body.body).toBe('New body')
    done()
  it 'updates the slug when the name is changed', utils.wrap (done) ->
    [res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: { name: 'New name' }}
    expect(body.name).toBe('New name')
    expect(body.slug).toBe('new-name')
    done()
  it 'does not allow normal artisan, non-admins to make changes', utils.wrap (done) ->
    artisan = yield utils.initArtisan({})
    yield utils.loginUser(artisan)
    [res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: { name: 'Another name' }}
    expect(res.statusCode).toBe(403)
    done()

describe 'PUT /db/article/:handle', -> putTests('PUT')
describe 'PATCH /db/article/:handle', -> putTests('PATCH')
# Versioning endpoint: posting without version info creates a new major
# version; including version.major creates a new minor version of that major.
# Also covers permissions (admin/artisan/translator yes, others no) and the
# watcher-email and Slack side effects of committing a new version.
describe 'POST /db/article/:handle/new-version', ->
  articleData = { name: 'Article name', body: 'Article body', i18n: {} }
  articleID = null
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin({})
    yield utils.loginUser(@admin)
    [res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(201)
    articleID = body._id
    done()
  # POSTs json as a new version of the fixture article and asserts the
  # response status. expectedStatus may be omitted (defaults to 201); the
  # promisified callback then arrives in its place and is shifted over.
  postNewVersion = Promise.promisify (json, expectedStatus=201, done) ->
    if _.isFunction(expectedStatus)
      done = expectedStatus
      expectedStatus = 201
    url = getURL("/db/article/#{articleID}/new-version")
    request.post { uri: url, json: json }, (err, res, body) ->
      expect(res.statusCode).toBe(expectedStatus)
      done(err)
  testArrayEqual = (given, expected) ->
    expect(_.isEqual(given, expected)).toBe(true)
  it 'creates a new major version, updating model and version properties', utils.wrap (done) ->
    yield postNewVersion({ name: 'Article name', body: 'New body' })
    yield postNewVersion({ name: 'New name', body: 'New new body' })
    articles = yield Article.find()
    expect(articles.length).toBe(3)
    versions = (article.get('version') for article in articles)
    articles = (article.toObject() for article in articles)
    testArrayEqual(_.pluck(versions, 'major'), [0, 1, 2])
    testArrayEqual(_.pluck(versions, 'minor'), [0, 0, 0])
    # only the newest doc is latest-major; each doc is latest of its own major
    testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false, true])
    testArrayEqual(_.pluck(versions, 'isLatestMinor'), [true, true, true])
    testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'New name'])
    testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
    # slug/index live only on the latest version
    testArrayEqual(_.pluck(articles, 'slug'), [undefined, undefined, 'new-name'])
    testArrayEqual(_.pluck(articles, 'index'), [undefined, undefined, true])
    done()
  it 'works if there is no document with the appropriate version settings (new major)', utils.wrap (done) ->
    # corrupt the fixture so no doc claims to be latest, then version anyway
    article = yield Article.findById(articleID)
    article.set({ 'version.isLatestMajor': false, 'version.isLatestMinor': false })
    yield article.save()
    yield postNewVersion({ name: 'Article name', body: 'New body' })
    articles = yield Article.find()
    expect(articles.length).toBe(2)
    versions = (article.get('version') for article in articles)
    articles = (article.toObject() for article in articles)
    testArrayEqual(_.pluck(versions, 'major'), [0, 1])
    testArrayEqual(_.pluck(versions, 'minor'), [0, 0])
    testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, true])
    testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true]) # does not fix the old version's value
    testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body'])
    testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name'])
    testArrayEqual(_.pluck(articles, 'index'), [undefined, true])
    done()
  it 'creates a new minor version if version.major is included', utils.wrap (done) ->
    yield postNewVersion({ name: 'Article name', body: 'New body', version: { major: 0 } })
    yield postNewVersion({ name: 'Article name', body: 'New new body', version: { major: 0 } })
    articles = yield Article.find()
    expect(articles.length).toBe(3)
    versions = (article.get('version') for article in articles)
    articles = (article.toObject() for article in articles)
    testArrayEqual(_.pluck(versions, 'major'), [0, 0, 0])
    testArrayEqual(_.pluck(versions, 'minor'), [0, 1, 2])
    testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false, true])
    testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, false, true])
    testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'Article name'])
    testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
    testArrayEqual(_.pluck(articles, 'slug'), [undefined, undefined, 'article-name'])
    testArrayEqual(_.pluck(articles, 'index'), [undefined, undefined, true])
    done()
  it 'works if there is no document with the appropriate version settings (new minor)', utils.wrap (done) ->
    article = yield Article.findById(articleID)
    article.set({ 'version.isLatestMajor': false, 'version.isLatestMinor': false })
    yield article.save()
    yield postNewVersion({ name: 'Article name', body: 'New body', version: { major: 0 } })
    articles = yield Article.find()
    expect(articles.length).toBe(2)
    versions = (article.get('version') for article in articles)
    articles = (article.toObject() for article in articles)
    testArrayEqual(_.pluck(versions, 'major'), [0, 0])
    testArrayEqual(_.pluck(versions, 'minor'), [0, 1])
    testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false])
    testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true])
    testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body'])
    testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name'])
    testArrayEqual(_.pluck(articles, 'index'), [undefined, true])
    done()
  it 'allows adding new minor versions to old major versions', utils.wrap (done) ->
    yield postNewVersion({ name: 'Article name', body: 'New body' })
    yield postNewVersion({ name: 'Article name', body: 'New new body', version: { major: 0 } })
    articles = yield Article.find()
    expect(articles.length).toBe(3)
    versions = (article.get('version') for article in articles)
    articles = (article.toObject() for article in articles)
    testArrayEqual(_.pluck(versions, 'major'), [0, 1, 0])
    testArrayEqual(_.pluck(versions, 'minor'), [0, 0, 1])
    # the 1.0 doc stays latest-major even though 0.1 was committed afterwards
    testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, true, false])
    testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true, true])
    testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'Article name'])
    testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
    testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name', undefined])
    testArrayEqual(_.pluck(articles, 'index'), [undefined, true, undefined])
    done()
  it 'unsets properties which are not included in the request', utils.wrap (done) ->
    yield postNewVersion({ name: 'Article name', version: { major: 0 } })
    articles = yield Article.find()
    expect(articles.length).toBe(2)
    # body was omitted from the new version, so it is gone, not inherited
    expect(articles[1].get('body')).toBeUndefined()
    done()
  it 'works for artisans', utils.wrap (done) ->
    yield utils.logout()
    artisan = yield utils.initArtisan()
    yield utils.loginUser(artisan)
    yield postNewVersion({ name: 'Article name', body: 'New body' })
    articles = yield Article.find()
    expect(articles.length).toBe(2)
    done()
  it 'works for normal users submitting translations', utils.wrap (done) ->
    yield utils.logout()
    user = yield utils.initUser()
    yield utils.loginUser(user)
    # i18n-only additions are permitted even without artisan/admin rights
    yield postNewVersion({ name: 'Article name', body: 'Article body', i18n: { fr: { name: 'Le Article' }}}, 201)
    articles = yield Article.find()
    expect(articles.length).toBe(2)
    done()
  it 'does not work for normal users', utils.wrap (done) ->
    yield utils.logout()
    user = yield utils.initUser()
    yield utils.loginUser(user)
    yield postNewVersion({ name: 'Article name', body: 'New body' }, 403)
    articles = yield Article.find()
    expect(articles.length).toBe(1)
    done()
  it 'does not work for anonymous users', utils.wrap (done) ->
    yield utils.becomeAnonymous()
    yield postNewVersion({ name: 'Article name', body: 'New body' }, 401)
    articles = yield Article.find()
    expect(articles.length).toBe(1)
    done()
  it 'notifies watchers of changes', utils.wrap (done) ->
    sendwithus = require '../../../server/sendwithus'
    # done() fires from inside the spy: the test passes only if the
    # change-notification email is actually attempted for the watcher.
    spyOn(sendwithus.api, 'send').and.callFake (context, cb) ->
      expect(context.email_id).toBe(sendwithus.templates.change_made_notify_watcher)
      expect(context.recipient.address).toBe('[email protected]')
      done()
    user = yield User({email: '[email protected]', name: 'a user'}).save()
    article = yield Article.findById(articleID)
    article.set('watchers', article.get('watchers').concat([user.get('_id')]))
    yield article.save()
    yield postNewVersion({ name: 'Article name', body: 'New body', commitMessage: 'Commit message' })
  it 'sends a notification to artisan and main Slack channels', utils.wrap (done) ->
    slack = require '../../../server/slack'
    spyOn(slack, 'sendSlackMessage')
    yield postNewVersion({ name: 'Article name', body: 'New body' })
    expect(slack.sendSlackMessage).toHaveBeenCalled()
    done()
# Version-retrieval endpoints, sharing a fixture of three versions of one
# article: 0.0 'Original version', 0.1 'Latest minor version' (of major 0),
# and 1.0 'Latest major version'.
describe 'version fetching endpoints', ->
  articleData = { name: 'Original version', body: 'Article body' }
  articleOriginal = null
  # same helper shape as in the new-version suite: optional expectedStatus,
  # shifted over when the promisified callback lands in its position
  postNewVersion = Promise.promisify (json, expectedStatus=201, done) ->
    if _.isFunction(expectedStatus)
      done = expectedStatus
      expectedStatus = 201
    url = getURL("/db/article/#{articleOriginal}/new-version")
    request.post { uri: url, json: json }, (err, res) ->
      expect(res.statusCode).toBe(expectedStatus)
      done(err)
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin({})
    yield utils.loginUser(@admin)
    [res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(201)
    articleOriginal = body._id
    yield postNewVersion({ name: 'Latest minor version', body: 'New body', version: {major: 0} })
    yield postNewVersion({ name: 'Latest major version', body: 'New new body' })
    done()
  describe 'GET /db/article/:handle/version/:version', ->
    it 'returns the latest version for the given original article when :version is empty', utils.wrap (done) ->
      [res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version"), json: true }
      expect(body.name).toBe('Latest major version')
      done()
    it 'returns the latest of a given major version when :version is X', utils.wrap (done) ->
      [res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version/0"), json: true }
      expect(body.name).toBe('Latest minor version')
      done()
    it 'returns a specific version when :version is X.Y', utils.wrap (done) ->
      [res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version/0.0"), json: true }
      expect(body.name).toBe('Original version')
      done()
    it 'returns 422 when the original value is invalid', utils.wrap (done) ->
      [res, body] = yield request.getAsync { uri: getURL('/db/article/dne/version'), json: true }
      expect(res.statusCode).toBe(422)
      done()
    it 'returns 404 when the original value cannot be found', utils.wrap (done) ->
      # valid ObjectId format, but no such document
      [res, body] = yield request.getAsync { uri: getURL('/db/article/012345678901234567890123/version'), json: true }
      expect(res.statusCode).toBe(404)
      done()
  describe 'GET /db/article/:handle/versions', ->
    it 'returns an array of versions sorted by creation for the given original article', utils.wrap (done) ->
      # newest first
      [res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/versions"), json: true }
      expect(body.length).toBe(3)
      expect(body[0].name).toBe('Latest major version')
      expect(body[1].name).toBe('Latest minor version')
      expect(body[2].name).toBe('Original version')
      done()
    it 'projects most properties by default', utils.wrap (done) ->
      [res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/versions"), json: true }
      expect(body[0].body).toBeUndefined()
      done()
# File-listing endpoint: files uploaded under db/article/<original> are
# returned as metadata for that article.
describe 'GET /db/article/:handle/files', ->
  it 'returns an array of file metadata for the given original article', utils.wrap (done) ->
    yield utils.clearModels([Article])
    articleData = { name: 'Article', body: 'Article' }
    admin = yield utils.initAdmin({})
    yield utils.loginUser(admin)
    [res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(201)
    # upload a file keyed under this article's original id
    [res, body] = yield request.postAsync(getURL('/file'), { json: {
      url: getURL('/assets/main.html')
      filename: 'test.html'
      path: 'db/article/'+article.original
      mimetype: 'text/html'
    }})
    [res, body] = yield request.getAsync(getURL('/db/article/'+article.original+'/files'), {json: true})
    expect(body.length).toBe(1)
    expect(body[0].filename).toBe('test.html')
    expect(body[0].metadata.path).toBe('db/article/'+article.original)
    done()
# Names lookup endpoint: both GET and POST accept a comma-separated ids query
# param and return the matching id/name pairs; works for anonymous users.
describe 'GET and POST /db/article/:handle/names', ->
  articleData1 = { name: 'Article 1', body: 'Article 1 body' }
  articleData2 = { name: 'Article 2', body: 'Article 2 body' }
  it 'returns an object mapping ids to names', utils.wrap (done) ->
    yield utils.clearModels([Article])
    admin = yield utils.initAdmin({})
    yield utils.loginUser(admin)
    [res, article1] = yield request.postAsync(getURL('/db/article'), { json: articleData1 })
    [res, article2] = yield request.postAsync(getURL('/db/article'), { json: articleData2 })
    yield utils.becomeAnonymous()
    [res, body] = yield request.getAsync { uri: getURL('/db/article/names?ids='+[article1._id, article2._id].join(',')), json: true }
    expect(body.length).toBe(2)
    expect(body[0].name).toBe('Article 1')
    # POST must behave identically to GET
    [res, body] = yield request.postAsync { uri: getURL('/db/article/names?ids='+[article1._id, article2._id].join(',')), json: true }
    expect(body.length).toBe(2)
    expect(body[0].name).toBe('Article 1')
    done()
# Patch-listing endpoint: pending patches targeting the article are returned.
describe 'GET /db/article/:handle/patches', ->
  it 'returns pending patches for the given original article', utils.wrap (done) ->
    yield utils.clearModels([Article])
    articleData = { name: 'Article', body: 'Article' }
    admin = yield utils.initAdmin({})
    yield utils.loginUser(admin)
    [res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(201)
    # file a patch against the article through the generic /db/patch endpoint
    [res, patch] = yield request.postAsync { uri: getURL('/db/patch'), json: {
      delta: {name:['test']}
      commitMessage: 'Test commit'
      target: {
        collection: 'article'
        id: article._id
      }
    }}
    expect(res.statusCode).toBe(201)
    [res, patches] = yield request.getAsync getURL("/db/article/#{article._id}/patches"), { json: true }
    expect(res.statusCode).toBe(200)
    expect(patches.length).toBe(1)
    expect(patches[0]._id).toBe(patch._id)
    done()
  it 'returns 404 for handles that do not exist', utils.wrap (done) ->
    [res, body] = yield request.getAsync getURL("/db/article/dne/patches"), { json: true }
    expect(res.statusCode).toBe(404)
    done()
# Watch endpoint: POST adds the current user to the article's watcher list;
# repeating the request must not add a duplicate entry.
describe 'POST /db/article/:handle/watchers', ->
  it 'adds self to the list of watchers, and is idempotent', utils.wrap (done) ->
    # create article
    yield utils.clearModels([Article])
    articleData = { name: 'Article', body: 'Article' }
    admin = yield utils.initAdmin({})
    yield utils.loginUser(admin)
    [res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(201)
    # add new user as watcher
    yield utils.logout()
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
    expect(res.statusCode).toBe(200)
    expect(_.contains(article.watchers, user.id)).toBe(true)
    # check idempotence, db
    numWatchers = article.watchers.length
    [res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
    expect(res.statusCode).toBe(200)
    expect(numWatchers).toBe(article.watchers.length)
    # confirm directly against the database, not just the response body
    article = yield Article.findById(article._id)
    expect(_.last(article.get('watchers')).toString()).toBe(user.id)
    done()
# Unwatch endpoint: DELETE removes the current user from the watcher list;
# deleting again is a harmless no-op.
describe 'DELETE /db/article/:handle/watchers', ->
  it 'removes self from the list of watchers, and is idempotent', utils.wrap (done) ->
    # create article
    yield utils.clearModels([Article])
    articleData = { name: 'Article', body: 'Article' }
    admin = yield utils.initAdmin({})
    yield utils.loginUser(admin)
    [res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(201)
    # add new user as watcher
    yield utils.logout()
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
    expect(_.contains(article.watchers, user.id)).toBe(true)
    # remove user as watcher
    [res, article] = yield request.delAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
    expect(res.statusCode).toBe(200)
    expect(_.contains(article.watchers, user.id)).toBe(false)
    # check idempotence, db
    numWatchers = article.watchers.length
    [res, article] = yield request.delAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
    expect(res.statusCode).toBe(200)
    expect(numWatchers).toBe(article.watchers.length)
    # confirm removal directly against the database
    article = yield Article.findById(article._id)
    ids = (id.toString() for id in article.get('watchers'))
    expect(_.contains(ids, user.id)).toBe(false)
    done()
# Patch-submission endpoint: a patch whose delta touches only i18n fields is
# auto-accepted as a new minor version, credited to the submitting user.
describe 'POST /db/article/:handle/patch', ->
  it 'creates a new version if the changes are translation only', utils.wrap (done) ->
    # create article
    yield utils.clearModels([Article])
    admin = yield utils.initAdmin()
    yield utils.loginUser(admin)
    article = yield utils.makeArticle()
    # submit a translation patch
    user = yield utils.initUser()
    yield utils.loginUser(user)
    originalArticle = article.toObject()
    changedArticle = _.extend({}, originalArticle, {i18n: {de: {name: 'Name in German'}}})
    json = {
      delta: jsondiffpatch.diff(originalArticle, changedArticle)
      commitMessage: 'Server test commit'
      target: {
        collection: 'article'
        id: article.id
      }
    }
    url = utils.getURL("/db/article/#{article.id}/patch")
    [res, body] = yield request.postAsync({ url, json })
    expect(res.statusCode).toBe(201)
    # The original doc stays at version 0.0 and loses its isLatest* flags;
    # the new doc becomes 0.1 and carries the translation.
    [firstArticle, secondArticle] = yield Article.find().sort('_id')
    expectedVersion = { isLatestMinor: false, isLatestMajor: false, minor: 0, major: 0 }
    expect(_.isEqual(firstArticle.get('version'), expectedVersion)).toBe(true)
    expectedVersion = { isLatestMinor: true, isLatestMajor: true, minor: 1, major: 0 }
    expect(_.isEqual(secondArticle.get('version'), expectedVersion)).toBe(true)
    expect(firstArticle.get('i18n.de.name')).toBe(undefined)
    expect(secondArticle.get('i18n.de.name')).toBe('Name in German')
    # the new version is credited to the translator, not the original creator
    expect(firstArticle.get('creator').equals(admin._id)).toBe(true)
    expect(secondArticle.get('creator').equals(user._id)).toBe(true)
    expect(firstArticle.get('commitMessage')).toBeUndefined()
    expect(secondArticle.get('commitMessage')).toBe('Server test commit')
    done()
require '../common'
utils = require '../utils'
_ = require 'lodash'
Promise = require 'bluebird'
request = require '../request'
requestAsync = Promise.promisify(request, {multiArgs: true})
Article = require '../../../server/models/Article'
User = require '../../../server/models/User'
# Listing endpoint: pagination (limit/skip), projections, admin-only filter
# and condition query params, i18n-coverage view, and text search.
describe 'GET /db/article', ->
  # i18nCoverage: [] marks article 1 as "has translation coverage" for the
  # view=i18n-coverage test; the bodies contain distinct terms for text search.
  articleData1 = { name: 'Article 1', body: 'Article 1 body cow', i18nCoverage: [] }
  articleData2 = { name: 'Article 2', body: 'Article 2 body moo' }
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin()
    yield utils.loginUser(@admin)
    yield request.postAsync(getURL('/db/article'), { json: articleData1 })
    yield request.postAsync(getURL('/db/article'), { json: articleData2 })
    # requests below run unauthenticated unless a test logs in explicitly
    yield utils.becomeAnonymous()
    done()
  it 'returns an array of Article objects', utils.wrap (done) ->
    [res, body] = yield request.getAsync { uri: getURL('/db/article'), json: true }
    expect(body.length).toBe(2)
    done()
  it 'accepts a limit parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?limit=1'), json: true}
    expect(body.length).toBe(1)
    done()
  it 'returns 422 for an invalid limit parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?limit=word'), json: true}
    expect(res.statusCode).toBe(422)
    done()
  it 'accepts a skip parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?skip=1'), json: true}
    expect(body.length).toBe(1)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?skip=2'), json: true}
    expect(body.length).toBe(0)
    done()
  it 'returns 422 for an invalid skip parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?skip=???'), json: true}
    expect(res.statusCode).toBe(422)
    done()
  it 'accepts a custom project parameter', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?project=name,body'), json: true}
    expect(body.length).toBe(2)
    # each returned doc contains exactly _id plus the projected fields
    for doc in body
      expect(_.size(_.xor(_.keys(doc), ['_id', 'name', 'body']))).toBe(0)
    done()
  it 'returns a default projection if project is "true"', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?project=true'), json: true}
    expect(res.statusCode).toBe(200)
    expect(body.length).toBe(2)
    # default projection excludes the (potentially large) body field
    expect(body[0].body).toBeUndefined()
    expect(body[0].version).toBeDefined()
    done()
  it 'accepts custom filter parameters', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?filter[slug]="article-1"'), json: true}
    expect(body.length).toBe(1)
    done()
  it 'ignores custom filter parameters for non-admins', utils.wrap (done) ->
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?filter[slug]="article-1"'), json: true}
    expect(body.length).toBe(2)
    done()
  it 'accepts custom condition parameters', utils.wrap (done) ->
    yield utils.loginUser(@admin)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?conditions[select]="slug body"'), json: true}
    expect(body.length).toBe(2)
    for doc in body
      expect(_.size(_.xor(_.keys(doc), ['_id', 'slug', 'body']))).toBe(0)
    done()
  it 'ignores custom condition parameters for non-admins', utils.wrap (done) ->
    user = yield utils.initUser()
    yield utils.loginUser(user)
    [res, body] = yield request.getAsync {uri: getURL('/db/article?conditions[select]="slug body"'), json: true}
    expect(body.length).toBe(2)
    for doc in body
      expect(doc.name).toBeDefined()
    done()
  it 'allows non-admins to view by i18n-coverage', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?view=i18n-coverage'), json: true}
    expect(body.length).toBe(1)
    expect(body[0].slug).toBe('article-1')
    done()
  it 'allows non-admins to search by text', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL('/db/article?term=moo'), json: true}
    expect(body.length).toBe(1)
    expect(body[0].slug).toBe('article-2')
    done()
# Creation endpoint: permissions (admin/artisan yes, normal/anonymous no),
# editable-property whitelisting, validation errors, reserved names,
# and name-uniqueness conflicts.
describe 'POST /db/article', ->
  # otherProp is not in the schema's editableProperties, so it must be dropped
  articleData = { name: 'Article', body: 'Article', otherProp: 'not getting set' }
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin({})
    yield utils.loginUser(@admin)
    [@res, @body] = yield request.postAsync {
      uri: getURL('/db/article'), json: articleData
    }
    done()
  it 'creates a new Article, returning 201', utils.wrap (done) ->
    expect(@res.statusCode).toBe(201)
    article = yield Article.findById(@body._id).exec()
    expect(article).toBeDefined()
    done()
  it 'sets creator to the user who created it', ->
    expect(@res.body.creator).toBe(@admin.id)
  it 'sets original to _id', ->
    # first version of a versioned doc points at itself as the original
    body = @res.body
    expect(body.original).toBe(body._id)
  it 'allows you to set Article\'s editableProperties', ->
    expect(@body.name).toBe('Article')
  it 'ignores properties not included in editableProperties', ->
    expect(@body.otherProp).toBeUndefined()
  it 'returns 422 when properties do not pass validation', utils.wrap (done) ->
    [res, body] = yield request.postAsync {
      uri: getURL('/db/article'), json: { i18nCoverage: 9001 }
    }
    expect(res.statusCode).toBe(422)
    expect(body.validationErrors).toBeDefined()
    done()
  it 'allows admins to create Articles', -> # handled in beforeEach
  it 'allows artisans to create Articles', utils.wrap (done) ->
    yield utils.clearModels([Article])
    artisan = yield utils.initArtisan({})
    yield utils.loginUser(artisan)
    [res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
    expect(res.statusCode).toBe(201)
    done()
  it 'does not allow normal users to create Articles', utils.wrap (done) ->
    yield utils.clearModels([Article])
    user = yield utils.initUser({})
    yield utils.loginUser(user)
    [res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
    expect(res.statusCode).toBe(403)
    done()
  it 'does not allow anonymous users to create Articles', utils.wrap (done) ->
    yield utils.clearModels([Article])
    yield utils.becomeAnonymous()
    [res, body] = yield request.postAsync({uri: getURL('/db/article'), json: articleData })
    expect(res.statusCode).toBe(401)
    done()
  it 'does not allow creating Articles with reserved words', utils.wrap (done) ->
    [res, body] = yield request.postAsync { uri: getURL('/db/article'), json: { name: 'Names' } }
    expect(res.statusCode).toBe(422)
    done()
  it 'does not allow creating a second article of the same name', utils.wrap (done) ->
    # the beforeEach already created an article with this name
    [res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
    expect(res.statusCode).toBe(409)
    done()
# Fetch-by-handle endpoint: a handle may be either an ObjectId or a slug.
# The fixture is named 'Some Name' so its generated slug is 'some-name',
# which the slug-lookup test below depends on.
describe 'GET /db/article/:handle', ->
  articleData = { name: 'Some Name', body: 'Article' }
  beforeEach utils.wrap (done) ->
    yield utils.clearModels([Article])
    @admin = yield utils.initAdmin({})
    yield utils.loginUser(@admin)
    [@res, @body] = yield request.postAsync {
      uri: getURL('/db/article'), json: articleData
    }
    done()
  it 'returns Article by id', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL("/db/article/#{@body._id}"), json: true}
    expect(res.statusCode).toBe(200)
    expect(_.isObject(body)).toBe(true)
    done()
  it 'returns Article by slug', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL("/db/article/some-name"), json: true}
    expect(res.statusCode).toBe(200)
    expect(_.isObject(body)).toBe(true)
    done()
  it 'returns not found if handle does not exist in the db', utils.wrap (done) ->
    [res, body] = yield request.getAsync {uri: getURL("/db/article/dne"), json: true}
    expect(res.statusCode).toBe(404)
    done()
putTests = (method='PUT') ->
articleData = { name: 'PI:NAME:<NAME>END_PI', body: 'Article' }
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[@res, @body] = yield request.postAsync {
uri: getURL('/db/article'), json: articleData
}
done()
it 'edits editable Article properties', utils.wrap (done) ->
[res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: { body: 'New body' }}
expect(body.body).toBe('New body')
done()
it 'updates the slug when the name is changed', utils.wrap (done) ->
[res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: json = { name: 'New name' }}
expect(body.name).toBe('New name')
expect(body.slug).toBe('new-name')
done()
it 'does not allow normal artisan, non-admins to make changes', utils.wrap (done) ->
artisan = yield utils.initArtisan({})
yield utils.loginUser(artisan)
[res, body] = yield requestAsync {method: method, uri: getURL("/db/article/#{@body._id}"), json: { name: 'Another name' }}
expect(res.statusCode).toBe(403)
done()
describe 'PUT /db/article/:handle', -> putTests('PUT')
describe 'PATCH /db/article/:handle', -> putTests('PATCH')
describe 'POST /db/article/:handle/new-version', ->
articleData = { name: 'Article name', body: 'Article body', i18n: {} }
articleID = null
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
articleID = body._id
done()
postNewVersion = Promise.promisify (json, expectedStatus=201, done) ->
if _.isFunction(expectedStatus)
done = expectedStatus
expectedStatus = 201
url = getURL("/db/article/#{articleID}/new-version")
request.post { uri: url, json: json }, (err, res, body) ->
expect(res.statusCode).toBe(expectedStatus)
done(err)
testArrayEqual = (given, expected) ->
expect(_.isEqual(given, expected)).toBe(true)
it 'creates a new major version, updating model and version properties', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', body: 'New body' })
yield postNewVersion({ name: 'PI:NAME:<NAME>END_PI name', body: 'New new body' })
articles = yield Article.find()
expect(articles.length).toBe(3)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 1, 2])
testArrayEqual(_.pluck(versions, 'minor'), [0, 0, 0])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false, true])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [true, true, true])
testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'New name'])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, undefined, 'new-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, undefined, true])
done()
it 'works if there is no document with the appropriate version settings (new major)', utils.wrap (done) ->
article = yield Article.findById(articleID)
article.set({ 'version.isLatestMajor': false, 'version.isLatestMinor': false })
yield article.save()
yield postNewVersion({ name: 'Article name', body: 'New body' })
articles = yield Article.find()
expect(articles.length).toBe(2)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 1])
testArrayEqual(_.pluck(versions, 'minor'), [0, 0])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, true])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true]) # does not fix the old version's value
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, true])
done()
it 'creates a new minor version if version.major is included', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', body: 'New body', version: { major: 0 } })
yield postNewVersion({ name: 'Article name', body: 'New new body', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(3)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 0, 0])
testArrayEqual(_.pluck(versions, 'minor'), [0, 1, 2])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false, true])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, false, true])
testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'Article name'])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, undefined, 'article-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, undefined, true])
done()
it 'works if there is no document with the appropriate version settings (new minor)', utils.wrap (done) ->
article = yield Article.findById(articleID)
article.set({ 'version.isLatestMajor': false, 'version.isLatestMinor': false })
yield article.save()
yield postNewVersion({ name: 'Article name', body: 'New body', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(2)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 0])
testArrayEqual(_.pluck(versions, 'minor'), [0, 1])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, false])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name'])
testArrayEqual(_.pluck(articles, 'index'), [undefined, true])
done()
it 'allows adding new minor versions to old major versions', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', body: 'New body' })
yield postNewVersion({ name: 'Article name', body: 'New new body', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(3)
versions = (article.get('version') for article in articles)
articles = (article.toObject() for article in articles)
testArrayEqual(_.pluck(versions, 'major'), [0, 1, 0])
testArrayEqual(_.pluck(versions, 'minor'), [0, 0, 1])
testArrayEqual(_.pluck(versions, 'isLatestMajor'), [false, true, false])
testArrayEqual(_.pluck(versions, 'isLatestMinor'), [false, true, true])
testArrayEqual(_.pluck(articles, 'name'), ['Article name', 'Article name', 'Article name'])
testArrayEqual(_.pluck(articles, 'body'), ['Article body', 'New body', 'New new body'])
testArrayEqual(_.pluck(articles, 'slug'), [undefined, 'article-name', undefined])
testArrayEqual(_.pluck(articles, 'index'), [undefined, true, undefined])
done()
it 'unsets properties which are not included in the request', utils.wrap (done) ->
yield postNewVersion({ name: 'Article name', version: { major: 0 } })
articles = yield Article.find()
expect(articles.length).toBe(2)
expect(articles[1].get('body')).toBeUndefined()
done()
it 'works for artisans', utils.wrap (done) ->
yield utils.logout()
artisan = yield utils.initArtisan()
yield utils.loginUser(artisan)
yield postNewVersion({ name: 'Article name', body: 'New body' })
articles = yield Article.find()
expect(articles.length).toBe(2)
done()
it 'works for normal users submitting translations', utils.wrap (done) ->
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
yield postNewVersion({ name: 'Article name', body: 'Article body', i18n: { fr: { name: 'Le Article' }}}, 201)
articles = yield Article.find()
expect(articles.length).toBe(2)
done()
it 'does not work for normal users', utils.wrap (done) ->
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
yield postNewVersion({ name: 'Article name', body: 'New body' }, 403)
articles = yield Article.find()
expect(articles.length).toBe(1)
done()
it 'does not work for anonymous users', utils.wrap (done) ->
yield utils.becomeAnonymous()
yield postNewVersion({ name: 'Article name', body: 'New body' }, 401)
articles = yield Article.find()
expect(articles.length).toBe(1)
done()
it 'notifies watchers of changes', utils.wrap (done) ->
sendwithus = require '../../../server/sendwithus'
spyOn(sendwithus.api, 'send').and.callFake (context, cb) ->
expect(context.email_id).toBe(sendwithus.templates.change_made_notify_watcher)
expect(context.recipient.address).toBe('PI:EMAIL:<EMAIL>END_PI')
done()
user = yield User({email: 'PI:EMAIL:<EMAIL>END_PI', name: 'a user'}).save()
article = yield Article.findById(articleID)
article.set('watchers', article.get('watchers').concat([user.get('_id')]))
yield article.save()
yield postNewVersion({ name: 'Article name', body: 'New body', commitMessage: 'Commit message' })
it 'sends a notification to artisan and main Slack channels', utils.wrap (done) ->
slack = require '../../../server/slack'
spyOn(slack, 'sendSlackMessage')
yield postNewVersion({ name: 'Article name', body: 'New body' })
expect(slack.sendSlackMessage).toHaveBeenCalled()
done()
describe 'version fetching endpoints', ->
articleData = { name: 'Original version', body: 'Article body' }
articleOriginal = null
postNewVersion = Promise.promisify (json, expectedStatus=201, done) ->
if _.isFunction(expectedStatus)
done = expectedStatus
expectedStatus = 201
url = getURL("/db/article/#{articleOriginal}/new-version")
request.post { uri: url, json: json }, (err, res) ->
expect(res.statusCode).toBe(expectedStatus)
done(err)
beforeEach utils.wrap (done) ->
yield utils.clearModels([Article])
@admin = yield utils.initAdmin({})
yield utils.loginUser(@admin)
[res, body] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
articleOriginal = body._id
yield postNewVersion({ name: 'Latest minor version', body: 'New body', version: {major: 0} })
yield postNewVersion({ name: 'Latest major version', body: 'New new body' })
done()
describe 'GET /db/article/:handle/version/:version', ->
it 'returns the latest version for the given original article when :version is empty', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version"), json: true }
expect(body.name).toBe('Latest major version')
done()
it 'returns the latest of a given major version when :version is X', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version/0"), json: true }
expect(body.name).toBe('Latest minor version')
done()
it 'returns a specific version when :version is X.Y', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/version/0.0"), json: true }
expect(body.name).toBe('Original version')
done()
it 'returns 422 when the original value is invalid', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL('/db/article/dne/version'), json: true }
expect(res.statusCode).toBe(422)
done()
it 'returns 404 when the original value cannot be found', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL('/db/article/012345678901234567890123/version'), json: true }
expect(res.statusCode).toBe(404)
done()
describe 'GET /db/article/:handle/versions', ->
it 'returns an array of versions sorted by creation for the given original article', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/versions"), json: true }
expect(body.length).toBe(3)
expect(body[0].name).toBe('Latest major version')
expect(body[1].name).toBe('Latest minor version')
expect(body[2].name).toBe('Original version')
done()
it 'projects most properties by default', utils.wrap (done) ->
[res, body] = yield request.getAsync { uri: getURL("/db/article/#{articleOriginal}/versions"), json: true }
expect(body[0].body).toBeUndefined()
done()
describe 'GET /db/article/:handle/files', ->
it 'returns an array of file metadata for the given original article', utils.wrap (done) ->
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
[res, body] = yield request.postAsync(getURL('/file'), { json: {
url: getURL('/assets/main.html')
filename: 'test.html'
path: 'db/article/'+article.original
mimetype: 'text/html'
}})
[res, body] = yield request.getAsync(getURL('/db/article/'+article.original+'/files'), {json: true})
expect(body.length).toBe(1)
expect(body[0].filename).toBe('test.html')
expect(body[0].metadata.path).toBe('db/article/'+article.original)
done()
describe 'GET and POST /db/article/:handle/names', ->
articleData1 = { name: 'Article 1', body: 'Article 1 body' }
articleData2 = { name: 'Article 2', body: 'Article 2 body' }
it 'returns an object mapping ids to names', utils.wrap (done) ->
yield utils.clearModels([Article])
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article1] = yield request.postAsync(getURL('/db/article'), { json: articleData1 })
[res, article2] = yield request.postAsync(getURL('/db/article'), { json: articleData2 })
yield utils.becomeAnonymous()
[res, body] = yield request.getAsync { uri: getURL('/db/article/names?ids='+[article1._id, article2._id].join(',')), json: true }
expect(body.length).toBe(2)
expect(body[0].name).toBe('Article 1')
[res, body] = yield request.postAsync { uri: getURL('/db/article/names?ids='+[article1._id, article2._id].join(',')), json: true }
expect(body.length).toBe(2)
expect(body[0].name).toBe('Article 1')
done()
describe 'GET /db/article/:handle/patches', ->
it 'returns pending patches for the given original article', utils.wrap (done) ->
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
[res, patch] = yield request.postAsync { uri: getURL('/db/patch'), json: {
delta: {name:['test']}
commitMessage: 'Test commit'
target: {
collection: 'article'
id: article._id
}
}}
expect(res.statusCode).toBe(201)
[res, patches] = yield request.getAsync getURL("/db/article/#{article._id}/patches"), { json: true }
expect(res.statusCode).toBe(200)
expect(patches.length).toBe(1)
expect(patches[0]._id).toBe(patch._id)
done()
it 'returns 404 for handles that do not exist', utils.wrap (done) ->
[res, body] = yield request.getAsync getURL("/db/article/dne/patches"), { json: true }
expect(res.statusCode).toBe(404)
done()
describe 'POST /db/article/:handle/watchers', ->
it 'adds self to the list of watchers, and is idempotent', utils.wrap (done) ->
# create article
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
# add new user as watcher
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
[res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(_.contains(article.watchers, user.id)).toBe(true)
# check idempotence, db
numWatchers = article.watchers.length
[res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(numWatchers).toBe(article.watchers.length)
article = yield Article.findById(article._id)
expect(_.last(article.get('watchers')).toString()).toBe(user.id)
done()
describe 'DELETE /db/article/:handle/watchers', ->
it 'removes self from the list of watchers, and is idempotent', utils.wrap (done) ->
# create article
yield utils.clearModels([Article])
articleData = { name: 'Article', body: 'Article' }
admin = yield utils.initAdmin({})
yield utils.loginUser(admin)
[res, article] = yield request.postAsync { uri: getURL('/db/article'), json: articleData }
expect(res.statusCode).toBe(201)
# add new user as watcher
yield utils.logout()
user = yield utils.initUser()
yield utils.loginUser(user)
[res, article] = yield request.postAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(_.contains(article.watchers, user.id)).toBe(true)
# remove user as watcher
[res, article] = yield request.delAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(_.contains(article.watchers, user.id)).toBe(false)
# check idempotence, db
numWatchers = article.watchers.length
[res, article] = yield request.delAsync { uri: getURL("/db/article/#{article._id}/watchers"), json: true }
expect(res.statusCode).toBe(200)
expect(numWatchers).toBe(article.watchers.length)
article = yield Article.findById(article._id)
ids = (id.toString() for id in article.get('watchers'))
expect(_.contains(ids, user.id)).toBe(false)
done()
describe 'POST /db/article/:handle/patch', ->
it 'creates a new version if the changes are translation only', utils.wrap (done) ->
# create article
yield utils.clearModels([Article])
admin = yield utils.initAdmin()
yield utils.loginUser(admin)
article = yield utils.makeArticle()
# submit a translation patch
user = yield utils.initUser()
yield utils.loginUser(user)
originalArticle = article.toObject()
changedArticle = _.extend({}, originalArticle, {i18n: {de: {name: 'Name in German'}}})
json = {
delta: jsondiffpatch.diff(originalArticle, changedArticle)
commitMessage: 'Server test commit'
target: {
collection: 'article'
id: article.id
}
}
url = utils.getURL("/db/article/#{article.id}/patch")
[res, body] = yield request.postAsync({ url, json })
expect(res.statusCode).toBe(201)
[firstArticle, secondArticle] = yield Article.find().sort('_id')
expectedVersion = { isLatestMinor: false, isLatestMajor: false, minor: 0, major: 0 }
expect(_.isEqual(firstArticle.get('version'), expectedVersion)).toBe(true)
expectedVersion = { isLatestMinor: true, isLatestMajor: true, minor: 1, major: 0 }
expect(_.isEqual(secondArticle.get('version'), expectedVersion)).toBe(true)
expect(firstArticle.get('i18n.de.name')).toBe(undefined)
expect(secondArticle.get('i18n.de.name')).toBe('Name in German')
expect(firstArticle.get('creator').equals(admin._id)).toBe(true)
expect(secondArticle.get('creator').equals(user._id)).toBe(true)
expect(firstArticle.get('commitMessage')).toBeUndefined()
expect(secondArticle.get('commitMessage')).toBe('Server test commit')
done()
|
[
{
"context": "core\"\n\nclass URLHandler\n GOOGLE_PUBLIC_API_KEY: \"AIzaSyAtc-2VrI5oKvel7KWTqviHD6LaguPGw70\"\n\n @_shortKeys:\n \"type\": \"T\"\n \"easing\": \"e",
"end": 129,
"score": 0.9997333884239197,
"start": 90,
"tag": "KEY",
"value": "AIzaSyAtc-2VrI5oKvel7KWTqviHD6LaguPGw70"
... | app/scripts/urlhandler.coffee | jaymicrocode/bounce.js | 3,683 | $ = require "jquery"
_ = require "underscore"
class URLHandler
GOOGLE_PUBLIC_API_KEY: "AIzaSyAtc-2VrI5oKvel7KWTqviHD6LaguPGw70"
@_shortKeys:
"type": "T"
"easing": "e"
"duration": "d"
"delay": "D"
"from": "f"
"to": "t"
"bounces": "b"
"stiffness": "s"
@_shortValues:
"bounce": "b"
"sway": "s"
"hardbounce": "B"
"hardsway": "S"
"scale": "c"
"skew": "k"
"translate": "t"
"rotate": "r"
@_longKeys: _.invert URLHandler._shortKeys
@_longValues: _.invert URLHandler._shortValues
encodeURL: (serialized, options = {}) ->
encoded = {}
encoded.l = 1 if options.loop
encoded.s = for opts in serialized
shortKeys = {}
for key, value of opts
shortKeys[URLHandler._shortKeys[key] or key] =
URLHandler._shortValues[value] or value
shortKeys
stringified = JSON.stringify(encoded)
# Remove double quotes in properties
stringified.replace(/(\{|,)"([a-z0-9]+)"(:)/gi, "$1$2$3")
decodeURL: (str) ->
try
str = decodeURIComponent str
catch e
# Add back the double quotes in properties
json = str.replace(/(\{|,)([a-z0-9]+)(:)/gi, "$1\"$2\"$3")
decoded = JSON.parse(json)
unshortened = for options in decoded.s
longKeys = {}
for key, value of options
longKeys[URLHandler._longKeys[key] or key] =
URLHandler._longValues[value] or value
longKeys
{
serialized: unshortened
loop: decoded.l
}
shorten: (url, options = {}) ->
ajaxOptions =
url: "https://www.googleapis.com/urlshortener/v1/url?key=#{@GOOGLE_PUBLIC_API_KEY}"
type: "POST"
data: JSON.stringify(
longUrl: "#{window.location.origin}##{encodeURIComponent(url)}")
dataType: "json"
contentType: "application/json; charset=utf-8"
$.ajax $.extend(ajaxOptions, options)
module.exports = new URLHandler | 71030 | $ = require "jquery"
_ = require "underscore"
class URLHandler
GOOGLE_PUBLIC_API_KEY: "<KEY>"
@_shortKeys:
"type": "T"
"easing": "e"
"duration": "d"
"delay": "D"
"from": "f"
"to": "t"
"bounces": "b"
"stiffness": "s"
@_shortValues:
"bounce": "b"
"sway": "s"
"hardbounce": "B"
"hardsway": "S"
"scale": "c"
"skew": "k"
"translate": "t"
"rotate": "r"
@_longKeys: _.invert URLHandler._shortKeys
@_longValues: _.invert URLHandler._shortValues
encodeURL: (serialized, options = {}) ->
encoded = {}
encoded.l = 1 if options.loop
encoded.s = for opts in serialized
shortKeys = {}
for key, value of opts
shortKeys[URLHandler._shortKeys[key] or key] =
URLHandler._shortValues[value] or value
shortKeys
stringified = JSON.stringify(encoded)
# Remove double quotes in properties
stringified.replace(/(\{|,)"([a-z0-9]+)"(:)/gi, "$1$2$3")
decodeURL: (str) ->
try
str = decodeURIComponent str
catch e
# Add back the double quotes in properties
json = str.replace(/(\{|,)([a-z0-9]+)(:)/gi, "$1\"$2\"$3")
decoded = JSON.parse(json)
unshortened = for options in decoded.s
longKeys = {}
for key, value of options
longKeys[URLHandler._longKeys[key] or key] =
URLHandler._longValues[value] or value
longKeys
{
serialized: unshortened
loop: decoded.l
}
shorten: (url, options = {}) ->
ajaxOptions =
url: "https://www.googleapis.com/urlshortener/v1/url?key=#{@GOOGLE_PUBLIC_API_KEY}"
type: "POST"
data: JSON.stringify(
longUrl: "#{window.location.origin}##{encodeURIComponent(url)}")
dataType: "json"
contentType: "application/json; charset=utf-8"
$.ajax $.extend(ajaxOptions, options)
module.exports = new URLHandler | true | $ = require "jquery"
_ = require "underscore"
class URLHandler
GOOGLE_PUBLIC_API_KEY: "PI:KEY:<KEY>END_PI"
@_shortKeys:
"type": "T"
"easing": "e"
"duration": "d"
"delay": "D"
"from": "f"
"to": "t"
"bounces": "b"
"stiffness": "s"
@_shortValues:
"bounce": "b"
"sway": "s"
"hardbounce": "B"
"hardsway": "S"
"scale": "c"
"skew": "k"
"translate": "t"
"rotate": "r"
@_longKeys: _.invert URLHandler._shortKeys
@_longValues: _.invert URLHandler._shortValues
encodeURL: (serialized, options = {}) ->
encoded = {}
encoded.l = 1 if options.loop
encoded.s = for opts in serialized
shortKeys = {}
for key, value of opts
shortKeys[URLHandler._shortKeys[key] or key] =
URLHandler._shortValues[value] or value
shortKeys
stringified = JSON.stringify(encoded)
# Remove double quotes in properties
stringified.replace(/(\{|,)"([a-z0-9]+)"(:)/gi, "$1$2$3")
decodeURL: (str) ->
try
str = decodeURIComponent str
catch e
# Add back the double quotes in properties
json = str.replace(/(\{|,)([a-z0-9]+)(:)/gi, "$1\"$2\"$3")
decoded = JSON.parse(json)
unshortened = for options in decoded.s
longKeys = {}
for key, value of options
longKeys[URLHandler._longKeys[key] or key] =
URLHandler._longValues[value] or value
longKeys
{
serialized: unshortened
loop: decoded.l
}
shorten: (url, options = {}) ->
ajaxOptions =
url: "https://www.googleapis.com/urlshortener/v1/url?key=#{@GOOGLE_PUBLIC_API_KEY}"
type: "POST"
data: JSON.stringify(
longUrl: "#{window.location.origin}##{encodeURIComponent(url)}")
dataType: "json"
contentType: "application/json; charset=utf-8"
$.ajax $.extend(ajaxOptions, options)
module.exports = new URLHandler |
[
{
"context": "ue: 3\n argument_3:\n value: [ 'Smith', 'Johnson', 'Jackson' ]\n ]\n ]\n\n i",
"end": 1821,
"score": 0.9997161626815796,
"start": 1816,
"tag": "NAME",
"value": "Smith"
},
{
"context": " argument_3:\n value: [ 'Smith', ... | test/callback_ordered.coffee | mpj/beautiful-lies | 1 | chai = require 'chai'
should = chai.should()
expect = chai.expect
after = require('fluent-time').after
beautiful = require '../src/beautiful-lies'
# TODO: Add support for simultaneous calls of another (of) callback
# and the handler callback
#
#
describe 'Ordered callback', ->
liar = null
beforeEach -> beautiful.lie()
afterEach -> delete Object.prototype.lie
describe 'Runs callback', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value: 'connected'
]
]
it 'should run callback', (done) ->
liar.connect (err, status) ->
status.should.equal 'connected'
done()
describe 'Runs callback (result object instead of array of result object)', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback:
argument_2:
value: 'connected'
]
it 'should run callback', (done) ->
liar.connect (err, status) ->
status.should.equal 'connected'
done()
describe 'Runs callback with error arguments', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
argument_1:
value:
message: 'Your query was malformed!'
]
]
it 'should run callback with correct arguments', (done) ->
liar.query (err, status) ->
err.message.should.equal 'Your query was malformed!'
done()
describe 'Runs callback with dual arguments', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
argument_2:
value: 3
argument_3:
value: [ 'Smith', 'Johnson', 'Jackson' ]
]
]
it 'should run callback with correct arguments', (done) ->
liar.query (err, pages, result) ->
expect(err).to.be.null
pages.should.equal 3
result.should.deep.equal [ 'Smith', 'Johnson', 'Jackson' ]
done()
describe 'run_callback defined with no_arguments', ->
passedToCallback = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query',
run_callback: [
no_arguments: true
]
]
liar.query () ->
passedToCallback = arguments
after(50).milliseconds -> done()
it 'runs callback without arguments', ->
passedToCallback.length.should.equal 0
describe 'Runs callback order', ->
it 'should call callbacks in turn', (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_3:
value: 'ninjas'
},
{
argument_2:
value: 'pirates'
}
]
]
arr = []
liar.query (dummy1, dummy2, str) ->
arr.push str
arr.length.should.equal 0
after(60).milliseconds ->
arr[0].should.equal 'ninjas'
arr.length.should.equal 1
liar.query (dummy1, str) ->
arr.push str
after(60).milliseconds ->
arr[1].should.equal 'pirates'
done()
it 'should work with multiple expectations', (done) ->
liar = {}
liar.lie [
{
function_name: 'count'
run_callback: [
{
argument_1:
value: 'one'
},
{
argument_1:
value: 'two'
}
]
},{
function_name: 'bark',
run_callback: [
{
argument_1:
value: 'woof!'
},
{
argument_1:
value: 'ruff!'
}
]
}
]
liar.count (result) ->
result.should.equal 'one'
liar.bark (result) ->
result.should.equal 'woof!'
liar.count (result) ->
result.should.equal 'two'
liar.bark (result) ->
result.should.equal 'ruff!'
done()
describe 'yeild delay', ->
describe 'when running callback without any delay specified', ->
liar = null
result = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_1:
value: '47 ninjas'
}
]
]
liar.query (r) -> result = r
done()
it 'should not have called back after 49ms', (done) ->
after(10).milliseconds ->
should.not.exist result
done()
it 'should have callbed back after 50ms', (done) ->
after(50).milliseconds ->
result.should.equal '47 ninjas'
done()
describe 'when calling back with a delay of 237 ms', () ->
liar = null
result = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_1:
value: '49 ninjas'
delay: 237
}
]
]
liar.query (r) ->
result = r
done()
it 'should not have called back after 236ms', (done) ->
after(235).milliseconds ->
should.not.exist result
done()
it 'should have called back after 237ms', (done) ->
after(237).milliseconds ->
result.should.equal '49 ninjas'
done()
describe 'run_callback has an "of" property', (done) ->
onLoadResult = null
onErrorResult = null
yielded = null
liar = null
beforeEach ->
onLoadResult = null
onErrorResult = null
yielded = null
liar = {}
describe 'and has one event listener', ->
beforeEach (done) ->
liar.lie [
{
function_name: 'addEventListener'
}, {
function_name: 'loadStuff'
run_callback:
of:
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
argument_2:
value: 'This is a result!'
}
]
liar.addEventListener 'onLoad', (error, result) ->
yielded = result
liar.loadStuff()
after(100).milliseconds -> done()
it 'executes addEventListener callback', ->
yielded.should.equal 'This is a result!'
describe 'and has multiple event listeners', ->
beforeEach ->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
}, {
function_name: 'addEventListener'
arguments: [ 'onError' ]
}, {
function_name: 'loadStuff'
run_callback: [
{
of:
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
argument_1:
value: 'This is a result!'
},{
of:
function_name: 'addEventListener'
arguments: [ 'onError' ]
argument_1:
value: 'This is an error!'
}
]
}
]
liar.addEventListener 'onLoad', (result) -> onLoadResult = result
liar.addEventListener 'onError', (result) -> onErrorResult = result
describe 'when calling loadStuff the first time', ->
beforeEach (done) ->
liar.loadStuff()
after(60).milliseconds -> done()
it 'gets the result', ->
onLoadResult.should.equal 'This is a result!'
it 'and does not get an error', ->
expect(onErrorResult).to.equal null
describe 'but when calling it a second time', ->
beforeEach (done) ->
liar.loadStuff()
after(100).milliseconds -> done()
it 'gets the error', ->
onErrorResult.should.equal 'This is an error!'
describe 'and defines a single argument (as opposed to array)', ->
beforeEach ->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onResult' ]
},{
function_name: 'loadThings'
run_callback: {
of:
function_name: 'addEventListener'
arguments: 'onResult' # <- Look ma, no []!
argument_1:
value: 'This is a result!'
}
}
]
describe 'and loads things', ->
beforeEach (done) ->
liar.addEventListener 'onResult', (result) -> onLoadResult = result
liar.loadThings()
after(52).milliseconds -> done()
it 'should get the correct result', ->
onLoadResult.should.equal 'This is a result!'
# TODO certain arguments
it 'should support on_value for callback arguments', (done) ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value:
status: 'open'
on_value: [
function_name: 'query'
returns:
value:
size: 72
]
]
]
liar.connect (err, connection) ->
connection.status.should.equal 'open'
connection.query().size.should.equal 72
done()
it 'should treat simple objects to on_value the same way as an array with 1 item', (done) ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value:
status: 'open'
on_value: # Same as the above test, but with no array
# passed to on_value.
function_name: 'query'
returns:
value:
size: 72
]
]
liar.connect (err, connection) ->
connection.status.should.equal 'open'
connection.query().size.should.equal 72
done()
describe 'Syntax checking', ->
liar = null
beforeEach -> liar = {}
it 'should have a nice warning when too few callbacks', ->
(->
liar.lie [
function_name: 'kaboom'
run_callback: [
{
argument_1:
value: 'bam!'
},
{
argument_1:
value: 'boom!'
}
]
]
liar.kaboom(()->)
liar.kaboom() # Doesn't have a callback, but should still count.
liar.kaboom(()->)
).should.throw 'kaboom was called 3 times, but only defined 2 run_callback.'
it 'should not display the nice warning when there is only a single callback result', (done) ->
liar.lie [
function_name: 'shoot',
run_callback: [{
argument_1:
value: 'pew!'
}]
]
liar.shoot (res) ->
res.should.equal 'pew!'
liar.shoot (res) ->
res.should.equal 'pew!'
done()
it 'should validate that arguments is an array (on_value)', ->
(->
liar.lie [
function_name: 'birth'
returns:
value: {}
on_value: [
function_name: 'bark'
arguments: 7
returns:
value: 'Yiff!'
]
]
liar.birth().bark(7)
).should.throw 'arguments must be of type Array.'
it 'should validate function_name of on_value', ->
(->
liar.lie [
function_name: 'do_stuff'
returns:
value: {}
on_value: [
functionn_name: "do_something_else" # misspelled
]
]
liar.do_stuff()
).should.throw 'expectation must have property "function_name"'
it 'should verify that of is an object (string)', ->
(->
liar.lie [
function_name: 'do_things'
run_callback:
of: 'otherFunction'
]
liar.do_things()
).should.throw 'run_callback.of property was set to "otherFunction" - must be an object.'
it 'should verify that of is an object (number)', ->
(->
liar.lie [
function_name: 'hello'
run_callback:
of: 871
]
liar.hello()
).should.throw 'run_callback.of property was set to "871" - must be an object.'
it 'should throw pretty error message if an of command does\'nt match any callback', ->
(->
liar.lie [{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
},{
function_name: 'runLoader'
run_callback:
of:
function_name: 'addEventListener'
arguments: ['onload'] # <- OOPS, a misspelling!
}
]
liar.addEventListener 'onLoad', ->
liar.runLoader()
).should.throw 'Tried to run callback provided to addEventListener along with arguments [ onload ], but didn\'t find any. Did you misspell function_name or arguments, or perhaps the callback was never passed to addEventListener?'
it 'should throw a nice error message if too broad a match', ->
(->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
},
{
function_name: 'addEventListener'
arguments: [ 'onError' ]
}
,{
function_name: 'doQuery'
run_callback:
of:
function_name: 'addEventListener'
# Oooops, no arguments here!
}
]
liar.addEventListener 'onLoad', -> console.log("oh hai")
liar.addEventListener 'onError', -> console.log("oh hai")
liar.doQuery()
).should.throw 'Tried to run callback provided to addEventListener, but I had multiple choices and could not guess which one was right. You need to provide run_callback.of.arguments.'
describe 'when liar expects a loadProperty with a property_xxx property', ->
liar = {}
beforeEach ->
liar.lie
function_name: 'loadProperty'
arguments: [ 'albumCount' ]
run_callback:
property_albumCount:
value: 56
describe 'and loadProperty is called', ->
beforeEach (done) ->
liar.loadProperty 'albumCount'
setTimeout done, 51
it 'should assign albumCount', ->
liar.albumCount.should.equal 56
describe 'when property_xxx is combined with arguments_x', ->
liar = null
beforeEach ->
liar = {}
liar.lie
function_name: 'loadProperty'
arguments: [ 'artistName' ]
run_callback:
property_artistName:
value: 'Led Zeppelin'
argument_1:
value:
name: 'Led Zeppelin',
type: 'artist'
it 'should assign property before running callback', (done) ->
liar.loadProperty 'artistName', (data) ->
liar.artistName.should.equal 'Led Zeppelin'
data.name.should.equal 'Led Zeppelin'
data.type.should.equal 'artist'
done()
| 146622 | chai = require 'chai'
should = chai.should()
expect = chai.expect
after = require('fluent-time').after
beautiful = require '../src/beautiful-lies'
# TODO: Add support for simultaneous calls of another (of) callback
# and the handler callback
#
#
describe 'Ordered callback', ->
liar = null
beforeEach -> beautiful.lie()
afterEach -> delete Object.prototype.lie
describe 'Runs callback', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value: 'connected'
]
]
it 'should run callback', (done) ->
liar.connect (err, status) ->
status.should.equal 'connected'
done()
describe 'Runs callback (result object instead of array of result object)', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback:
argument_2:
value: 'connected'
]
it 'should run callback', (done) ->
liar.connect (err, status) ->
status.should.equal 'connected'
done()
describe 'Runs callback with error arguments', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
argument_1:
value:
message: 'Your query was malformed!'
]
]
it 'should run callback with correct arguments', (done) ->
liar.query (err, status) ->
err.message.should.equal 'Your query was malformed!'
done()
describe 'Runs callback with dual arguments', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
argument_2:
value: 3
argument_3:
value: [ '<NAME>', '<NAME>', '<NAME>' ]
]
]
it 'should run callback with correct arguments', (done) ->
liar.query (err, pages, result) ->
expect(err).to.be.null
pages.should.equal 3
result.should.deep.equal [ '<NAME>', '<NAME>', '<NAME>' ]
done()
describe 'run_callback defined with no_arguments', ->
passedToCallback = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query',
run_callback: [
no_arguments: true
]
]
liar.query () ->
passedToCallback = arguments
after(50).milliseconds -> done()
it 'runs callback without arguments', ->
passedToCallback.length.should.equal 0
describe 'Runs callback order', ->
it 'should call callbacks in turn', (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_3:
value: 'ninjas'
},
{
argument_2:
value: 'pirates'
}
]
]
arr = []
liar.query (dummy1, dummy2, str) ->
arr.push str
arr.length.should.equal 0
after(60).milliseconds ->
arr[0].should.equal 'ninjas'
arr.length.should.equal 1
liar.query (dummy1, str) ->
arr.push str
after(60).milliseconds ->
arr[1].should.equal 'pirates'
done()
it 'should work with multiple expectations', (done) ->
liar = {}
liar.lie [
{
function_name: 'count'
run_callback: [
{
argument_1:
value: 'one'
},
{
argument_1:
value: 'two'
}
]
},{
function_name: 'bark',
run_callback: [
{
argument_1:
value: 'woof!'
},
{
argument_1:
value: 'ruff!'
}
]
}
]
liar.count (result) ->
result.should.equal 'one'
liar.bark (result) ->
result.should.equal 'woof!'
liar.count (result) ->
result.should.equal 'two'
liar.bark (result) ->
result.should.equal 'ruff!'
done()
describe 'yeild delay', ->
describe 'when running callback without any delay specified', ->
liar = null
result = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_1:
value: '47 n<NAME>'
}
]
]
liar.query (r) -> result = r
done()
it 'should not have called back after 49ms', (done) ->
after(10).milliseconds ->
should.not.exist result
done()
it 'should have callbed back after 50ms', (done) ->
after(50).milliseconds ->
result.should.equal '47 ninjas'
done()
describe 'when calling back with a delay of 237 ms', () ->
liar = null
result = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_1:
value: '49 ninjas'
delay: 237
}
]
]
liar.query (r) ->
result = r
done()
it 'should not have called back after 236ms', (done) ->
after(235).milliseconds ->
should.not.exist result
done()
it 'should have called back after 237ms', (done) ->
after(237).milliseconds ->
result.should.equal '49 ninjas'
done()
describe 'run_callback has an "of" property', (done) ->
onLoadResult = null
onErrorResult = null
yielded = null
liar = null
beforeEach ->
onLoadResult = null
onErrorResult = null
yielded = null
liar = {}
describe 'and has one event listener', ->
beforeEach (done) ->
liar.lie [
{
function_name: 'addEventListener'
}, {
function_name: 'loadStuff'
run_callback:
of:
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
argument_2:
value: 'This is a result!'
}
]
liar.addEventListener 'onLoad', (error, result) ->
yielded = result
liar.loadStuff()
after(100).milliseconds -> done()
it 'executes addEventListener callback', ->
yielded.should.equal 'This is a result!'
describe 'and has multiple event listeners', ->
beforeEach ->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
}, {
function_name: 'addEventListener'
arguments: [ 'onError' ]
}, {
function_name: 'loadStuff'
run_callback: [
{
of:
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
argument_1:
value: 'This is a result!'
},{
of:
function_name: 'addEventListener'
arguments: [ 'onError' ]
argument_1:
value: 'This is an error!'
}
]
}
]
liar.addEventListener 'onLoad', (result) -> onLoadResult = result
liar.addEventListener 'onError', (result) -> onErrorResult = result
describe 'when calling loadStuff the first time', ->
beforeEach (done) ->
liar.loadStuff()
after(60).milliseconds -> done()
it 'gets the result', ->
onLoadResult.should.equal 'This is a result!'
it 'and does not get an error', ->
expect(onErrorResult).to.equal null
describe 'but when calling it a second time', ->
beforeEach (done) ->
liar.loadStuff()
after(100).milliseconds -> done()
it 'gets the error', ->
onErrorResult.should.equal 'This is an error!'
describe 'and defines a single argument (as opposed to array)', ->
beforeEach ->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onResult' ]
},{
function_name: 'loadThings'
run_callback: {
of:
function_name: 'addEventListener'
arguments: 'onResult' # <- Look ma, no []!
argument_1:
value: 'This is a result!'
}
}
]
describe 'and loads things', ->
beforeEach (done) ->
liar.addEventListener 'onResult', (result) -> onLoadResult = result
liar.loadThings()
after(52).milliseconds -> done()
it 'should get the correct result', ->
onLoadResult.should.equal 'This is a result!'
# TODO certain arguments
it 'should support on_value for callback arguments', (done) ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value:
status: 'open'
on_value: [
function_name: 'query'
returns:
value:
size: 72
]
]
]
liar.connect (err, connection) ->
connection.status.should.equal 'open'
connection.query().size.should.equal 72
done()
it 'should treat simple objects to on_value the same way as an array with 1 item', (done) ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value:
status: 'open'
on_value: # Same as the above test, but with no array
# passed to on_value.
function_name: 'query'
returns:
value:
size: 72
]
]
liar.connect (err, connection) ->
connection.status.should.equal 'open'
connection.query().size.should.equal 72
done()
describe 'Syntax checking', ->
liar = null
beforeEach -> liar = {}
it 'should have a nice warning when too few callbacks', ->
(->
liar.lie [
function_name: 'kaboom'
run_callback: [
{
argument_1:
value: 'bam!'
},
{
argument_1:
value: 'boom!'
}
]
]
liar.kaboom(()->)
liar.kaboom() # Doesn't have a callback, but should still count.
liar.kaboom(()->)
).should.throw 'kaboom was called 3 times, but only defined 2 run_callback.'
it 'should not display the nice warning when there is only a single callback result', (done) ->
liar.lie [
function_name: 'shoot',
run_callback: [{
argument_1:
value: 'pew!'
}]
]
liar.shoot (res) ->
res.should.equal 'pew!'
liar.shoot (res) ->
res.should.equal 'pew!'
done()
it 'should validate that arguments is an array (on_value)', ->
(->
liar.lie [
function_name: 'birth'
returns:
value: {}
on_value: [
function_name: 'bark'
arguments: 7
returns:
value: 'Yiff!'
]
]
liar.birth().bark(7)
).should.throw 'arguments must be of type Array.'
it 'should validate function_name of on_value', ->
(->
liar.lie [
function_name: 'do_stuff'
returns:
value: {}
on_value: [
functionn_name: "do_something_else" # misspelled
]
]
liar.do_stuff()
).should.throw 'expectation must have property "function_name"'
it 'should verify that of is an object (string)', ->
(->
liar.lie [
function_name: 'do_things'
run_callback:
of: 'otherFunction'
]
liar.do_things()
).should.throw 'run_callback.of property was set to "otherFunction" - must be an object.'
it 'should verify that of is an object (number)', ->
(->
liar.lie [
function_name: 'hello'
run_callback:
of: 871
]
liar.hello()
).should.throw 'run_callback.of property was set to "871" - must be an object.'
it 'should throw pretty error message if an of command does\'nt match any callback', ->
(->
liar.lie [{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
},{
function_name: 'runLoader'
run_callback:
of:
function_name: 'addEventListener'
arguments: ['onload'] # <- OOPS, a misspelling!
}
]
liar.addEventListener 'onLoad', ->
liar.runLoader()
).should.throw 'Tried to run callback provided to addEventListener along with arguments [ onload ], but didn\'t find any. Did you misspell function_name or arguments, or perhaps the callback was never passed to addEventListener?'
it 'should throw a nice error message if too broad a match', ->
(->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
},
{
function_name: 'addEventListener'
arguments: [ 'onError' ]
}
,{
function_name: 'doQuery'
run_callback:
of:
function_name: 'addEventListener'
# Oooops, no arguments here!
}
]
liar.addEventListener 'onLoad', -> console.log("oh hai")
liar.addEventListener 'onError', -> console.log("oh hai")
liar.doQuery()
).should.throw 'Tried to run callback provided to addEventListener, but I had multiple choices and could not guess which one was right. You need to provide run_callback.of.arguments.'
describe 'when liar expects a loadProperty with a property_xxx property', ->
liar = {}
beforeEach ->
liar.lie
function_name: 'loadProperty'
arguments: [ 'albumCount' ]
run_callback:
property_albumCount:
value: 56
describe 'and loadProperty is called', ->
beforeEach (done) ->
liar.loadProperty 'albumCount'
setTimeout done, 51
it 'should assign albumCount', ->
liar.albumCount.should.equal 56
describe 'when property_xxx is combined with arguments_x', ->
liar = null
beforeEach ->
liar = {}
liar.lie
function_name: 'loadProperty'
arguments: [ 'artistName' ]
run_callback:
property_artistName:
value: 'Led Zeppelin'
argument_1:
value:
name: '<NAME>',
type: 'artist'
it 'should assign property before running callback', (done) ->
liar.loadProperty 'artistName', (data) ->
liar.artistName.should.equal 'Led Zeppelin'
data.name.should.equal '<NAME> Zeppelin'
data.type.should.equal 'artist'
done()
| true | chai = require 'chai'
should = chai.should()
expect = chai.expect
after = require('fluent-time').after
beautiful = require '../src/beautiful-lies'
# TODO: Add support for simultaneous calls of another (of) callback
# and the handler callback
#
#
describe 'Ordered callback', ->
liar = null
beforeEach -> beautiful.lie()
afterEach -> delete Object.prototype.lie
describe 'Runs callback', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value: 'connected'
]
]
it 'should run callback', (done) ->
liar.connect (err, status) ->
status.should.equal 'connected'
done()
describe 'Runs callback (result object instead of array of result object)', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback:
argument_2:
value: 'connected'
]
it 'should run callback', (done) ->
liar.connect (err, status) ->
status.should.equal 'connected'
done()
describe 'Runs callback with error arguments', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
argument_1:
value:
message: 'Your query was malformed!'
]
]
it 'should run callback with correct arguments', (done) ->
liar.query (err, status) ->
err.message.should.equal 'Your query was malformed!'
done()
describe 'Runs callback with dual arguments', ->
beforeEach ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
argument_2:
value: 3
argument_3:
value: [ 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI' ]
]
]
it 'should run callback with correct arguments', (done) ->
liar.query (err, pages, result) ->
expect(err).to.be.null
pages.should.equal 3
result.should.deep.equal [ 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI' ]
done()
describe 'run_callback defined with no_arguments', ->
passedToCallback = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query',
run_callback: [
no_arguments: true
]
]
liar.query () ->
passedToCallback = arguments
after(50).milliseconds -> done()
it 'runs callback without arguments', ->
passedToCallback.length.should.equal 0
describe 'Runs callback order', ->
it 'should call callbacks in turn', (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_3:
value: 'ninjas'
},
{
argument_2:
value: 'pirates'
}
]
]
arr = []
liar.query (dummy1, dummy2, str) ->
arr.push str
arr.length.should.equal 0
after(60).milliseconds ->
arr[0].should.equal 'ninjas'
arr.length.should.equal 1
liar.query (dummy1, str) ->
arr.push str
after(60).milliseconds ->
arr[1].should.equal 'pirates'
done()
it 'should work with multiple expectations', (done) ->
liar = {}
liar.lie [
{
function_name: 'count'
run_callback: [
{
argument_1:
value: 'one'
},
{
argument_1:
value: 'two'
}
]
},{
function_name: 'bark',
run_callback: [
{
argument_1:
value: 'woof!'
},
{
argument_1:
value: 'ruff!'
}
]
}
]
liar.count (result) ->
result.should.equal 'one'
liar.bark (result) ->
result.should.equal 'woof!'
liar.count (result) ->
result.should.equal 'two'
liar.bark (result) ->
result.should.equal 'ruff!'
done()
describe 'yeild delay', ->
describe 'when running callback without any delay specified', ->
liar = null
result = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_1:
value: '47 nPI:NAME:<NAME>END_PI'
}
]
]
liar.query (r) -> result = r
done()
it 'should not have called back after 49ms', (done) ->
after(10).milliseconds ->
should.not.exist result
done()
it 'should have callbed back after 50ms', (done) ->
after(50).milliseconds ->
result.should.equal '47 ninjas'
done()
describe 'when calling back with a delay of 237 ms', () ->
liar = null
result = null
beforeEach (done) ->
liar = {}
liar.lie [
function_name: 'query'
run_callback: [
{
argument_1:
value: '49 ninjas'
delay: 237
}
]
]
liar.query (r) ->
result = r
done()
it 'should not have called back after 236ms', (done) ->
after(235).milliseconds ->
should.not.exist result
done()
it 'should have called back after 237ms', (done) ->
after(237).milliseconds ->
result.should.equal '49 ninjas'
done()
describe 'run_callback has an "of" property', (done) ->
onLoadResult = null
onErrorResult = null
yielded = null
liar = null
beforeEach ->
onLoadResult = null
onErrorResult = null
yielded = null
liar = {}
describe 'and has one event listener', ->
beforeEach (done) ->
liar.lie [
{
function_name: 'addEventListener'
}, {
function_name: 'loadStuff'
run_callback:
of:
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
argument_2:
value: 'This is a result!'
}
]
liar.addEventListener 'onLoad', (error, result) ->
yielded = result
liar.loadStuff()
after(100).milliseconds -> done()
it 'executes addEventListener callback', ->
yielded.should.equal 'This is a result!'
describe 'and has multiple event listeners', ->
beforeEach ->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
}, {
function_name: 'addEventListener'
arguments: [ 'onError' ]
}, {
function_name: 'loadStuff'
run_callback: [
{
of:
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
argument_1:
value: 'This is a result!'
},{
of:
function_name: 'addEventListener'
arguments: [ 'onError' ]
argument_1:
value: 'This is an error!'
}
]
}
]
liar.addEventListener 'onLoad', (result) -> onLoadResult = result
liar.addEventListener 'onError', (result) -> onErrorResult = result
describe 'when calling loadStuff the first time', ->
beforeEach (done) ->
liar.loadStuff()
after(60).milliseconds -> done()
it 'gets the result', ->
onLoadResult.should.equal 'This is a result!'
it 'and does not get an error', ->
expect(onErrorResult).to.equal null
describe 'but when calling it a second time', ->
beforeEach (done) ->
liar.loadStuff()
after(100).milliseconds -> done()
it 'gets the error', ->
onErrorResult.should.equal 'This is an error!'
describe 'and defines a single argument (as opposed to array)', ->
beforeEach ->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onResult' ]
},{
function_name: 'loadThings'
run_callback: {
of:
function_name: 'addEventListener'
arguments: 'onResult' # <- Look ma, no []!
argument_1:
value: 'This is a result!'
}
}
]
describe 'and loads things', ->
beforeEach (done) ->
liar.addEventListener 'onResult', (result) -> onLoadResult = result
liar.loadThings()
after(52).milliseconds -> done()
it 'should get the correct result', ->
onLoadResult.should.equal 'This is a result!'
# TODO certain arguments
it 'should support on_value for callback arguments', (done) ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value:
status: 'open'
on_value: [
function_name: 'query'
returns:
value:
size: 72
]
]
]
liar.connect (err, connection) ->
connection.status.should.equal 'open'
connection.query().size.should.equal 72
done()
it 'should treat simple objects to on_value the same way as an array with 1 item', (done) ->
liar = {}
liar.lie [
function_name: 'connect'
run_callback: [
argument_2:
value:
status: 'open'
on_value: # Same as the above test, but with no array
# passed to on_value.
function_name: 'query'
returns:
value:
size: 72
]
]
liar.connect (err, connection) ->
connection.status.should.equal 'open'
connection.query().size.should.equal 72
done()
describe 'Syntax checking', ->
liar = null
beforeEach -> liar = {}
it 'should have a nice warning when too few callbacks', ->
(->
liar.lie [
function_name: 'kaboom'
run_callback: [
{
argument_1:
value: 'bam!'
},
{
argument_1:
value: 'boom!'
}
]
]
liar.kaboom(()->)
liar.kaboom() # Doesn't have a callback, but should still count.
liar.kaboom(()->)
).should.throw 'kaboom was called 3 times, but only defined 2 run_callback.'
it 'should not display the nice warning when there is only a single callback result', (done) ->
liar.lie [
function_name: 'shoot',
run_callback: [{
argument_1:
value: 'pew!'
}]
]
liar.shoot (res) ->
res.should.equal 'pew!'
liar.shoot (res) ->
res.should.equal 'pew!'
done()
it 'should validate that arguments is an array (on_value)', ->
(->
liar.lie [
function_name: 'birth'
returns:
value: {}
on_value: [
function_name: 'bark'
arguments: 7
returns:
value: 'Yiff!'
]
]
liar.birth().bark(7)
).should.throw 'arguments must be of type Array.'
it 'should validate function_name of on_value', ->
(->
liar.lie [
function_name: 'do_stuff'
returns:
value: {}
on_value: [
functionn_name: "do_something_else" # misspelled
]
]
liar.do_stuff()
).should.throw 'expectation must have property "function_name"'
it 'should verify that of is an object (string)', ->
(->
liar.lie [
function_name: 'do_things'
run_callback:
of: 'otherFunction'
]
liar.do_things()
).should.throw 'run_callback.of property was set to "otherFunction" - must be an object.'
it 'should verify that of is an object (number)', ->
(->
liar.lie [
function_name: 'hello'
run_callback:
of: 871
]
liar.hello()
).should.throw 'run_callback.of property was set to "871" - must be an object.'
it 'should throw pretty error message if an of command does\'nt match any callback', ->
(->
liar.lie [{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
},{
function_name: 'runLoader'
run_callback:
of:
function_name: 'addEventListener'
arguments: ['onload'] # <- OOPS, a misspelling!
}
]
liar.addEventListener 'onLoad', ->
liar.runLoader()
).should.throw 'Tried to run callback provided to addEventListener along with arguments [ onload ], but didn\'t find any. Did you misspell function_name or arguments, or perhaps the callback was never passed to addEventListener?'
it 'should throw a nice error message if too broad a match', ->
(->
liar.lie [
{
function_name: 'addEventListener'
arguments: [ 'onLoad' ]
},
{
function_name: 'addEventListener'
arguments: [ 'onError' ]
}
,{
function_name: 'doQuery'
run_callback:
of:
function_name: 'addEventListener'
# Oooops, no arguments here!
}
]
liar.addEventListener 'onLoad', -> console.log("oh hai")
liar.addEventListener 'onError', -> console.log("oh hai")
liar.doQuery()
).should.throw 'Tried to run callback provided to addEventListener, but I had multiple choices and could not guess which one was right. You need to provide run_callback.of.arguments.'
describe 'when liar expects a loadProperty with a property_xxx property', ->
liar = {}
beforeEach ->
liar.lie
function_name: 'loadProperty'
arguments: [ 'albumCount' ]
run_callback:
property_albumCount:
value: 56
describe 'and loadProperty is called', ->
beforeEach (done) ->
liar.loadProperty 'albumCount'
setTimeout done, 51
it 'should assign albumCount', ->
liar.albumCount.should.equal 56
describe 'when property_xxx is combined with arguments_x', ->
liar = null
beforeEach ->
liar = {}
liar.lie
function_name: 'loadProperty'
arguments: [ 'artistName' ]
run_callback:
property_artistName:
value: 'Led Zeppelin'
argument_1:
value:
name: 'PI:NAME:<NAME>END_PI',
type: 'artist'
it 'should assign property before running callback', (done) ->
liar.loadProperty 'artistName', (data) ->
liar.artistName.should.equal 'Led Zeppelin'
data.name.should.equal 'PI:NAME:<NAME>END_PI Zeppelin'
data.type.should.equal 'artist'
done()
|
[
{
"context": "geset.vemail()]\n password: [\n totem_changeset.vpresence(presence: true, message: 'You ",
"end": 1232,
"score": 0.5457372665405273,
"start": 1232,
"tag": "PASSWORD",
"value": ""
},
{
"context": "Date().getTime()\n changeset.set('first_name', 'Test')\n... | src/thinkspace/client/thinkspace-user/addon/components/sign_up.coffee | sixthedge/cellar | 6 | import ember from 'ember'
import totem_changeset from 'totem/changeset'
import ns from 'totem/ns'
import config from 'totem-config/config'
import base from 'thinkspace-base/components/base'
export default base.extend
# # Properties
debug: config.autofill
authenticator: 'authenticator:totem'
# # Computed properties
token: ember.computed.reads 'query_params.token'
email: ember.computed.reads 'query_params.email'
invitable: ember.computed.reads 'query_params.invitable'
has_token: ember.computed.notEmpty 'token'
# {"roles"=>{"student"=>true, "instructor"=>false}}
# # Events
init_base: ->
@set_changeset()
set_changeset: ->
model = ember.Object.create(first_name: null, last_name: null, email: null, password: null)
changeset = totem_changeset.create model,
first_name: [totem_changeset.vpresence(presence: true, message: 'You must enter a first name')]
last_name: [totem_changeset.vpresence(presence: true, message: 'You must enter a last name')]
email: [totem_changeset.vpresence(presence: true, message: 'You must enter an email address'), totem_changeset.vemail()]
password: [
totem_changeset.vpresence(presence: true, message: 'You must enter a password'),
totem_changeset.vlength(min: 8, message: 'Your password must be at least 8 characters long.')
]
roles: [totem_changeset.vpresence(presence: true, message: 'You must select your role')]
@set_debug_changeset(changeset)
changeset.set('email', @get('email')) if @get('has_token') and @get('email')
@set('changeset', changeset)
set_debug_changeset: (changeset) ->
return unless @get('debug')
time = new Date().getTime()
changeset.set('first_name', 'Test')
changeset.set('last_name', time)
changeset.set('email', "#{time}@sixthedge.com")
changeset.set('password', 'password')
authenticate: (user) ->
changeset = @get('changeset')
data = {identification: changeset.get('email'), password: changeset.get('password')}
@set_loading('authenticating')
@get('session').authenticate(@get('authenticator'), data).then =>
# Reset password values so they're not lingering.
user.set('password', null)
changeset.set('password', null)
@reset_loading('authenticating')
@totem_messages.info "Sign in successful!"
, (error) =>
@reset_loading('authenticating')
changeset.show_errors_off()
message = error.responseText or 'Email or password incorrect'
@totem_messages.error message
actions:
submit: ->
changeset = @get('changeset')
changeset.validate().then =>
is_valid = changeset.get('is_valid')
if is_valid
user = @totem_scope.get_store().createRecord ns.to_p('user'),
first_name: changeset.get('first_name')
last_name: changeset.get('last_name')
email: changeset.get('email')
password: changeset.get('password')
profile:
roles: changeset.get('roles')
token = @get('token')
user.set('token', token) if ember.isPresent(token)
@set_loading('submitting')
user.save().then =>
@reset_loading('submitting')
@authenticate(user)
, (error) =>
@reset_loading('submitting')
# TODO: This currently boots them out of the application in the case of an error.
@totem_messages.api_failure error, source: @, model: user, action: 'create'
else
changeset.show_errors_on() | 149829 | import ember from 'ember'
import totem_changeset from 'totem/changeset'
import ns from 'totem/ns'
import config from 'totem-config/config'
import base from 'thinkspace-base/components/base'
export default base.extend
# # Properties
debug: config.autofill
authenticator: 'authenticator:totem'
# # Computed properties
token: ember.computed.reads 'query_params.token'
email: ember.computed.reads 'query_params.email'
invitable: ember.computed.reads 'query_params.invitable'
has_token: ember.computed.notEmpty 'token'
# {"roles"=>{"student"=>true, "instructor"=>false}}
# # Events
init_base: ->
@set_changeset()
set_changeset: ->
model = ember.Object.create(first_name: null, last_name: null, email: null, password: null)
changeset = totem_changeset.create model,
first_name: [totem_changeset.vpresence(presence: true, message: 'You must enter a first name')]
last_name: [totem_changeset.vpresence(presence: true, message: 'You must enter a last name')]
email: [totem_changeset.vpresence(presence: true, message: 'You must enter an email address'), totem_changeset.vemail()]
password: [
totem<PASSWORD>_changeset.vpresence(presence: true, message: 'You must enter a password'),
totem_changeset.vlength(min: 8, message: 'Your password must be at least 8 characters long.')
]
roles: [totem_changeset.vpresence(presence: true, message: 'You must select your role')]
@set_debug_changeset(changeset)
changeset.set('email', @get('email')) if @get('has_token') and @get('email')
@set('changeset', changeset)
set_debug_changeset: (changeset) ->
return unless @get('debug')
time = new Date().getTime()
changeset.set('first_name', '<NAME>')
changeset.set('last_name', time)
changeset.set('email', <EMAIL>")
changeset.set('password', '<PASSWORD>')
authenticate: (user) ->
changeset = @get('changeset')
data = {identification: changeset.get('email'), password: changeset.get('password')}
@set_loading('authenticating')
@get('session').authenticate(@get('authenticator'), data).then =>
# Reset password values so they're not lingering.
user.set('password', null)
changeset.set('password', null)
@reset_loading('authenticating')
@totem_messages.info "Sign in successful!"
, (error) =>
@reset_loading('authenticating')
changeset.show_errors_off()
message = error.responseText or 'Email or password incorrect'
@totem_messages.error message
actions:
submit: ->
changeset = @get('changeset')
changeset.validate().then =>
is_valid = changeset.get('is_valid')
if is_valid
user = @totem_scope.get_store().createRecord ns.to_p('user'),
first_name: changeset.get('first_name')
last_name: changeset.get('last_name')
email: changeset.get('email')
password: <PASSWORD>('<PASSWORD>')
profile:
roles: changeset.get('roles')
token = @get('token')
user.set('token', token) if ember.isPresent(token)
@set_loading('submitting')
user.save().then =>
@reset_loading('submitting')
@authenticate(user)
, (error) =>
@reset_loading('submitting')
# TODO: This currently boots them out of the application in the case of an error.
@totem_messages.api_failure error, source: @, model: user, action: 'create'
else
changeset.show_errors_on() | true | import ember from 'ember'
import totem_changeset from 'totem/changeset'
import ns from 'totem/ns'
import config from 'totem-config/config'
import base from 'thinkspace-base/components/base'
export default base.extend
# # Properties
debug: config.autofill
authenticator: 'authenticator:totem'
# # Computed properties
token: ember.computed.reads 'query_params.token'
email: ember.computed.reads 'query_params.email'
invitable: ember.computed.reads 'query_params.invitable'
has_token: ember.computed.notEmpty 'token'
# {"roles"=>{"student"=>true, "instructor"=>false}}
# # Events
init_base: ->
@set_changeset()
set_changeset: ->
model = ember.Object.create(first_name: null, last_name: null, email: null, password: null)
changeset = totem_changeset.create model,
first_name: [totem_changeset.vpresence(presence: true, message: 'You must enter a first name')]
last_name: [totem_changeset.vpresence(presence: true, message: 'You must enter a last name')]
email: [totem_changeset.vpresence(presence: true, message: 'You must enter an email address'), totem_changeset.vemail()]
password: [
totemPI:PASSWORD:<PASSWORD>END_PI_changeset.vpresence(presence: true, message: 'You must enter a password'),
totem_changeset.vlength(min: 8, message: 'Your password must be at least 8 characters long.')
]
roles: [totem_changeset.vpresence(presence: true, message: 'You must select your role')]
@set_debug_changeset(changeset)
changeset.set('email', @get('email')) if @get('has_token') and @get('email')
@set('changeset', changeset)
set_debug_changeset: (changeset) ->
return unless @get('debug')
time = new Date().getTime()
changeset.set('first_name', 'PI:NAME:<NAME>END_PI')
changeset.set('last_name', time)
changeset.set('email', PI:EMAIL:<EMAIL>END_PI")
changeset.set('password', 'PI:PASSWORD:<PASSWORD>END_PI')
authenticate: (user) ->
changeset = @get('changeset')
data = {identification: changeset.get('email'), password: changeset.get('password')}
@set_loading('authenticating')
@get('session').authenticate(@get('authenticator'), data).then =>
# Reset password values so they're not lingering.
user.set('password', null)
changeset.set('password', null)
@reset_loading('authenticating')
@totem_messages.info "Sign in successful!"
, (error) =>
@reset_loading('authenticating')
changeset.show_errors_off()
message = error.responseText or 'Email or password incorrect'
@totem_messages.error message
actions:
submit: ->
changeset = @get('changeset')
changeset.validate().then =>
is_valid = changeset.get('is_valid')
if is_valid
user = @totem_scope.get_store().createRecord ns.to_p('user'),
first_name: changeset.get('first_name')
last_name: changeset.get('last_name')
email: changeset.get('email')
password: PI:PASSWORD:<PASSWORD>END_PI('PI:PASSWORD:<PASSWORD>END_PI')
profile:
roles: changeset.get('roles')
token = @get('token')
user.set('token', token) if ember.isPresent(token)
@set_loading('submitting')
user.save().then =>
@reset_loading('submitting')
@authenticate(user)
, (error) =>
@reset_loading('submitting')
# TODO: This currently boots them out of the application in the case of an error.
@totem_messages.api_failure error, source: @, model: user, action: 'create'
else
changeset.show_errors_on() |
[
{
"context": "\"ModifierDyingWishRespawnEntity\"\n\n\t@modifierName:\"Dying Wish\"\n\t@description: \"Dying Wish: Resummon this minion",
"end": 295,
"score": 0.8424081206321716,
"start": 285,
"tag": "NAME",
"value": "Dying Wish"
}
] | app/sdk/modifiers/modifierDyingWishRespawnEntity.coffee | willroberts/duelyst | 5 | ModifierDyingWish = require './modifierDyingWish'
PlayCardSilentlyAction = require 'app/sdk/actions/playCardSilentlyAction'
class ModifierDyingWishRespawnEntity extends ModifierDyingWish
type:"ModifierDyingWishRespawnEntity"
@type:"ModifierDyingWishRespawnEntity"
@modifierName:"Dying Wish"
@description: "Dying Wish: Resummon this minion"
fxResource: ["FX.Modifiers.ModifierDyingWish", "FX.Modifiers.ModifierGenericSpawn"]
onDyingWish: (action) ->
super(action)
if @getGameSession().getIsRunningAsAuthoritative()
spawnAction = new PlayCardSilentlyAction(@getGameSession(), @getCard().getOwnerId(), @getCard().getPosition().x, @getCard().getPosition().y, @getCard().createNewCardData())
spawnAction.setSource(@getCard())
@getGameSession().executeAction(spawnAction)
module.exports = ModifierDyingWishRespawnEntity
| 11764 | ModifierDyingWish = require './modifierDyingWish'
PlayCardSilentlyAction = require 'app/sdk/actions/playCardSilentlyAction'
class ModifierDyingWishRespawnEntity extends ModifierDyingWish
type:"ModifierDyingWishRespawnEntity"
@type:"ModifierDyingWishRespawnEntity"
@modifierName:"<NAME>"
@description: "Dying Wish: Resummon this minion"
fxResource: ["FX.Modifiers.ModifierDyingWish", "FX.Modifiers.ModifierGenericSpawn"]
onDyingWish: (action) ->
super(action)
if @getGameSession().getIsRunningAsAuthoritative()
spawnAction = new PlayCardSilentlyAction(@getGameSession(), @getCard().getOwnerId(), @getCard().getPosition().x, @getCard().getPosition().y, @getCard().createNewCardData())
spawnAction.setSource(@getCard())
@getGameSession().executeAction(spawnAction)
module.exports = ModifierDyingWishRespawnEntity
| true | ModifierDyingWish = require './modifierDyingWish'
PlayCardSilentlyAction = require 'app/sdk/actions/playCardSilentlyAction'
class ModifierDyingWishRespawnEntity extends ModifierDyingWish
type:"ModifierDyingWishRespawnEntity"
@type:"ModifierDyingWishRespawnEntity"
@modifierName:"PI:NAME:<NAME>END_PI"
@description: "Dying Wish: Resummon this minion"
fxResource: ["FX.Modifiers.ModifierDyingWish", "FX.Modifiers.ModifierGenericSpawn"]
onDyingWish: (action) ->
super(action)
if @getGameSession().getIsRunningAsAuthoritative()
spawnAction = new PlayCardSilentlyAction(@getGameSession(), @getCard().getOwnerId(), @getCard().getPosition().x, @getCard().getPosition().y, @getCard().createNewCardData())
spawnAction.setSource(@getCard())
@getGameSession().executeAction(spawnAction)
module.exports = ModifierDyingWishRespawnEntity
|
[
{
"context": "ats - proves the lotto has no bias\n#\n# Author:\n# sakatam\n# pcsforeducation\n\n_ = require \"undersc",
"end": 467,
"score": 0.9997325539588928,
"start": 460,
"tag": "USERNAME",
"value": "sakatam"
},
{
"context": " the lotto has no bias\n#\n# Author:\n# ... | src/reviewer-queue.coffee | pcsforeducation/hubot-review-queue | 0 | # Description:
# assigns reviewers in a round robin fashion for pull requests.
#
# Configuration:
# HUBOT_GITHUB_TOKEN (required)
# HUBOT_GITHUB_ORG (required)
# HUBOT_GITHUB_REVIEWER_TEAM (required)
# github team id. this script picks the next eligible reviewer off a queue
#
# Commands:
# hubot reviewer for <repo> <pull> - assigns the next reviewer for pull request
# hubot reviewer show stats - proves the lotto has no bias
#
# Author:
# sakatam
# pcsforeducation
_ = require "underscore"
async = require "async"
GitHubApi = require "github"
weighted = require "weighted"
module.exports = (robot) ->
ghToken = process.env.HUBOT_GITHUB_TOKEN
ghOrg = process.env.HUBOT_GITHUB_ORG
ghReviwerTeam = process.env.HUBOT_GITHUB_REVIEWER_TEAM
ghWithAvatar = process.env.HUBOT_GITHUB_WITH_AVATAR in ["1", "true"]
debug = process.env.HUBOT_REVIEWER_LOTTO_DEBUG in ["1", "true"]
STATS_KEY = 'reviewer-round-robin'
if !ghToken? or !ghOrg? or !ghReviwerTeam?
return robot.logger.error """
reviewer-lottery is not loaded due to missing configuration!
#{__filename}
HUBOT_GITHUB_TOKEN: #{ghToken}
HUBOT_GITHUB_ORG: #{ghOrg}
HUBOT_GITHUB_REVIEWER_TEAM: #{ghReviwerTeam}
"""
robot.respond /reviewer reset stats/i, (msg) ->
robot.brain.set STATS_KEY, {}
msg.reply "Reset reviewer stats!"
robot.respond /reviewer show stats$/i, (msg) ->
stats = robot.brain.get STATS_KEY
msgs = ["login, percentage, num assigned"]
total = 0
for login, count of stats
total += count
for login, count of stats
percentage = Math.floor(count * 100.0 / total)
msgs.push "#{login}, #{percentage}%, #{count}"
msg.reply msgs.join "\n"
robot.respond /reviewer for ([\w-\.]+) (\d+)( polite)?$/i, (msg) ->
repo = msg.match[1]
pr = msg.match[2]
polite = msg.match[3]?
prParams =
user: ghOrg
repo: repo
number: pr
gh = new GitHubApi version: "3.0.0"
gh.authenticate {type: "oauth", token: ghToken}
# mock api if debug mode
if debug
gh.issues.createComment = (params, cb) ->
robot.logger.info "GitHubApi - createComment is called", params
cb null
gh.issues.edit = (params, cb) ->
robot.logger.info "GitHubApi - edit is called", params
cb null
async.waterfall [
(cb) ->
# get team members
params =
id: ghReviwerTeam
per_page: 100
gh.orgs.getTeamMembers params, (err, res) ->
return cb "error on getting team members: #{err.toString()}" if err?
cb null, {reviewers: res}
(ctx, cb) ->
# check if pull req exists
gh.pullRequests.get prParams, (err, res) ->
return cb "error on getting pull request: #{err.toString()}" if err?
ctx['issue'] = res
ctx['creator'] = res.user
ctx['assignee'] = res.assignee
cb null, ctx
(ctx, cb) ->
{reviewers, creator, assignee} = ctx
stats = robot.brain.get STATS_KEY
# (re)initialize stats if necessary
if not stats['reviewers'] || stats['reviewers'].length != reviewers.length
robot.logger.debug '(re)initializing stats'
stats['reviewers'] = reviewers
# pick reviewer
reviewers = stats['reviewers']
reviewers = reviewers.filter (r) -> r.login != creator.login
# exclude current assignee from reviewer candidates
if assignee?
reviewers = reviewers.filter (r) -> r.login != assignee.login
# pick first reviewer from the queue
newReviewer = reviewers[0]
robot.logger.info 'Choose from queue: ' + newReviewer.login
originalIndex = -1
originalIndex = i for r, i in stats['reviewers'] when r.login == newReviewer.login
# move reviewer to the end
stats['reviewers'].splice(originalIndex, 1)
stats['reviewers'].push newReviewer
# save reviewer queue back to robot brain
robot.brain.set STATS_KEY, stats
ctx['reviewer'] = newReviewer
cb null, ctx
(ctx, cb) ->
# change assignee
{reviewer} = ctx
params = _.extend { assignee: reviewer.login }, prParams
gh.issues.edit params, (err, res) -> cb err, ctx
robot.logger.debug 'Would have assigned ' + reviewer.login
(ctx, cb) ->
{reviewer, issue} = ctx
msg.reply "#{reviewer.login} has been assigned for #{issue.html_url} as a reviewer"
if ghWithAvatar
url = reviewer.avatar_url
url = "#{url}t=#{Date.now()}" # cache buster
url = url.replace(/(#.*|$)/, '#.png') # hipchat needs image-ish url to display inline image
msg.send url
# update stats
stats = (robot.brain.get STATS_KEY) or {}
stats[reviewer.login] or= 0
stats[reviewer.login]++
robot.brain.set STATS_KEY, stats
cb null, ctx
], (err, res) ->
if err?
msg.reply "an error occured.\n#{err}"
| 185744 | # Description:
# assigns reviewers in a round robin fashion for pull requests.
#
# Configuration:
# HUBOT_GITHUB_TOKEN (required)
# HUBOT_GITHUB_ORG (required)
# HUBOT_GITHUB_REVIEWER_TEAM (required)
# github team id. this script picks the next eligible reviewer off a queue
#
# Commands:
# hubot reviewer for <repo> <pull> - assigns the next reviewer for pull request
# hubot reviewer show stats - proves the lotto has no bias
#
# Author:
# sakatam
# pcsforeducation
_ = require "underscore"
async = require "async"
GitHubApi = require "github"
weighted = require "weighted"
module.exports = (robot) ->
ghToken = process.env.HUBOT_GITHUB_TOKEN
ghOrg = process.env.HUBOT_GITHUB_ORG
ghReviwerTeam = process.env.HUBOT_GITHUB_REVIEWER_TEAM
ghWithAvatar = process.env.HUBOT_GITHUB_WITH_AVATAR in ["1", "true"]
debug = process.env.HUBOT_REVIEWER_LOTTO_DEBUG in ["1", "true"]
STATS_KEY = '<KEY>'
if !ghToken? or !ghOrg? or !ghReviwerTeam?
return robot.logger.error """
reviewer-lottery is not loaded due to missing configuration!
#{__filename}
HUBOT_GITHUB_TOKEN: #{ghToken}
HUBOT_GITHUB_ORG: #{ghOrg}
HUBOT_GITHUB_REVIEWER_TEAM: #{ghReviwerTeam}
"""
robot.respond /reviewer reset stats/i, (msg) ->
robot.brain.set STATS_KEY, {}
msg.reply "Reset reviewer stats!"
robot.respond /reviewer show stats$/i, (msg) ->
stats = robot.brain.get STATS_KEY
msgs = ["login, percentage, num assigned"]
total = 0
for login, count of stats
total += count
for login, count of stats
percentage = Math.floor(count * 100.0 / total)
msgs.push "#{login}, #{percentage}%, #{count}"
msg.reply msgs.join "\n"
robot.respond /reviewer for ([\w-\.]+) (\d+)( polite)?$/i, (msg) ->
repo = msg.match[1]
pr = msg.match[2]
polite = msg.match[3]?
prParams =
user: ghOrg
repo: repo
number: pr
gh = new GitHubApi version: "3.0.0"
gh.authenticate {type: "oauth", token: ghToken}
# mock api if debug mode
if debug
gh.issues.createComment = (params, cb) ->
robot.logger.info "GitHubApi - createComment is called", params
cb null
gh.issues.edit = (params, cb) ->
robot.logger.info "GitHubApi - edit is called", params
cb null
async.waterfall [
(cb) ->
# get team members
params =
id: ghReviwerTeam
per_page: 100
gh.orgs.getTeamMembers params, (err, res) ->
return cb "error on getting team members: #{err.toString()}" if err?
cb null, {reviewers: res}
(ctx, cb) ->
# check if pull req exists
gh.pullRequests.get prParams, (err, res) ->
return cb "error on getting pull request: #{err.toString()}" if err?
ctx['issue'] = res
ctx['creator'] = res.user
ctx['assignee'] = res.assignee
cb null, ctx
(ctx, cb) ->
{reviewers, creator, assignee} = ctx
stats = robot.brain.get STATS_KEY
# (re)initialize stats if necessary
if not stats['reviewers'] || stats['reviewers'].length != reviewers.length
robot.logger.debug '(re)initializing stats'
stats['reviewers'] = reviewers
# pick reviewer
reviewers = stats['reviewers']
reviewers = reviewers.filter (r) -> r.login != creator.login
# exclude current assignee from reviewer candidates
if assignee?
reviewers = reviewers.filter (r) -> r.login != assignee.login
# pick first reviewer from the queue
newReviewer = reviewers[0]
robot.logger.info 'Choose from queue: ' + newReviewer.login
originalIndex = -1
originalIndex = i for r, i in stats['reviewers'] when r.login == newReviewer.login
# move reviewer to the end
stats['reviewers'].splice(originalIndex, 1)
stats['reviewers'].push newReviewer
# save reviewer queue back to robot brain
robot.brain.set STATS_KEY, stats
ctx['reviewer'] = newReviewer
cb null, ctx
(ctx, cb) ->
# change assignee
{reviewer} = ctx
params = _.extend { assignee: reviewer.login }, prParams
gh.issues.edit params, (err, res) -> cb err, ctx
robot.logger.debug 'Would have assigned ' + reviewer.login
(ctx, cb) ->
{reviewer, issue} = ctx
msg.reply "#{reviewer.login} has been assigned for #{issue.html_url} as a reviewer"
if ghWithAvatar
url = reviewer.avatar_url
url = "#{url}t=#{Date.now()}" # cache buster
url = url.replace(/(#.*|$)/, '#.png') # hipchat needs image-ish url to display inline image
msg.send url
# update stats
stats = (robot.brain.get STATS_KEY) or {}
stats[reviewer.login] or= 0
stats[reviewer.login]++
robot.brain.set STATS_KEY, stats
cb null, ctx
], (err, res) ->
if err?
msg.reply "an error occured.\n#{err}"
| true | # Description:
# assigns reviewers in a round robin fashion for pull requests.
#
# Configuration:
# HUBOT_GITHUB_TOKEN (required)
# HUBOT_GITHUB_ORG (required)
# HUBOT_GITHUB_REVIEWER_TEAM (required)
# github team id. this script picks the next eligible reviewer off a queue
#
# Commands:
# hubot reviewer for <repo> <pull> - assigns the next reviewer for pull request
# hubot reviewer show stats - proves the lotto has no bias
#
# Author:
# sakatam
# pcsforeducation
_ = require "underscore"
async = require "async"
GitHubApi = require "github"
weighted = require "weighted"
module.exports = (robot) ->
ghToken = process.env.HUBOT_GITHUB_TOKEN
ghOrg = process.env.HUBOT_GITHUB_ORG
ghReviwerTeam = process.env.HUBOT_GITHUB_REVIEWER_TEAM
ghWithAvatar = process.env.HUBOT_GITHUB_WITH_AVATAR in ["1", "true"]
debug = process.env.HUBOT_REVIEWER_LOTTO_DEBUG in ["1", "true"]
STATS_KEY = 'PI:KEY:<KEY>END_PI'
if !ghToken? or !ghOrg? or !ghReviwerTeam?
return robot.logger.error """
reviewer-lottery is not loaded due to missing configuration!
#{__filename}
HUBOT_GITHUB_TOKEN: #{ghToken}
HUBOT_GITHUB_ORG: #{ghOrg}
HUBOT_GITHUB_REVIEWER_TEAM: #{ghReviwerTeam}
"""
robot.respond /reviewer reset stats/i, (msg) ->
robot.brain.set STATS_KEY, {}
msg.reply "Reset reviewer stats!"
robot.respond /reviewer show stats$/i, (msg) ->
stats = robot.brain.get STATS_KEY
msgs = ["login, percentage, num assigned"]
total = 0
for login, count of stats
total += count
for login, count of stats
percentage = Math.floor(count * 100.0 / total)
msgs.push "#{login}, #{percentage}%, #{count}"
msg.reply msgs.join "\n"
robot.respond /reviewer for ([\w-\.]+) (\d+)( polite)?$/i, (msg) ->
repo = msg.match[1]
pr = msg.match[2]
polite = msg.match[3]?
prParams =
user: ghOrg
repo: repo
number: pr
gh = new GitHubApi version: "3.0.0"
gh.authenticate {type: "oauth", token: ghToken}
# mock api if debug mode
if debug
gh.issues.createComment = (params, cb) ->
robot.logger.info "GitHubApi - createComment is called", params
cb null
gh.issues.edit = (params, cb) ->
robot.logger.info "GitHubApi - edit is called", params
cb null
async.waterfall [
(cb) ->
# get team members
params =
id: ghReviwerTeam
per_page: 100
gh.orgs.getTeamMembers params, (err, res) ->
return cb "error on getting team members: #{err.toString()}" if err?
cb null, {reviewers: res}
(ctx, cb) ->
# check if pull req exists
gh.pullRequests.get prParams, (err, res) ->
return cb "error on getting pull request: #{err.toString()}" if err?
ctx['issue'] = res
ctx['creator'] = res.user
ctx['assignee'] = res.assignee
cb null, ctx
(ctx, cb) ->
{reviewers, creator, assignee} = ctx
stats = robot.brain.get STATS_KEY
# (re)initialize stats if necessary
if not stats['reviewers'] || stats['reviewers'].length != reviewers.length
robot.logger.debug '(re)initializing stats'
stats['reviewers'] = reviewers
# pick reviewer
reviewers = stats['reviewers']
reviewers = reviewers.filter (r) -> r.login != creator.login
# exclude current assignee from reviewer candidates
if assignee?
reviewers = reviewers.filter (r) -> r.login != assignee.login
# pick first reviewer from the queue
newReviewer = reviewers[0]
robot.logger.info 'Choose from queue: ' + newReviewer.login
originalIndex = -1
originalIndex = i for r, i in stats['reviewers'] when r.login == newReviewer.login
# move reviewer to the end
stats['reviewers'].splice(originalIndex, 1)
stats['reviewers'].push newReviewer
# save reviewer queue back to robot brain
robot.brain.set STATS_KEY, stats
ctx['reviewer'] = newReviewer
cb null, ctx
(ctx, cb) ->
# change assignee
{reviewer} = ctx
params = _.extend { assignee: reviewer.login }, prParams
gh.issues.edit params, (err, res) -> cb err, ctx
robot.logger.debug 'Would have assigned ' + reviewer.login
(ctx, cb) ->
{reviewer, issue} = ctx
msg.reply "#{reviewer.login} has been assigned for #{issue.html_url} as a reviewer"
if ghWithAvatar
url = reviewer.avatar_url
url = "#{url}t=#{Date.now()}" # cache buster
url = url.replace(/(#.*|$)/, '#.png') # hipchat needs image-ish url to display inline image
msg.send url
# update stats
stats = (robot.brain.get STATS_KEY) or {}
stats[reviewer.login] or= 0
stats[reviewer.login]++
robot.brain.set STATS_KEY, stats
cb null, ctx
], (err, res) ->
if err?
msg.reply "an error occured.\n#{err}"
|
[
{
"context": "dar widget script\n # by w3widgets\n #\n # Author: Lukasz Kokoszkiewicz\n #\n # Copyright © w3widgets 2013 All Rights Res",
"end": 96,
"score": 0.9998463988304138,
"start": 76,
"tag": "NAME",
"value": "Lukasz Kokoszkiewicz"
}
] | public/calendar/js/coffee/responsive-calendar.coffee | antlev/examen | 5 | ###!
# Responsive Celendar widget script
# by w3widgets
#
# Author: Lukasz Kokoszkiewicz
#
# Copyright © w3widgets 2013 All Rights Reserved
###
do ($ = jQuery) ->
"use strict"
Calendar = ( element, options ) ->
@$element = element
@options = options
@weekDays = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
@time = new Date()
@currentYear = @time.getFullYear()
@currentMonth = @time.getMonth()
if @options.time
time = @splitDateString( @options.time )
@currentYear = time.year
@currentMonth = time.month
# Do the initial draw
@initialDraw()
null
Calendar.prototype =
addLeadingZero: (num) ->
(if num < 10 then "0" + num else "" + num)
applyTransition: ( $el, transition ) ->
$el.css 'transition', transition
$el.css '-ms-transition', '-ms-' + transition
$el.css '-moz-transition', '-moz-' + transition
$el.css '-webkit-transition', '-webkit-' + transition
applyBackfaceVisibility: ( $el ) ->
$el.css 'backface-visibility', 'hidden'
$el.css '-ms-backface-visibility', 'hidden'
$el.css '-moz-backface-visibility', 'hidden'
$el.css '-webkit-backface-visibility', 'hidden'
applyTransform: ( $el, transform ) ->
$el.css 'transform', transform
$el.css '-ms-transform', transform
$el.css '-moz-transform', transform
$el.css '-webkit-transform', transform
splitDateString: ( dateString ) ->
time = dateString.split( '-' )
year = parseInt time[0]
month = parseInt time[1] - 1
day = parseInt time[2]
time =
year: year
month: month
day: day
initialDraw: () ->
@drawDays @currentYear, @currentMonth
editDays: ( events ) ->
for dateString, dayEvents of events
@options.events[ dateString ] = events[ dateString ]
time = @splitDateString( dateString )
day = @$element.find( '[data-year="' + time.year + '"][data-month="' + ( time.month + 1 ) + '"][data-day="' + time.day + '"]' ).parent( '.day' )
day.removeClass( 'active' )
day.find( '.badge' ).remove()
day.find( 'a' ).removeAttr( 'href' )
@makeActive( day, dayEvents ) if this.currentMonth == time.month || this.options.activateNonCurrentMonths
clearDays: ( days ) ->
for dateString in days
delete @options.events[ dateString ]
time = @splitDateString( dateString )
day = @$element.find( '[data-year="' + time.year + '"][data-month="' + ( time.month + 1 ) + '"][data-day="' + time.day + '"]' ).parent( '.day' )
day.removeClass( 'active' )
day.find( '.badge' ).remove()
day.find( 'a' ).removeAttr( 'href' )
clearAll: () ->
@options.events = {}
days = @$element.find('[data-group="days"] .day')
for day, i in days
$(day).removeClass( 'active' )
$(day).find( '.badge' ).remove()
$(day).find( 'a' ).removeAttr( 'href' )
setMonthYear: ( dateString ) ->
time = @splitDateString( dateString )
@currentMonth = @drawDays( time.year, time.month )
@currentYear = time.year
prev: () ->
if @currentMonth - 1 < 0
@currentYear = @currentYear - 1
@currentMonth = 11
else
@currentMonth = @currentMonth - 1
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
next: () ->
if @currentMonth + 1 > 11
@currentYear = @currentYear + 1
@currentMonth = 0
else
@currentMonth = @currentMonth + 1
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
curr: () ->
@currentYear = @time.getFullYear()
@currentMonth = @time.getMonth()
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
addOthers: ( day, dayEvents ) ->
# if events word is an object (array)
# create badge with the number of events
if typeof dayEvents is "object"
# add badge
if dayEvents.number?
badge = $("<span></span>").html(dayEvents.number).addClass("badge")
if dayEvents.badgeClass?
badge.addClass(dayEvents.badgeClass)
day.append badge
# add url
if dayEvents.url
day.find("a").attr "href", dayEvents.url
day
makeActive: ( day, dayEvents ) ->
# if event exists for the given day ...
if dayEvents
# ... add class `active`
if dayEvents.class
classes = dayEvents.class.split " "
day.addClass eventClass for eventClass, i in classes
else
day.addClass "active"
# add badge
day = @addOthers day, dayEvents
day
getDaysInMonth: ( year, month ) ->
new Date(year, month + 1, 0).getDate();
drawDay: ( lastDayOfMonth, yearNum, monthNum, dayNum, i ) ->
day = $("<div></div>").addClass("day")
dateNow = new Date()
dateNow.setHours( 0, 0, 0, 0 )
dayDate = new Date(yearNum, monthNum - 1, dayNum)
if dayDate.getTime() < dateNow.getTime()
pastFutureClass = "past"
else if dayDate.getTime() == dateNow.getTime()
pastFutureClass = "today"
else
pastFutureClass = "future"
day.addClass( @weekDays[ i % 7 ] )
day.addClass( pastFutureClass )
dateString = yearNum + "-" + @addLeadingZero(monthNum) + "-" + @addLeadingZero(dayNum)
# starts drawing days from the appropriate day of the week
if dayNum <= 0 or dayNum > lastDayOfMonth
calcDate = new Date(yearNum, monthNum - 1, dayNum)
dayNum = calcDate.getDate()
monthNum = calcDate.getMonth() + 1
yearNum = calcDate.getFullYear()
day.addClass("not-current")
.addClass(pastFutureClass)
if @options.activateNonCurrentMonths
# create date string to access `events` options dictionary
dateString = yearNum + "-" + @addLeadingZero(monthNum) + "-" + @addLeadingZero(dayNum)
day.append $("<a>" + dayNum + "</a>")
.attr("data-day", dayNum)
.attr("data-month", monthNum)
.attr("data-year", yearNum)
if @options.monthChangeAnimation
@applyTransform day, 'rotateY(180deg)'
@applyBackfaceVisibility day
# make active if event for a day exists
day = @makeActive( day, @options.events[ dateString ] )
@$element.find('[data-group="days"]').append day
drawDays: (year, month) ->
thisRef = @
# set initial time parameters
time = new Date(year, month)
currentMonth = time.getMonth() # count from 0
monthNum = time.getMonth() + 1 # count from 1
yearNum = time.getFullYear()
# get week day for the first day of the current month
time.setDate 1
firstDayOfMonth = if @options.startFromSunday then (time.getDay() + 1) else ( time.getDay() || 7 ) # sunday fix
# get week day for the last day of the current month
lastDayOfMonth = @getDaysInMonth year, month
# out animation
timeout = 0
if @options.monthChangeAnimation
days = @$element.find('[data-group="days"] .day')
for day, i in days
delay = i * 0.01
@applyTransition $(day), 'transform .5s ease ' + delay + 's'
@applyTransform $(day), 'rotateY(180deg)'
@applyBackfaceVisibility $(day)
timeout = (delay + 0.1) * 1000
dayBase = 2
# celculate loop base / number of possible calendar day cells
if @options.allRows
loopBase = 42
else
multiplier = Math.ceil( ( firstDayOfMonth - ( dayBase - 1 ) + lastDayOfMonth ) / 7 )
loopBase = multiplier * 7
#@$element.find(".timeInfo").html time.getFullYear() + " " + @options.translateMonths[time.getMonth()]
@$element.find("[data-head-year]").html time.getFullYear()
@$element.find("[data-head-month]").html @options.translateMonths[time.getMonth()]
draw = () ->
thisRef.$element.find('[data-group="days"]').empty()
# fill callendar
dayNum = dayBase - firstDayOfMonth
i = if thisRef.options.startFromSunday then 0 else 1
while dayNum < loopBase - firstDayOfMonth + dayBase
thisRef.drawDay lastDayOfMonth, yearNum, monthNum, dayNum, i
dayNum = dayNum + 1
i = i + 1
setEvents = () ->
days = thisRef.$element.find('[data-group="days"] .day')
for day, i in days
thisRef.applyTransition $(day), 'transform .5s ease ' + ( i * 0.01 ) + 's'
thisRef.applyTransform $(day), 'rotateY(0deg)'
if thisRef.options.onDayClick
thisRef.$element.find('[data-group="days"] .day a').click ->
thisRef.options.onDayClick.call this, thisRef.options.events
if thisRef.options.onDayHover
thisRef.$element.find('[data-group="days"] .day a').hover ->
thisRef.options.onDayHover.call this, thisRef.options.events
if thisRef.options.onActiveDayClick
thisRef.$element.find('[data-group="days"] .day.active a').click ->
thisRef.options.onActiveDayClick.call this, thisRef.options.events
if thisRef.options.onActiveDayHover
thisRef.$element.find('[data-group="days"] .day.active a').hover ->
thisRef.options.onActiveDayHover.call this, thisRef.options.events
setTimeout setEvents, 0
setTimeout( draw, timeout )
currentMonth
$.fn.responsiveCalendar = ( option, params ) ->
options = $.extend {}, $.fn.responsiveCalendar.defaults, typeof option == 'object' && option
publicFunc =
next: 'next'
prev: 'prev'
edit: 'editDays'
clear: 'clearDays'
clearAll: 'clearAll'
getYearMonth: 'getYearMonth'
jump: 'jump'
curr: 'curr'
init = ( $this ) ->
# support for metadata plugin
options = if $.metadata then $.extend( {}, options, $this.metadata() ) else options
$this.data 'calendar', ( data = new Calendar $this, options )
# call onInit function
if options.onInit
options.onInit.call data
# create events for manual month change
$this.find("[data-go]").click ->
if $(this).data("go") is "prev"
data.prev()
if $(this).data("go") is "next"
data.next()
@each ->
$this = $(this)
# create "calendar" data variable
data = $this.data 'calendar'
# create calendar object on init
if !data
init $this
else if typeof option == 'string'
if publicFunc[option]?
data[ publicFunc[option] ]( params )
else
data.setMonthYear( option ) # sets month to display, format "YYYY-MM"
else if typeof option == 'number'
data.jump Math.abs( option ) + 1
null
# plugin defaults - added as a property on our plugin function
$.fn.responsiveCalendar.defaults =
translateMonths: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
events: {}
time: undefined # string - example: "2013-04"
allRows: true
startFromSunday: false
activateNonCurrentMonths: false
monthChangeAnimation: true
# callback functions
onInit: undefined
onDayClick: undefined
onDayHover: undefined
onActiveDayClick: undefined
onActiveDayHover: undefined
onMonthChange: undefined
spy = $('[data-spy="responsive-calendar"]')
if ( spy.length )
opts = {}
if (spy.data 'translate-months')? then opts.translateMonths = spy.data( 'translate-months' ).split ','
#if (spy.data 'events')? then opts.events = spy.data 'events'
if (spy.data 'time')? then opts.time = spy.data 'time'
if (spy.data 'all-rows')? then opts.allRows = spy.data 'all-rows'
if (spy.data 'start-from-sunday')? then opts.startFromSunday = spy.data 'start-from-sunday'
if (spy.data 'activate-non-current-months')? then opts.activateNonCurrentMonths = spy.data 'activate-non-current-months'
if (spy.data 'month-change-animation')? then opts.monthChangeAnimation = spy.data 'month-change-animation'
spy.responsiveCalendar( opts )
| 40861 | ###!
# Responsive Celendar widget script
# by w3widgets
#
# Author: <NAME>
#
# Copyright © w3widgets 2013 All Rights Reserved
###
do ($ = jQuery) ->
"use strict"
Calendar = ( element, options ) ->
@$element = element
@options = options
@weekDays = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
@time = new Date()
@currentYear = @time.getFullYear()
@currentMonth = @time.getMonth()
if @options.time
time = @splitDateString( @options.time )
@currentYear = time.year
@currentMonth = time.month
# Do the initial draw
@initialDraw()
null
Calendar.prototype =
addLeadingZero: (num) ->
(if num < 10 then "0" + num else "" + num)
applyTransition: ( $el, transition ) ->
$el.css 'transition', transition
$el.css '-ms-transition', '-ms-' + transition
$el.css '-moz-transition', '-moz-' + transition
$el.css '-webkit-transition', '-webkit-' + transition
applyBackfaceVisibility: ( $el ) ->
$el.css 'backface-visibility', 'hidden'
$el.css '-ms-backface-visibility', 'hidden'
$el.css '-moz-backface-visibility', 'hidden'
$el.css '-webkit-backface-visibility', 'hidden'
applyTransform: ( $el, transform ) ->
$el.css 'transform', transform
$el.css '-ms-transform', transform
$el.css '-moz-transform', transform
$el.css '-webkit-transform', transform
splitDateString: ( dateString ) ->
time = dateString.split( '-' )
year = parseInt time[0]
month = parseInt time[1] - 1
day = parseInt time[2]
time =
year: year
month: month
day: day
initialDraw: () ->
@drawDays @currentYear, @currentMonth
editDays: ( events ) ->
for dateString, dayEvents of events
@options.events[ dateString ] = events[ dateString ]
time = @splitDateString( dateString )
day = @$element.find( '[data-year="' + time.year + '"][data-month="' + ( time.month + 1 ) + '"][data-day="' + time.day + '"]' ).parent( '.day' )
day.removeClass( 'active' )
day.find( '.badge' ).remove()
day.find( 'a' ).removeAttr( 'href' )
@makeActive( day, dayEvents ) if this.currentMonth == time.month || this.options.activateNonCurrentMonths
clearDays: ( days ) ->
for dateString in days
delete @options.events[ dateString ]
time = @splitDateString( dateString )
day = @$element.find( '[data-year="' + time.year + '"][data-month="' + ( time.month + 1 ) + '"][data-day="' + time.day + '"]' ).parent( '.day' )
day.removeClass( 'active' )
day.find( '.badge' ).remove()
day.find( 'a' ).removeAttr( 'href' )
clearAll: () ->
@options.events = {}
days = @$element.find('[data-group="days"] .day')
for day, i in days
$(day).removeClass( 'active' )
$(day).find( '.badge' ).remove()
$(day).find( 'a' ).removeAttr( 'href' )
setMonthYear: ( dateString ) ->
time = @splitDateString( dateString )
@currentMonth = @drawDays( time.year, time.month )
@currentYear = time.year
prev: () ->
if @currentMonth - 1 < 0
@currentYear = @currentYear - 1
@currentMonth = 11
else
@currentMonth = @currentMonth - 1
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
next: () ->
if @currentMonth + 1 > 11
@currentYear = @currentYear + 1
@currentMonth = 0
else
@currentMonth = @currentMonth + 1
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
curr: () ->
@currentYear = @time.getFullYear()
@currentMonth = @time.getMonth()
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
addOthers: ( day, dayEvents ) ->
# if events word is an object (array)
# create badge with the number of events
if typeof dayEvents is "object"
# add badge
if dayEvents.number?
badge = $("<span></span>").html(dayEvents.number).addClass("badge")
if dayEvents.badgeClass?
badge.addClass(dayEvents.badgeClass)
day.append badge
# add url
if dayEvents.url
day.find("a").attr "href", dayEvents.url
day
makeActive: ( day, dayEvents ) ->
# if event exists for the given day ...
if dayEvents
# ... add class `active`
if dayEvents.class
classes = dayEvents.class.split " "
day.addClass eventClass for eventClass, i in classes
else
day.addClass "active"
# add badge
day = @addOthers day, dayEvents
day
getDaysInMonth: ( year, month ) ->
new Date(year, month + 1, 0).getDate();
drawDay: ( lastDayOfMonth, yearNum, monthNum, dayNum, i ) ->
day = $("<div></div>").addClass("day")
dateNow = new Date()
dateNow.setHours( 0, 0, 0, 0 )
dayDate = new Date(yearNum, monthNum - 1, dayNum)
if dayDate.getTime() < dateNow.getTime()
pastFutureClass = "past"
else if dayDate.getTime() == dateNow.getTime()
pastFutureClass = "today"
else
pastFutureClass = "future"
day.addClass( @weekDays[ i % 7 ] )
day.addClass( pastFutureClass )
dateString = yearNum + "-" + @addLeadingZero(monthNum) + "-" + @addLeadingZero(dayNum)
# starts drawing days from the appropriate day of the week
if dayNum <= 0 or dayNum > lastDayOfMonth
calcDate = new Date(yearNum, monthNum - 1, dayNum)
dayNum = calcDate.getDate()
monthNum = calcDate.getMonth() + 1
yearNum = calcDate.getFullYear()
day.addClass("not-current")
.addClass(pastFutureClass)
if @options.activateNonCurrentMonths
# create date string to access `events` options dictionary
dateString = yearNum + "-" + @addLeadingZero(monthNum) + "-" + @addLeadingZero(dayNum)
day.append $("<a>" + dayNum + "</a>")
.attr("data-day", dayNum)
.attr("data-month", monthNum)
.attr("data-year", yearNum)
if @options.monthChangeAnimation
@applyTransform day, 'rotateY(180deg)'
@applyBackfaceVisibility day
# make active if event for a day exists
day = @makeActive( day, @options.events[ dateString ] )
@$element.find('[data-group="days"]').append day
drawDays: (year, month) ->
thisRef = @
# set initial time parameters
time = new Date(year, month)
currentMonth = time.getMonth() # count from 0
monthNum = time.getMonth() + 1 # count from 1
yearNum = time.getFullYear()
# get week day for the first day of the current month
time.setDate 1
firstDayOfMonth = if @options.startFromSunday then (time.getDay() + 1) else ( time.getDay() || 7 ) # sunday fix
# get week day for the last day of the current month
lastDayOfMonth = @getDaysInMonth year, month
# out animation
timeout = 0
if @options.monthChangeAnimation
days = @$element.find('[data-group="days"] .day')
for day, i in days
delay = i * 0.01
@applyTransition $(day), 'transform .5s ease ' + delay + 's'
@applyTransform $(day), 'rotateY(180deg)'
@applyBackfaceVisibility $(day)
timeout = (delay + 0.1) * 1000
dayBase = 2
# celculate loop base / number of possible calendar day cells
if @options.allRows
loopBase = 42
else
multiplier = Math.ceil( ( firstDayOfMonth - ( dayBase - 1 ) + lastDayOfMonth ) / 7 )
loopBase = multiplier * 7
#@$element.find(".timeInfo").html time.getFullYear() + " " + @options.translateMonths[time.getMonth()]
@$element.find("[data-head-year]").html time.getFullYear()
@$element.find("[data-head-month]").html @options.translateMonths[time.getMonth()]
draw = () ->
thisRef.$element.find('[data-group="days"]').empty()
# fill callendar
dayNum = dayBase - firstDayOfMonth
i = if thisRef.options.startFromSunday then 0 else 1
while dayNum < loopBase - firstDayOfMonth + dayBase
thisRef.drawDay lastDayOfMonth, yearNum, monthNum, dayNum, i
dayNum = dayNum + 1
i = i + 1
setEvents = () ->
days = thisRef.$element.find('[data-group="days"] .day')
for day, i in days
thisRef.applyTransition $(day), 'transform .5s ease ' + ( i * 0.01 ) + 's'
thisRef.applyTransform $(day), 'rotateY(0deg)'
if thisRef.options.onDayClick
thisRef.$element.find('[data-group="days"] .day a').click ->
thisRef.options.onDayClick.call this, thisRef.options.events
if thisRef.options.onDayHover
thisRef.$element.find('[data-group="days"] .day a').hover ->
thisRef.options.onDayHover.call this, thisRef.options.events
if thisRef.options.onActiveDayClick
thisRef.$element.find('[data-group="days"] .day.active a').click ->
thisRef.options.onActiveDayClick.call this, thisRef.options.events
if thisRef.options.onActiveDayHover
thisRef.$element.find('[data-group="days"] .day.active a').hover ->
thisRef.options.onActiveDayHover.call this, thisRef.options.events
setTimeout setEvents, 0
setTimeout( draw, timeout )
currentMonth
$.fn.responsiveCalendar = ( option, params ) ->
options = $.extend {}, $.fn.responsiveCalendar.defaults, typeof option == 'object' && option
publicFunc =
next: 'next'
prev: 'prev'
edit: 'editDays'
clear: 'clearDays'
clearAll: 'clearAll'
getYearMonth: 'getYearMonth'
jump: 'jump'
curr: 'curr'
init = ( $this ) ->
# support for metadata plugin
options = if $.metadata then $.extend( {}, options, $this.metadata() ) else options
$this.data 'calendar', ( data = new Calendar $this, options )
# call onInit function
if options.onInit
options.onInit.call data
# create events for manual month change
$this.find("[data-go]").click ->
if $(this).data("go") is "prev"
data.prev()
if $(this).data("go") is "next"
data.next()
@each ->
$this = $(this)
# create "calendar" data variable
data = $this.data 'calendar'
# create calendar object on init
if !data
init $this
else if typeof option == 'string'
if publicFunc[option]?
data[ publicFunc[option] ]( params )
else
data.setMonthYear( option ) # sets month to display, format "YYYY-MM"
else if typeof option == 'number'
data.jump Math.abs( option ) + 1
null
# plugin defaults - added as a property on our plugin function
$.fn.responsiveCalendar.defaults =
translateMonths: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
events: {}
time: undefined # string - example: "2013-04"
allRows: true
startFromSunday: false
activateNonCurrentMonths: false
monthChangeAnimation: true
# callback functions
onInit: undefined
onDayClick: undefined
onDayHover: undefined
onActiveDayClick: undefined
onActiveDayHover: undefined
onMonthChange: undefined
spy = $('[data-spy="responsive-calendar"]')
if ( spy.length )
opts = {}
if (spy.data 'translate-months')? then opts.translateMonths = spy.data( 'translate-months' ).split ','
#if (spy.data 'events')? then opts.events = spy.data 'events'
if (spy.data 'time')? then opts.time = spy.data 'time'
if (spy.data 'all-rows')? then opts.allRows = spy.data 'all-rows'
if (spy.data 'start-from-sunday')? then opts.startFromSunday = spy.data 'start-from-sunday'
if (spy.data 'activate-non-current-months')? then opts.activateNonCurrentMonths = spy.data 'activate-non-current-months'
if (spy.data 'month-change-animation')? then opts.monthChangeAnimation = spy.data 'month-change-animation'
spy.responsiveCalendar( opts )
| true | ###!
# Responsive Celendar widget script
# by w3widgets
#
# Author: PI:NAME:<NAME>END_PI
#
# Copyright © w3widgets 2013 All Rights Reserved
###
do ($ = jQuery) ->
"use strict"
Calendar = ( element, options ) ->
@$element = element
@options = options
@weekDays = ['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
@time = new Date()
@currentYear = @time.getFullYear()
@currentMonth = @time.getMonth()
if @options.time
time = @splitDateString( @options.time )
@currentYear = time.year
@currentMonth = time.month
# Do the initial draw
@initialDraw()
null
Calendar.prototype =
addLeadingZero: (num) ->
(if num < 10 then "0" + num else "" + num)
applyTransition: ( $el, transition ) ->
$el.css 'transition', transition
$el.css '-ms-transition', '-ms-' + transition
$el.css '-moz-transition', '-moz-' + transition
$el.css '-webkit-transition', '-webkit-' + transition
applyBackfaceVisibility: ( $el ) ->
$el.css 'backface-visibility', 'hidden'
$el.css '-ms-backface-visibility', 'hidden'
$el.css '-moz-backface-visibility', 'hidden'
$el.css '-webkit-backface-visibility', 'hidden'
applyTransform: ( $el, transform ) ->
$el.css 'transform', transform
$el.css '-ms-transform', transform
$el.css '-moz-transform', transform
$el.css '-webkit-transform', transform
splitDateString: ( dateString ) ->
time = dateString.split( '-' )
year = parseInt time[0]
month = parseInt time[1] - 1
day = parseInt time[2]
time =
year: year
month: month
day: day
initialDraw: () ->
@drawDays @currentYear, @currentMonth
editDays: ( events ) ->
for dateString, dayEvents of events
@options.events[ dateString ] = events[ dateString ]
time = @splitDateString( dateString )
day = @$element.find( '[data-year="' + time.year + '"][data-month="' + ( time.month + 1 ) + '"][data-day="' + time.day + '"]' ).parent( '.day' )
day.removeClass( 'active' )
day.find( '.badge' ).remove()
day.find( 'a' ).removeAttr( 'href' )
@makeActive( day, dayEvents ) if this.currentMonth == time.month || this.options.activateNonCurrentMonths
clearDays: ( days ) ->
for dateString in days
delete @options.events[ dateString ]
time = @splitDateString( dateString )
day = @$element.find( '[data-year="' + time.year + '"][data-month="' + ( time.month + 1 ) + '"][data-day="' + time.day + '"]' ).parent( '.day' )
day.removeClass( 'active' )
day.find( '.badge' ).remove()
day.find( 'a' ).removeAttr( 'href' )
clearAll: () ->
@options.events = {}
days = @$element.find('[data-group="days"] .day')
for day, i in days
$(day).removeClass( 'active' )
$(day).find( '.badge' ).remove()
$(day).find( 'a' ).removeAttr( 'href' )
setMonthYear: ( dateString ) ->
time = @splitDateString( dateString )
@currentMonth = @drawDays( time.year, time.month )
@currentYear = time.year
prev: () ->
if @currentMonth - 1 < 0
@currentYear = @currentYear - 1
@currentMonth = 11
else
@currentMonth = @currentMonth - 1
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
next: () ->
if @currentMonth + 1 > 11
@currentYear = @currentYear + 1
@currentMonth = 0
else
@currentMonth = @currentMonth + 1
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
curr: () ->
@currentYear = @time.getFullYear()
@currentMonth = @time.getMonth()
@drawDays @currentYear, @currentMonth
# callback function
if @options.onMonthChange
@options.onMonthChange.call @
null
addOthers: ( day, dayEvents ) ->
# if events word is an object (array)
# create badge with the number of events
if typeof dayEvents is "object"
# add badge
if dayEvents.number?
badge = $("<span></span>").html(dayEvents.number).addClass("badge")
if dayEvents.badgeClass?
badge.addClass(dayEvents.badgeClass)
day.append badge
# add url
if dayEvents.url
day.find("a").attr "href", dayEvents.url
day
makeActive: ( day, dayEvents ) ->
# if event exists for the given day ...
if dayEvents
# ... add class `active`
if dayEvents.class
classes = dayEvents.class.split " "
day.addClass eventClass for eventClass, i in classes
else
day.addClass "active"
# add badge
day = @addOthers day, dayEvents
day
getDaysInMonth: ( year, month ) ->
new Date(year, month + 1, 0).getDate();
drawDay: ( lastDayOfMonth, yearNum, monthNum, dayNum, i ) ->
day = $("<div></div>").addClass("day")
dateNow = new Date()
dateNow.setHours( 0, 0, 0, 0 )
dayDate = new Date(yearNum, monthNum - 1, dayNum)
if dayDate.getTime() < dateNow.getTime()
pastFutureClass = "past"
else if dayDate.getTime() == dateNow.getTime()
pastFutureClass = "today"
else
pastFutureClass = "future"
day.addClass( @weekDays[ i % 7 ] )
day.addClass( pastFutureClass )
dateString = yearNum + "-" + @addLeadingZero(monthNum) + "-" + @addLeadingZero(dayNum)
# starts drawing days from the appropriate day of the week
if dayNum <= 0 or dayNum > lastDayOfMonth
calcDate = new Date(yearNum, monthNum - 1, dayNum)
dayNum = calcDate.getDate()
monthNum = calcDate.getMonth() + 1
yearNum = calcDate.getFullYear()
day.addClass("not-current")
.addClass(pastFutureClass)
if @options.activateNonCurrentMonths
# create date string to access `events` options dictionary
dateString = yearNum + "-" + @addLeadingZero(monthNum) + "-" + @addLeadingZero(dayNum)
day.append $("<a>" + dayNum + "</a>")
.attr("data-day", dayNum)
.attr("data-month", monthNum)
.attr("data-year", yearNum)
if @options.monthChangeAnimation
@applyTransform day, 'rotateY(180deg)'
@applyBackfaceVisibility day
# make active if event for a day exists
day = @makeActive( day, @options.events[ dateString ] )
@$element.find('[data-group="days"]').append day
drawDays: (year, month) ->
thisRef = @
# set initial time parameters
time = new Date(year, month)
currentMonth = time.getMonth() # count from 0
monthNum = time.getMonth() + 1 # count from 1
yearNum = time.getFullYear()
# get week day for the first day of the current month
time.setDate 1
firstDayOfMonth = if @options.startFromSunday then (time.getDay() + 1) else ( time.getDay() || 7 ) # sunday fix
# get week day for the last day of the current month
lastDayOfMonth = @getDaysInMonth year, month
# out animation
timeout = 0
if @options.monthChangeAnimation
days = @$element.find('[data-group="days"] .day')
for day, i in days
delay = i * 0.01
@applyTransition $(day), 'transform .5s ease ' + delay + 's'
@applyTransform $(day), 'rotateY(180deg)'
@applyBackfaceVisibility $(day)
timeout = (delay + 0.1) * 1000
dayBase = 2
# celculate loop base / number of possible calendar day cells
if @options.allRows
loopBase = 42
else
multiplier = Math.ceil( ( firstDayOfMonth - ( dayBase - 1 ) + lastDayOfMonth ) / 7 )
loopBase = multiplier * 7
#@$element.find(".timeInfo").html time.getFullYear() + " " + @options.translateMonths[time.getMonth()]
@$element.find("[data-head-year]").html time.getFullYear()
@$element.find("[data-head-month]").html @options.translateMonths[time.getMonth()]
draw = () ->
thisRef.$element.find('[data-group="days"]').empty()
# fill callendar
dayNum = dayBase - firstDayOfMonth
i = if thisRef.options.startFromSunday then 0 else 1
while dayNum < loopBase - firstDayOfMonth + dayBase
thisRef.drawDay lastDayOfMonth, yearNum, monthNum, dayNum, i
dayNum = dayNum + 1
i = i + 1
setEvents = () ->
days = thisRef.$element.find('[data-group="days"] .day')
for day, i in days
thisRef.applyTransition $(day), 'transform .5s ease ' + ( i * 0.01 ) + 's'
thisRef.applyTransform $(day), 'rotateY(0deg)'
if thisRef.options.onDayClick
thisRef.$element.find('[data-group="days"] .day a').click ->
thisRef.options.onDayClick.call this, thisRef.options.events
if thisRef.options.onDayHover
thisRef.$element.find('[data-group="days"] .day a').hover ->
thisRef.options.onDayHover.call this, thisRef.options.events
if thisRef.options.onActiveDayClick
thisRef.$element.find('[data-group="days"] .day.active a').click ->
thisRef.options.onActiveDayClick.call this, thisRef.options.events
if thisRef.options.onActiveDayHover
thisRef.$element.find('[data-group="days"] .day.active a').hover ->
thisRef.options.onActiveDayHover.call this, thisRef.options.events
setTimeout setEvents, 0
setTimeout( draw, timeout )
currentMonth
$.fn.responsiveCalendar = ( option, params ) ->
options = $.extend {}, $.fn.responsiveCalendar.defaults, typeof option == 'object' && option
publicFunc =
next: 'next'
prev: 'prev'
edit: 'editDays'
clear: 'clearDays'
clearAll: 'clearAll'
getYearMonth: 'getYearMonth'
jump: 'jump'
curr: 'curr'
init = ( $this ) ->
# support for metadata plugin
options = if $.metadata then $.extend( {}, options, $this.metadata() ) else options
$this.data 'calendar', ( data = new Calendar $this, options )
# call onInit function
if options.onInit
options.onInit.call data
# create events for manual month change
$this.find("[data-go]").click ->
if $(this).data("go") is "prev"
data.prev()
if $(this).data("go") is "next"
data.next()
@each ->
$this = $(this)
# create "calendar" data variable
data = $this.data 'calendar'
# create calendar object on init
if !data
init $this
else if typeof option == 'string'
if publicFunc[option]?
data[ publicFunc[option] ]( params )
else
data.setMonthYear( option ) # sets month to display, format "YYYY-MM"
else if typeof option == 'number'
data.jump Math.abs( option ) + 1
null
# plugin defaults - added as a property on our plugin function
$.fn.responsiveCalendar.defaults =
translateMonths: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"]
events: {}
time: undefined # string - example: "2013-04"
allRows: true
startFromSunday: false
activateNonCurrentMonths: false
monthChangeAnimation: true
# callback functions
onInit: undefined
onDayClick: undefined
onDayHover: undefined
onActiveDayClick: undefined
onActiveDayHover: undefined
onMonthChange: undefined
spy = $('[data-spy="responsive-calendar"]')
if ( spy.length )
opts = {}
if (spy.data 'translate-months')? then opts.translateMonths = spy.data( 'translate-months' ).split ','
#if (spy.data 'events')? then opts.events = spy.data 'events'
if (spy.data 'time')? then opts.time = spy.data 'time'
if (spy.data 'all-rows')? then opts.allRows = spy.data 'all-rows'
if (spy.data 'start-from-sunday')? then opts.startFromSunday = spy.data 'start-from-sunday'
if (spy.data 'activate-non-current-months')? then opts.activateNonCurrentMonths = spy.data 'activate-non-current-months'
if (spy.data 'month-change-animation')? then opts.monthChangeAnimation = spy.data 'month-change-animation'
spy.responsiveCalendar( opts )
|
[
{
"context": "de=&CurrentView=Course&__RequestVerificationToken=ImS3Q-N9kqOn_SuVXXyia8Og-auOw6TsghSKSxCwKqEkmhpLn0bDdbYMQxYygmHcLXhQYJbeK6yr76xz39SwM4KejJEJ4xNYfOeuEFnSu9w1'\n courses = [] # many courses are duplicate ",
"end": 1030,
"score": 0.9931235313415527,
"start": 922,
"tag": "KEY",
... | server/scripts/ucas_courses.coffee | leviathanindustries/noddy | 2 |
import Future from 'fibers/future'
import fs from 'fs'
API.add 'scripts/ucas/courses',
get:
authRequired: 'root'
action: () ->
institutions = JSON.parse fs.readFileSync '/home/cloo/institutions.json'
addr = 'https://digital.ucas.com/search/results?SearchText=&SubjectText=&ProviderText=&AutoSuggestType=&SearchType=&SortOrder=CourseTitleAtoZ&PreviouslyAppliedFilters=SMM_0_Full-time__QM_2_Bachelor+degrees+%28with+or+without+Honours%29__QM_3_Masters+degrees__EP_6_1__&AcademicYearId=2019&ClearingOptOut=True&vacancy-rb=rba&filters=Destination_Undergraduate&UcasTariffPointsMin=0&UcasTariffPointsMax=144%2B&ProviderText=&SubjectText=&filters=StudyModeMapped_Full-time&PointOfEntry=1&filters=QualificationMapped_Bachelor+degrees+%28with+or+without+Honours%29&filters=QualificationMapped_Masters+degrees&DistanceFromPostcode=25mi&RegionDistancePostcode=&CurrentView=Course&__RequestVerificationToken=ImS3Q-N9kqOn_SuVXXyia8Og-auOw6TsghSKSxCwKqEkmhpLn0bDdbYMQxYygmHcLXhQYJbeK6yr76xz39SwM4KejJEJ4xNYfOeuEFnSu9w1'
courses = [] # many courses are duplicate names at different unis, so this will not be as big as the loop below
errors = []
instituted = []
for institution in institutions
iaddr = addr.replace 'ProviderText=&', 'ProviderText=' + institution.replace(/ /g,'+') + '&'
counter = 0
pg = API.http.puppeteer iaddr
len = parseInt pg.split(' courses from ')[0].split('>').pop()
pages = Math.floor len/30
while counter <= pages
# 30 results per page, 29413 courses to get
# but discovered it only pages to 333 then errors out (search index prob caps at 10k) ...
# hence why we iterate over a list of institutions then scrape within those searches
console.log institution + ' ' + counter + ' of ' + len + ' in ' + pages + ' pages, ' + courses.length + ' total courses and ' + errors.length + ' errors'
try
parts = pg.split '<h3 class="course-title heading--snug" data-course-primary-id="'
parts.shift()
for p in parts
course = p.split('>')[1].split('</h3>')[0].trim().split('\n')[0].replace(/&/g,'&')
courses.push(course) if course not in courses
catch
errors.push caddr
counter += 1
caddr = iaddr + '&pageNumber=' + counter
pg = API.http.puppeteer caddr
instituted.push institution
future = new Future()
Meteor.setTimeout (() -> future.return()), 1000
future.wait()
courses.sort()
try
fs.writeFileSync '/home/cloo/ucas_courses.json', JSON.stringify courses, null, 2
try
fs.writeFileSync '/home/cloo/ucas_errors.json', JSON.stringify errors, null, 2
API.mail.send
to: 'alert@cottagelabs.com'
subject: 'UCAS courses complete'
text: 'courses: ' + courses.length + '\nerrors: ' + errors.length + '\ninstitutions: ' + instituted.length
return courses
| 213471 |
import Future from 'fibers/future'
import fs from 'fs'
API.add 'scripts/ucas/courses',
get:
authRequired: 'root'
action: () ->
institutions = JSON.parse fs.readFileSync '/home/cloo/institutions.json'
addr = 'https://digital.ucas.com/search/results?SearchText=&SubjectText=&ProviderText=&AutoSuggestType=&SearchType=&SortOrder=CourseTitleAtoZ&PreviouslyAppliedFilters=SMM_0_Full-time__QM_2_Bachelor+degrees+%28with+or+without+Honours%29__QM_3_Masters+degrees__EP_6_1__&AcademicYearId=2019&ClearingOptOut=True&vacancy-rb=rba&filters=Destination_Undergraduate&UcasTariffPointsMin=0&UcasTariffPointsMax=144%2B&ProviderText=&SubjectText=&filters=StudyModeMapped_Full-time&PointOfEntry=1&filters=QualificationMapped_Bachelor+degrees+%28with+or+without+Honours%29&filters=QualificationMapped_Masters+degrees&DistanceFromPostcode=25mi&RegionDistancePostcode=&CurrentView=Course&__RequestVerificationToken=<KEY>'
courses = [] # many courses are duplicate names at different unis, so this will not be as big as the loop below
errors = []
instituted = []
for institution in institutions
iaddr = addr.replace 'ProviderText=&', 'ProviderText=' + institution.replace(/ /g,'+') + '&'
counter = 0
pg = API.http.puppeteer iaddr
len = parseInt pg.split(' courses from ')[0].split('>').pop()
pages = Math.floor len/30
while counter <= pages
# 30 results per page, 29413 courses to get
# but discovered it only pages to 333 then errors out (search index prob caps at 10k) ...
# hence why we iterate over a list of institutions then scrape within those searches
console.log institution + ' ' + counter + ' of ' + len + ' in ' + pages + ' pages, ' + courses.length + ' total courses and ' + errors.length + ' errors'
try
parts = pg.split '<h3 class="course-title heading--snug" data-course-primary-id="'
parts.shift()
for p in parts
course = p.split('>')[1].split('</h3>')[0].trim().split('\n')[0].replace(/&/g,'&')
courses.push(course) if course not in courses
catch
errors.push caddr
counter += 1
caddr = iaddr + '&pageNumber=' + counter
pg = API.http.puppeteer caddr
instituted.push institution
future = new Future()
Meteor.setTimeout (() -> future.return()), 1000
future.wait()
courses.sort()
try
fs.writeFileSync '/home/cloo/ucas_courses.json', JSON.stringify courses, null, 2
try
fs.writeFileSync '/home/cloo/ucas_errors.json', JSON.stringify errors, null, 2
API.mail.send
to: '<EMAIL>'
subject: 'UCAS courses complete'
text: 'courses: ' + courses.length + '\nerrors: ' + errors.length + '\ninstitutions: ' + instituted.length
return courses
| true |
import Future from 'fibers/future'
import fs from 'fs'
API.add 'scripts/ucas/courses',
get:
authRequired: 'root'
action: () ->
institutions = JSON.parse fs.readFileSync '/home/cloo/institutions.json'
addr = 'https://digital.ucas.com/search/results?SearchText=&SubjectText=&ProviderText=&AutoSuggestType=&SearchType=&SortOrder=CourseTitleAtoZ&PreviouslyAppliedFilters=SMM_0_Full-time__QM_2_Bachelor+degrees+%28with+or+without+Honours%29__QM_3_Masters+degrees__EP_6_1__&AcademicYearId=2019&ClearingOptOut=True&vacancy-rb=rba&filters=Destination_Undergraduate&UcasTariffPointsMin=0&UcasTariffPointsMax=144%2B&ProviderText=&SubjectText=&filters=StudyModeMapped_Full-time&PointOfEntry=1&filters=QualificationMapped_Bachelor+degrees+%28with+or+without+Honours%29&filters=QualificationMapped_Masters+degrees&DistanceFromPostcode=25mi&RegionDistancePostcode=&CurrentView=Course&__RequestVerificationToken=PI:KEY:<KEY>END_PI'
courses = [] # many courses are duplicate names at different unis, so this will not be as big as the loop below
errors = []
instituted = []
for institution in institutions
iaddr = addr.replace 'ProviderText=&', 'ProviderText=' + institution.replace(/ /g,'+') + '&'
counter = 0
pg = API.http.puppeteer iaddr
len = parseInt pg.split(' courses from ')[0].split('>').pop()
pages = Math.floor len/30
while counter <= pages
# 30 results per page, 29413 courses to get
# but discovered it only pages to 333 then errors out (search index prob caps at 10k) ...
# hence why we iterate over a list of institutions then scrape within those searches
console.log institution + ' ' + counter + ' of ' + len + ' in ' + pages + ' pages, ' + courses.length + ' total courses and ' + errors.length + ' errors'
try
parts = pg.split '<h3 class="course-title heading--snug" data-course-primary-id="'
parts.shift()
for p in parts
course = p.split('>')[1].split('</h3>')[0].trim().split('\n')[0].replace(/&/g,'&')
courses.push(course) if course not in courses
catch
errors.push caddr
counter += 1
caddr = iaddr + '&pageNumber=' + counter
pg = API.http.puppeteer caddr
instituted.push institution
future = new Future()
Meteor.setTimeout (() -> future.return()), 1000
future.wait()
courses.sort()
try
fs.writeFileSync '/home/cloo/ucas_courses.json', JSON.stringify courses, null, 2
try
fs.writeFileSync '/home/cloo/ucas_errors.json', JSON.stringify errors, null, 2
API.mail.send
to: 'PI:EMAIL:<EMAIL>END_PI'
subject: 'UCAS courses complete'
text: 'courses: ' + courses.length + '\nerrors: ' + errors.length + '\ninstitutions: ' + instituted.length
return courses
|
[
{
"context": "ry few town crier events, if at all.\n *\n * @name Earplugs\n * @prerequisite Receive 10 town crier events\n ",
"end": 153,
"score": 0.6218532919883728,
"start": 145,
"tag": "NAME",
"value": "Earplugs"
}
] | src/character/personalities/Earplugs.coffee | jawsome/IdleLands | 3 |
Personality = require "../base/Personality"
`/**
* This personality makes it so you see very few town crier events, if at all.
*
* @name Earplugs
* @prerequisite Receive 10 town crier events
* @category Personalities
* @package Player
*/`
class Earplugs extends Personality
constructor: ->
eventModifier: (player, event) -> if event.type is "towncrier" then -500
@canUse = (player) ->
player.statistics["event towncrier"] >= 10
@desc = "Receive 10 town crier events"
module.exports = exports = Earplugs | 191193 |
Personality = require "../base/Personality"
`/**
* This personality makes it so you see very few town crier events, if at all.
*
* @name <NAME>
* @prerequisite Receive 10 town crier events
* @category Personalities
* @package Player
*/`
class Earplugs extends Personality
constructor: ->
eventModifier: (player, event) -> if event.type is "towncrier" then -500
@canUse = (player) ->
player.statistics["event towncrier"] >= 10
@desc = "Receive 10 town crier events"
module.exports = exports = Earplugs | true |
Personality = require "../base/Personality"
`/**
* This personality makes it so you see very few town crier events, if at all.
*
* @name PI:NAME:<NAME>END_PI
* @prerequisite Receive 10 town crier events
* @category Personalities
* @package Player
*/`
class Earplugs extends Personality
constructor: ->
eventModifier: (player, event) -> if event.type is "towncrier" then -500
@canUse = (player) ->
player.statistics["event towncrier"] >= 10
@desc = "Receive 10 town crier events"
module.exports = exports = Earplugs |
[
{
"context": "st'\nstackTrace = require 'stack-trace'\nAPI_KEY = '67df35116e8250ccc8c4a3e081882b35'\n\nStackTraceCache = new WeakMap\n\nbuildNotificatio",
"end": 154,
"score": 0.9997369647026062,
"start": 122,
"tag": "KEY",
"value": "67df35116e8250ccc8c4a3e081882b35"
}
] | lib/reporter.coffee | particle-iot/particle-dev-exception-reporting | 0 | _ = require 'underscore-plus'
os = require 'os'
request = require 'request'
stackTrace = require 'stack-trace'
API_KEY = '67df35116e8250ccc8c4a3e081882b35'
StackTraceCache = new WeakMap
buildNotificationJSON = (error, params) ->
apiKey: API_KEY
notifier:
name: 'Particle Dev'
version: params.appVersion
url: 'https://particle.io'
events: [{
payloadVersion: "2"
exceptions: [buildExceptionJSON(error, params.projectRoot)]
severity: params.severity
user:
id: params.userId
app:
version: params.appVersion
releaseStage: params.releaseStage
device:
osVersion: params.osVersion
metaData: error.metadata
}]
buildExceptionJSON = (error, projectRoot) ->
errorClass: error.constructor.name
message: error.message
stacktrace: buildStackTraceJSON(error, projectRoot)
buildStackTraceJSON = (error, projectRoot) ->
projectRootRegex = ///^#{_.escapeRegExp(projectRoot)}[\/\\]///i
parseStackTrace(error).map (callSite) ->
file: callSite.getFileName().replace(projectRootRegex, '')
method: callSite.getMethodName() ? callSite.getFunctionName() ? "none"
lineNumber: callSite.getLineNumber()
columnNumber: callSite.getColumnNumber()
inProject: not /node_modules/.test(callSite.getFileName())
getDefaultNotificationParams = ->
userId: atom.config.get('exception-reporting.userId')
appVersion: atom.getVersion()
releaseStage: if atom.isReleasedVersion() then 'production' else 'development'
projectRoot: atom.getLoadSettings().resourcePath
osVersion: "#{os.platform()}-#{os.arch()}-#{os.release()}"
performRequest = (json) ->
options =
method: 'POST'
url: 'https://notify.bugsnag.com'
headers: 'Content-Type': 'application/json'
body: JSON.stringify(json)
request options, -> # Empty callback prevents errors from going to the console
shouldReport = (error) ->
return true if global.alwaysReportToBugsnag # Used to test reports in dev mode
return true if exports.alwaysReport # Used in specs
return false if atom.inDevMode()
if topFrame = parseStackTrace(error)[0]
# only report exceptions that originate from the application bundle
topFrame.getFileName()?.indexOf(atom.getLoadSettings().resourcePath) is 0
else
false
parseStackTrace = (error) ->
if callSites = StackTraceCache.get(error)
callSites
else
callSites = stackTrace.parse(error)
StackTraceCache.set(error, callSites)
callSites
requestPrivateMetadataConsent = (error, message, reportFn) ->
reportWithoutPrivateMetadata = ->
dismissSubscription?.dispose()
delete error.privateMetadata
delete error.privateMetadataDescription
reportFn(error)
notification?.dismiss()
reportWithPrivateMetadata = ->
error.metadata ?= {}
for key, value of error.privateMetadata
error.metadata[key] = value
reportWithoutPrivateMetadata()
if name = error.privateMetadataRequestName
if localStorage.getItem("private-metadata-request:#{name}")
return reportWithoutPrivateMetadata(error)
else
localStorage.setItem("private-metadata-request:#{name}", true)
notification = atom.notifications.addInfo message,
detail: error.privateMetadataDescription
description: "Are you willing to submit this information to a private server for debugging purposes?"
dismissable: true
buttons: [
{
text: "No"
onDidClick: reportWithoutPrivateMetadata
}
{
text: "Yes, Submit For Debugging"
onDidClick: reportWithPrivateMetadata
}
]
dismissSubscription = notification.onDidDismiss(reportWithoutPrivateMetadata)
exports.reportUncaughtException = (error) ->
return unless shouldReport(error)
if error.privateMetadata? and error.privateMetadataDescription?
message = "The Atom team would like to collect the following information to resolve this error:"
requestPrivateMetadataConsent(error, message, exports.reportUncaughtException)
return
params = getDefaultNotificationParams()
params.severity = "error"
json = buildNotificationJSON(error, params)
performRequest(json)
exports.reportFailedAssertion = (error) ->
return unless shouldReport(error)
if error.privateMetadata? and error.privateMetadataDescription?
message = "The Atom team would like to collect some information to resolve an unexpected condition:"
requestPrivateMetadataConsent(error, message, exports.reportFailedAssertion)
return
params = getDefaultNotificationParams()
params.severity = "warning"
json = buildNotificationJSON(error, params)
performRequest(json)
# Used in specs
exports.setRequestFunction = (requestFunction) ->
request = requestFunction
exports.API_KEY = API_KEY
| 141775 | _ = require 'underscore-plus'
os = require 'os'
request = require 'request'
stackTrace = require 'stack-trace'
API_KEY = '<KEY>'
StackTraceCache = new WeakMap
buildNotificationJSON = (error, params) ->
apiKey: API_KEY
notifier:
name: 'Particle Dev'
version: params.appVersion
url: 'https://particle.io'
events: [{
payloadVersion: "2"
exceptions: [buildExceptionJSON(error, params.projectRoot)]
severity: params.severity
user:
id: params.userId
app:
version: params.appVersion
releaseStage: params.releaseStage
device:
osVersion: params.osVersion
metaData: error.metadata
}]
buildExceptionJSON = (error, projectRoot) ->
errorClass: error.constructor.name
message: error.message
stacktrace: buildStackTraceJSON(error, projectRoot)
buildStackTraceJSON = (error, projectRoot) ->
projectRootRegex = ///^#{_.escapeRegExp(projectRoot)}[\/\\]///i
parseStackTrace(error).map (callSite) ->
file: callSite.getFileName().replace(projectRootRegex, '')
method: callSite.getMethodName() ? callSite.getFunctionName() ? "none"
lineNumber: callSite.getLineNumber()
columnNumber: callSite.getColumnNumber()
inProject: not /node_modules/.test(callSite.getFileName())
getDefaultNotificationParams = ->
userId: atom.config.get('exception-reporting.userId')
appVersion: atom.getVersion()
releaseStage: if atom.isReleasedVersion() then 'production' else 'development'
projectRoot: atom.getLoadSettings().resourcePath
osVersion: "#{os.platform()}-#{os.arch()}-#{os.release()}"
performRequest = (json) ->
options =
method: 'POST'
url: 'https://notify.bugsnag.com'
headers: 'Content-Type': 'application/json'
body: JSON.stringify(json)
request options, -> # Empty callback prevents errors from going to the console
shouldReport = (error) ->
return true if global.alwaysReportToBugsnag # Used to test reports in dev mode
return true if exports.alwaysReport # Used in specs
return false if atom.inDevMode()
if topFrame = parseStackTrace(error)[0]
# only report exceptions that originate from the application bundle
topFrame.getFileName()?.indexOf(atom.getLoadSettings().resourcePath) is 0
else
false
parseStackTrace = (error) ->
if callSites = StackTraceCache.get(error)
callSites
else
callSites = stackTrace.parse(error)
StackTraceCache.set(error, callSites)
callSites
requestPrivateMetadataConsent = (error, message, reportFn) ->
reportWithoutPrivateMetadata = ->
dismissSubscription?.dispose()
delete error.privateMetadata
delete error.privateMetadataDescription
reportFn(error)
notification?.dismiss()
reportWithPrivateMetadata = ->
error.metadata ?= {}
for key, value of error.privateMetadata
error.metadata[key] = value
reportWithoutPrivateMetadata()
if name = error.privateMetadataRequestName
if localStorage.getItem("private-metadata-request:#{name}")
return reportWithoutPrivateMetadata(error)
else
localStorage.setItem("private-metadata-request:#{name}", true)
notification = atom.notifications.addInfo message,
detail: error.privateMetadataDescription
description: "Are you willing to submit this information to a private server for debugging purposes?"
dismissable: true
buttons: [
{
text: "No"
onDidClick: reportWithoutPrivateMetadata
}
{
text: "Yes, Submit For Debugging"
onDidClick: reportWithPrivateMetadata
}
]
dismissSubscription = notification.onDidDismiss(reportWithoutPrivateMetadata)
exports.reportUncaughtException = (error) ->
return unless shouldReport(error)
if error.privateMetadata? and error.privateMetadataDescription?
message = "The Atom team would like to collect the following information to resolve this error:"
requestPrivateMetadataConsent(error, message, exports.reportUncaughtException)
return
params = getDefaultNotificationParams()
params.severity = "error"
json = buildNotificationJSON(error, params)
performRequest(json)
exports.reportFailedAssertion = (error) ->
return unless shouldReport(error)
if error.privateMetadata? and error.privateMetadataDescription?
message = "The Atom team would like to collect some information to resolve an unexpected condition:"
requestPrivateMetadataConsent(error, message, exports.reportFailedAssertion)
return
params = getDefaultNotificationParams()
params.severity = "warning"
json = buildNotificationJSON(error, params)
performRequest(json)
# Used in specs
exports.setRequestFunction = (requestFunction) ->
request = requestFunction
exports.API_KEY = API_KEY
| true | _ = require 'underscore-plus'
os = require 'os'
request = require 'request'
stackTrace = require 'stack-trace'
API_KEY = 'PI:KEY:<KEY>END_PI'
StackTraceCache = new WeakMap
buildNotificationJSON = (error, params) ->
apiKey: API_KEY
notifier:
name: 'Particle Dev'
version: params.appVersion
url: 'https://particle.io'
events: [{
payloadVersion: "2"
exceptions: [buildExceptionJSON(error, params.projectRoot)]
severity: params.severity
user:
id: params.userId
app:
version: params.appVersion
releaseStage: params.releaseStage
device:
osVersion: params.osVersion
metaData: error.metadata
}]
buildExceptionJSON = (error, projectRoot) ->
errorClass: error.constructor.name
message: error.message
stacktrace: buildStackTraceJSON(error, projectRoot)
buildStackTraceJSON = (error, projectRoot) ->
projectRootRegex = ///^#{_.escapeRegExp(projectRoot)}[\/\\]///i
parseStackTrace(error).map (callSite) ->
file: callSite.getFileName().replace(projectRootRegex, '')
method: callSite.getMethodName() ? callSite.getFunctionName() ? "none"
lineNumber: callSite.getLineNumber()
columnNumber: callSite.getColumnNumber()
inProject: not /node_modules/.test(callSite.getFileName())
getDefaultNotificationParams = ->
userId: atom.config.get('exception-reporting.userId')
appVersion: atom.getVersion()
releaseStage: if atom.isReleasedVersion() then 'production' else 'development'
projectRoot: atom.getLoadSettings().resourcePath
osVersion: "#{os.platform()}-#{os.arch()}-#{os.release()}"
performRequest = (json) ->
options =
method: 'POST'
url: 'https://notify.bugsnag.com'
headers: 'Content-Type': 'application/json'
body: JSON.stringify(json)
request options, -> # Empty callback prevents errors from going to the console
shouldReport = (error) ->
return true if global.alwaysReportToBugsnag # Used to test reports in dev mode
return true if exports.alwaysReport # Used in specs
return false if atom.inDevMode()
if topFrame = parseStackTrace(error)[0]
# only report exceptions that originate from the application bundle
topFrame.getFileName()?.indexOf(atom.getLoadSettings().resourcePath) is 0
else
false
parseStackTrace = (error) ->
if callSites = StackTraceCache.get(error)
callSites
else
callSites = stackTrace.parse(error)
StackTraceCache.set(error, callSites)
callSites
requestPrivateMetadataConsent = (error, message, reportFn) ->
reportWithoutPrivateMetadata = ->
dismissSubscription?.dispose()
delete error.privateMetadata
delete error.privateMetadataDescription
reportFn(error)
notification?.dismiss()
reportWithPrivateMetadata = ->
error.metadata ?= {}
for key, value of error.privateMetadata
error.metadata[key] = value
reportWithoutPrivateMetadata()
if name = error.privateMetadataRequestName
if localStorage.getItem("private-metadata-request:#{name}")
return reportWithoutPrivateMetadata(error)
else
localStorage.setItem("private-metadata-request:#{name}", true)
notification = atom.notifications.addInfo message,
detail: error.privateMetadataDescription
description: "Are you willing to submit this information to a private server for debugging purposes?"
dismissable: true
buttons: [
{
text: "No"
onDidClick: reportWithoutPrivateMetadata
}
{
text: "Yes, Submit For Debugging"
onDidClick: reportWithPrivateMetadata
}
]
dismissSubscription = notification.onDidDismiss(reportWithoutPrivateMetadata)
exports.reportUncaughtException = (error) ->
return unless shouldReport(error)
if error.privateMetadata? and error.privateMetadataDescription?
message = "The Atom team would like to collect the following information to resolve this error:"
requestPrivateMetadataConsent(error, message, exports.reportUncaughtException)
return
params = getDefaultNotificationParams()
params.severity = "error"
json = buildNotificationJSON(error, params)
performRequest(json)
exports.reportFailedAssertion = (error) ->
return unless shouldReport(error)
if error.privateMetadata? and error.privateMetadataDescription?
message = "The Atom team would like to collect some information to resolve an unexpected condition:"
requestPrivateMetadataConsent(error, message, exports.reportFailedAssertion)
return
params = getDefaultNotificationParams()
params.severity = "warning"
json = buildNotificationJSON(error, params)
performRequest(json)
# Used in specs
exports.setRequestFunction = (requestFunction) ->
request = requestFunction
exports.API_KEY = API_KEY
|
[
{
"context": "ase()\n @user = msg.message.user.name\n @key = \"#{CIRCLE_ALERT_PRE}-#{@repo}-#{@branch}\"\n\n addWatcher: ->\n watchers = @robot.brain.get",
"end": 648,
"score": 0.977962851524353,
"start": 607,
"tag": "KEY",
"value": "\"#{CIRCLE_ALERT_PRE}-#{@repo}-#{@branch}\""
}... | src/circle-ci-notify.coffee | IndieGoGo/hubot-circleci-notify | 3 | # Description:
# Get notifications when your CircleCI builds finish
#
# Commands:
# hubot ci[rcle] alert <repo> <branch> - Receive a DM when CircleCI completes a build for your branch
# hubot ci[rcle] rm alert <repo> <branch> - Remove notifications for CircleCI builds of your branch
# hubot ci[rcle] build alert <build number> - Receive a DM when CircleCI completes a specific build
CIRCLE_ALERT_PRE = 'circleci-alert'
class CircleCIAlert
constructor: (@robot, msg) ->
@repo = msg.match[1].toLowerCase()
@branch = msg.match[2].toLowerCase()
@user = msg.message.user.name
@key = "#{CIRCLE_ALERT_PRE}-#{@repo}-#{@branch}"
addWatcher: ->
watchers = @robot.brain.get(@key) or []
if @user in watchers
@_dm 'You already receive alerts for this branch.'
else
watchers.push @user
@_setWatchers watchers
@_dm "You will receive alerts when builds for #{@repo} - #{@branch} finish."
removeWatcher: ->
watchers = @robot.brain.get(@key) or []
if @user not in watchers
@_dm 'You are not receiving alerts for this branch.'
else
@_setWatchers watchers.filter (u) => u isnt @user
@_dm "You will no longer receive alerts when builds for #{@repo} - #{@branch} finish."
# private
_dm: (msg) -> @robot.send {room: @user}, msg
_setWatchers: (watchers) ->
@robot.brain.set @key, watchers
@robot.brain.save
module.exports = (robot) ->
robot.respond /(?:ci|circle) build alert (\d+)/i, (msg) ->
user = msg.message.user.name
build = msg.match[1]
watchers = robot.brain.get("#{CIRCLE_ALERT_PRE}-#{build}") or []
if user in watchers
robot.send {room: user}, 'You are already watching this build.'
else
watchers.push user
robot.brain.set "#{CIRCLE_ALERT_PRE}-#{build}", watchers
robot.brain.save
robot.send {room: user}, 'You will receive an alert when this build finishes.'
robot.respond /(?:ci|circle) alert ([\w\d\/\.\-]+) ([\w\d\/\.\-]+)/i, (msg) ->
alert = new CircleCIAlert robot, msg
alert.addWatcher()
robot.respond /(?:ci|circle) (?:rm|remove) alert ([\w\d\/\.\-]+) ([\w\d\/\.\-]+)/i, (msg) ->
alert = new CircleCIAlert robot, msg
alert.removeWatcher()
robot.router.post '/hubot/circleci', (req, res) ->
payload = req.body.payload
branch = payload.branch.toLowerCase()
repo = payload.reponame.toLowerCase()
watchers = robot.brain.get("#{CIRCLE_ALERT_PRE}-#{repo}-#{branch}") or []
buildWatchers = robot.brain.get "#{CIRCLE_ALERT_PRE}-#{payload.build_num}"
if buildWatchers?
watchers = watchers.concat buildWatchers
robot.brain.remove "#{CIRCLE_ALERT_PRE}-#{payload.build_num}"
robot.brain.save
return res.send 'No users are watching this branch.' if watchers.length is 0
message = "Build for #{repo} - #{branch}: #{payload.outcome.toUpperCase()}. See more at #{payload.build_url}"
robot.send {room: user}, message for user in watchers
res.send 'Users alerted of build status.'
| 113472 | # Description:
# Get notifications when your CircleCI builds finish
#
# Commands:
# hubot ci[rcle] alert <repo> <branch> - Receive a DM when CircleCI completes a build for your branch
# hubot ci[rcle] rm alert <repo> <branch> - Remove notifications for CircleCI builds of your branch
# hubot ci[rcle] build alert <build number> - Receive a DM when CircleCI completes a specific build
CIRCLE_ALERT_PRE = 'circleci-alert'
class CircleCIAlert
constructor: (@robot, msg) ->
@repo = msg.match[1].toLowerCase()
@branch = msg.match[2].toLowerCase()
@user = msg.message.user.name
@key = <KEY>
addWatcher: ->
watchers = @robot.brain.get(@key) or []
if @user in watchers
@_dm 'You already receive alerts for this branch.'
else
watchers.push @user
@_setWatchers watchers
@_dm "You will receive alerts when builds for #{@repo} - #{@branch} finish."
removeWatcher: ->
watchers = @robot.brain.get(@key) or []
if @user not in watchers
@_dm 'You are not receiving alerts for this branch.'
else
@_setWatchers watchers.filter (u) => u isnt @user
@_dm "You will no longer receive alerts when builds for #{@repo} - #{@branch} finish."
# private
_dm: (msg) -> @robot.send {room: @user}, msg
_setWatchers: (watchers) ->
@robot.brain.set @key, watchers
@robot.brain.save
module.exports = (robot) ->
robot.respond /(?:ci|circle) build alert (\d+)/i, (msg) ->
user = msg.message.user.name
build = msg.match[1]
watchers = robot.brain.get("#{CIRCLE_ALERT_PRE}-#{build}") or []
if user in watchers
robot.send {room: user}, 'You are already watching this build.'
else
watchers.push user
robot.brain.set "#{CIRCLE_ALERT_PRE}-#{build}", watchers
robot.brain.save
robot.send {room: user}, 'You will receive an alert when this build finishes.'
robot.respond /(?:ci|circle) alert ([\w\d\/\.\-]+) ([\w\d\/\.\-]+)/i, (msg) ->
alert = new CircleCIAlert robot, msg
alert.addWatcher()
robot.respond /(?:ci|circle) (?:rm|remove) alert ([\w\d\/\.\-]+) ([\w\d\/\.\-]+)/i, (msg) ->
alert = new CircleCIAlert robot, msg
alert.removeWatcher()
robot.router.post '/hubot/circleci', (req, res) ->
payload = req.body.payload
branch = payload.branch.toLowerCase()
repo = payload.reponame.toLowerCase()
watchers = robot.brain.get("#{CIRCLE_ALERT_PRE}-#{repo}-#{branch}") or []
buildWatchers = robot.brain.get "#{CIRCLE_ALERT_PRE}-#{payload.build_num}"
if buildWatchers?
watchers = watchers.concat buildWatchers
robot.brain.remove "#{CIRCLE_ALERT_PRE}-#{payload.build_num}"
robot.brain.save
return res.send 'No users are watching this branch.' if watchers.length is 0
message = "Build for #{repo} - #{branch}: #{payload.outcome.toUpperCase()}. See more at #{payload.build_url}"
robot.send {room: user}, message for user in watchers
res.send 'Users alerted of build status.'
| true | # Description:
# Get notifications when your CircleCI builds finish
#
# Commands:
# hubot ci[rcle] alert <repo> <branch> - Receive a DM when CircleCI completes a build for your branch
# hubot ci[rcle] rm alert <repo> <branch> - Remove notifications for CircleCI builds of your branch
# hubot ci[rcle] build alert <build number> - Receive a DM when CircleCI completes a specific build
CIRCLE_ALERT_PRE = 'circleci-alert'
class CircleCIAlert
constructor: (@robot, msg) ->
@repo = msg.match[1].toLowerCase()
@branch = msg.match[2].toLowerCase()
@user = msg.message.user.name
@key = PI:KEY:<KEY>END_PI
addWatcher: ->
watchers = @robot.brain.get(@key) or []
if @user in watchers
@_dm 'You already receive alerts for this branch.'
else
watchers.push @user
@_setWatchers watchers
@_dm "You will receive alerts when builds for #{@repo} - #{@branch} finish."
removeWatcher: ->
watchers = @robot.brain.get(@key) or []
if @user not in watchers
@_dm 'You are not receiving alerts for this branch.'
else
@_setWatchers watchers.filter (u) => u isnt @user
@_dm "You will no longer receive alerts when builds for #{@repo} - #{@branch} finish."
# private
_dm: (msg) -> @robot.send {room: @user}, msg
_setWatchers: (watchers) ->
@robot.brain.set @key, watchers
@robot.brain.save
module.exports = (robot) ->
robot.respond /(?:ci|circle) build alert (\d+)/i, (msg) ->
user = msg.message.user.name
build = msg.match[1]
watchers = robot.brain.get("#{CIRCLE_ALERT_PRE}-#{build}") or []
if user in watchers
robot.send {room: user}, 'You are already watching this build.'
else
watchers.push user
robot.brain.set "#{CIRCLE_ALERT_PRE}-#{build}", watchers
robot.brain.save
robot.send {room: user}, 'You will receive an alert when this build finishes.'
robot.respond /(?:ci|circle) alert ([\w\d\/\.\-]+) ([\w\d\/\.\-]+)/i, (msg) ->
alert = new CircleCIAlert robot, msg
alert.addWatcher()
robot.respond /(?:ci|circle) (?:rm|remove) alert ([\w\d\/\.\-]+) ([\w\d\/\.\-]+)/i, (msg) ->
alert = new CircleCIAlert robot, msg
alert.removeWatcher()
robot.router.post '/hubot/circleci', (req, res) ->
payload = req.body.payload
branch = payload.branch.toLowerCase()
repo = payload.reponame.toLowerCase()
watchers = robot.brain.get("#{CIRCLE_ALERT_PRE}-#{repo}-#{branch}") or []
buildWatchers = robot.brain.get "#{CIRCLE_ALERT_PRE}-#{payload.build_num}"
if buildWatchers?
watchers = watchers.concat buildWatchers
robot.brain.remove "#{CIRCLE_ALERT_PRE}-#{payload.build_num}"
robot.brain.save
return res.send 'No users are watching this branch.' if watchers.length is 0
message = "Build for #{repo} - #{branch}: #{payload.outcome.toUpperCase()}. See more at #{payload.build_url}"
robot.send {room: user}, message for user in watchers
res.send 'Users alerted of build status.'
|
[
{
"context": "amespace: @\n\n productJSON =\n id: 1\n name: 'Product One'\n cost: 10\n store_id: 1\n\n MockRequest.",
"end": 1774,
"score": 0.6833209991455078,
"start": 1767,
"tag": "NAME",
"value": "Product"
},
{
"context": "subject_type: 'Store'\n }, {\n id: 2\n ... | tests/batman/storage_adapter/rails_storage_test.coffee | airhorns/batman | 1 | if typeof require isnt 'undefined'
{restStorageTestSuite} = require('./rest_storage_helper')
else
{restStorageTestSuite} = window
MockRequest = restStorageTestSuite.MockRequest
oldRequest = Batman.Request
oldExpectedForUrl = MockRequest.getExpectedForUrl
QUnit.module "Batman.RailsStorage"
setup: ->
MockRequest.getExpectedForUrl = (url) ->
@expects[url.slice(0,-5)] || [] # cut off the .json so the fixtures from the test suite work fine
Batman.Request = MockRequest
MockRequest.reset()
class @Store extends Batman.Model
@encode 'id', 'name'
@storeAdapter = new Batman.RailsStorage(@Store)
@Store.persist @storeAdapter
class @Product extends Batman.Model
@encode 'id', 'name', 'cost'
@productAdapter = new Batman.RailsStorage(@Product)
@Product.persist @productAdapter
@adapter = @productAdapter # for restStorageTestSuite
teardown: ->
Batman.Request = oldRequest
MockRequest.getExpectedForUrl = oldExpectedForUrl
restStorageTestSuite.testOptionsGeneration('.json')
restStorageTestSuite()
asyncTest 'creating in storage: should callback with the record with errors on it if server side validation fails', ->
MockRequest.expect
url: '/products'
method: 'POST'
, error:
status: 422
response: JSON.stringify
name: ["can't be test", "must be valid"]
product = new @Product(name: "test")
@productAdapter.perform 'create', product, {}, (err, record) =>
ok err instanceof Batman.ErrorsSet
ok record
equal record.get('errors').length, 2
QUnit.start()
asyncTest 'hasOne formats the URL to /roots/id/singular', 1, ->
@Store.hasOne 'product', namespace: @
@Product.belongsTo 'store', namespace: @
productJSON =
id: 1
name: 'Product One'
cost: 10
store_id: 1
MockRequest.expect {
url: '/stores/1/product' # .json is cut off in setup
method: 'GET'
}, [productJSON]
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [{
id: 1
name: 'Store One'
}]
store = new @Store id: 1
product = store.get('product')
delay ->
deepEqual product.toJSON(), productJSON
asyncTest 'hasMany formats the URL to /roots/id/plural', 1, ->
@Store.hasMany 'products', namespace: @
@Product.belongsTo 'store', namespace: @
productsJSON = [{
id: 1
name: 'Product One'
cost: 10
store_id: 1
}, {
id: 2
name: 'Product Two'
cost: 10
store_id: 1
}]
MockRequest.expect {
url: '/stores/1/products' # .json is cut off in setup
method: 'GET'
}, productsJSON
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [
id: 1
name: 'Store One'
]
store = new @Store id: 1
products = store.get('products')
delay ->
deepEqual (products.map (product) -> product.toJSON()), productsJSON
asyncTest 'hasMany formats the URL to /roots/id/plural when polymorphic', 1, ->
@Store.hasMany 'products', {namespace: @, as: 'subject'}
@Product.belongsTo 'subject', {namespace: @, polymorphic: true}
productsJSON = [{
id: 1
name: 'Product One'
cost: 10
subject_id: 1
subject_type: 'Store'
}, {
id: 2
name: 'Product Two'
cost: 10
subject_id: 1
subject_type: 'Store'
}]
MockRequest.expect {
url: '/stores/1/products' # .json is cut off in setup
method: 'GET'
}, productsJSON
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [
id: 1
name: 'Store One'
]
store = new @Store id: 1
products = store.get('products')
delay ->
deepEqual (products.map (product) -> product.toJSON()), productsJSON
productJSON =
product:
name: 'test'
id: 10
asyncTest 'updating in storage: should serialize array data without indicies', 1, ->
MockRequest.expect
url: '/products'
method: 'POST'
data: "product%5Bname%5D%5B%5D=a&product%5Bname%5D%5B%5D=b"
, productJSON
MockRequest.expect
url: '/products/10'
method: 'PUT'
data: "product%5Bid%5D=10&product%5Bname%5D=test&product%5Bcost%5D=10"
, product:
name: 'test'
cost: 10
MockRequest.expect
url: '/products/10'
method: 'GET'
, product:
name: 'test'
cost: 10
product = new @Product(name: ["a", "b"])
@adapter.perform 'create', product, {}, (err, createdRecord) =>
throw err if err
product.set('cost', 10)
@adapter.perform 'update', product, {}, (err, updatedProduct) =>
throw err if err
@adapter.perform 'read', product, {}, (err, readProduct) ->
throw err if err
equal readProduct.get('cost', 10), 10
QUnit.start()
| 102239 | if typeof require isnt 'undefined'
{restStorageTestSuite} = require('./rest_storage_helper')
else
{restStorageTestSuite} = window
MockRequest = restStorageTestSuite.MockRequest
oldRequest = Batman.Request
oldExpectedForUrl = MockRequest.getExpectedForUrl
QUnit.module "Batman.RailsStorage"
setup: ->
MockRequest.getExpectedForUrl = (url) ->
@expects[url.slice(0,-5)] || [] # cut off the .json so the fixtures from the test suite work fine
Batman.Request = MockRequest
MockRequest.reset()
class @Store extends Batman.Model
@encode 'id', 'name'
@storeAdapter = new Batman.RailsStorage(@Store)
@Store.persist @storeAdapter
class @Product extends Batman.Model
@encode 'id', 'name', 'cost'
@productAdapter = new Batman.RailsStorage(@Product)
@Product.persist @productAdapter
@adapter = @productAdapter # for restStorageTestSuite
teardown: ->
Batman.Request = oldRequest
MockRequest.getExpectedForUrl = oldExpectedForUrl
restStorageTestSuite.testOptionsGeneration('.json')
restStorageTestSuite()
asyncTest 'creating in storage: should callback with the record with errors on it if server side validation fails', ->
MockRequest.expect
url: '/products'
method: 'POST'
, error:
status: 422
response: JSON.stringify
name: ["can't be test", "must be valid"]
product = new @Product(name: "test")
@productAdapter.perform 'create', product, {}, (err, record) =>
ok err instanceof Batman.ErrorsSet
ok record
equal record.get('errors').length, 2
QUnit.start()
asyncTest 'hasOne formats the URL to /roots/id/singular', 1, ->
@Store.hasOne 'product', namespace: @
@Product.belongsTo 'store', namespace: @
productJSON =
id: 1
name: '<NAME> One'
cost: 10
store_id: 1
MockRequest.expect {
url: '/stores/1/product' # .json is cut off in setup
method: 'GET'
}, [productJSON]
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [{
id: 1
name: 'Store One'
}]
store = new @Store id: 1
product = store.get('product')
delay ->
deepEqual product.toJSON(), productJSON
asyncTest 'hasMany formats the URL to /roots/id/plural', 1, ->
@Store.hasMany 'products', namespace: @
@Product.belongsTo 'store', namespace: @
productsJSON = [{
id: 1
name: 'Product One'
cost: 10
store_id: 1
}, {
id: 2
name: 'Product Two'
cost: 10
store_id: 1
}]
MockRequest.expect {
url: '/stores/1/products' # .json is cut off in setup
method: 'GET'
}, productsJSON
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [
id: 1
name: 'Store One'
]
store = new @Store id: 1
products = store.get('products')
delay ->
deepEqual (products.map (product) -> product.toJSON()), productsJSON
asyncTest 'hasMany formats the URL to /roots/id/plural when polymorphic', 1, ->
@Store.hasMany 'products', {namespace: @, as: 'subject'}
@Product.belongsTo 'subject', {namespace: @, polymorphic: true}
productsJSON = [{
id: 1
name: 'Product One'
cost: 10
subject_id: 1
subject_type: 'Store'
}, {
id: 2
name: '<NAME>'
cost: 10
subject_id: 1
subject_type: 'Store'
}]
MockRequest.expect {
url: '/stores/1/products' # .json is cut off in setup
method: 'GET'
}, productsJSON
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [
id: 1
name: 'Store One'
]
store = new @Store id: 1
products = store.get('products')
delay ->
deepEqual (products.map (product) -> product.toJSON()), productsJSON
productJSON =
product:
name: '<NAME>'
id: 10
asyncTest 'updating in storage: should serialize array data without indicies', 1, ->
MockRequest.expect
url: '/products'
method: 'POST'
data: "product%5Bname%5D%5B%5D=a&product%5Bname%5D%5B%5D=b"
, productJSON
MockRequest.expect
url: '/products/10'
method: 'PUT'
data: "product%5Bid%5D=10&product%5Bname%5D=test&product%5Bcost%5D=10"
, product:
name: 'test'
cost: 10
MockRequest.expect
url: '/products/10'
method: 'GET'
, product:
name: '<NAME>'
cost: 10
product = new @Product(name: ["a", "b"])
@adapter.perform 'create', product, {}, (err, createdRecord) =>
throw err if err
product.set('cost', 10)
@adapter.perform 'update', product, {}, (err, updatedProduct) =>
throw err if err
@adapter.perform 'read', product, {}, (err, readProduct) ->
throw err if err
equal readProduct.get('cost', 10), 10
QUnit.start()
| true | if typeof require isnt 'undefined'
{restStorageTestSuite} = require('./rest_storage_helper')
else
{restStorageTestSuite} = window
MockRequest = restStorageTestSuite.MockRequest
oldRequest = Batman.Request
oldExpectedForUrl = MockRequest.getExpectedForUrl
QUnit.module "Batman.RailsStorage"
setup: ->
MockRequest.getExpectedForUrl = (url) ->
@expects[url.slice(0,-5)] || [] # cut off the .json so the fixtures from the test suite work fine
Batman.Request = MockRequest
MockRequest.reset()
class @Store extends Batman.Model
@encode 'id', 'name'
@storeAdapter = new Batman.RailsStorage(@Store)
@Store.persist @storeAdapter
class @Product extends Batman.Model
@encode 'id', 'name', 'cost'
@productAdapter = new Batman.RailsStorage(@Product)
@Product.persist @productAdapter
@adapter = @productAdapter # for restStorageTestSuite
teardown: ->
Batman.Request = oldRequest
MockRequest.getExpectedForUrl = oldExpectedForUrl
restStorageTestSuite.testOptionsGeneration('.json')
restStorageTestSuite()
asyncTest 'creating in storage: should callback with the record with errors on it if server side validation fails', ->
MockRequest.expect
url: '/products'
method: 'POST'
, error:
status: 422
response: JSON.stringify
name: ["can't be test", "must be valid"]
product = new @Product(name: "test")
@productAdapter.perform 'create', product, {}, (err, record) =>
ok err instanceof Batman.ErrorsSet
ok record
equal record.get('errors').length, 2
QUnit.start()
asyncTest 'hasOne formats the URL to /roots/id/singular', 1, ->
@Store.hasOne 'product', namespace: @
@Product.belongsTo 'store', namespace: @
productJSON =
id: 1
name: 'PI:NAME:<NAME>END_PI One'
cost: 10
store_id: 1
MockRequest.expect {
url: '/stores/1/product' # .json is cut off in setup
method: 'GET'
}, [productJSON]
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [{
id: 1
name: 'Store One'
}]
store = new @Store id: 1
product = store.get('product')
delay ->
deepEqual product.toJSON(), productJSON
asyncTest 'hasMany formats the URL to /roots/id/plural', 1, ->
@Store.hasMany 'products', namespace: @
@Product.belongsTo 'store', namespace: @
productsJSON = [{
id: 1
name: 'Product One'
cost: 10
store_id: 1
}, {
id: 2
name: 'Product Two'
cost: 10
store_id: 1
}]
MockRequest.expect {
url: '/stores/1/products' # .json is cut off in setup
method: 'GET'
}, productsJSON
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [
id: 1
name: 'Store One'
]
store = new @Store id: 1
products = store.get('products')
delay ->
deepEqual (products.map (product) -> product.toJSON()), productsJSON
asyncTest 'hasMany formats the URL to /roots/id/plural when polymorphic', 1, ->
@Store.hasMany 'products', {namespace: @, as: 'subject'}
@Product.belongsTo 'subject', {namespace: @, polymorphic: true}
productsJSON = [{
id: 1
name: 'Product One'
cost: 10
subject_id: 1
subject_type: 'Store'
}, {
id: 2
name: 'PI:NAME:<NAME>END_PI'
cost: 10
subject_id: 1
subject_type: 'Store'
}]
MockRequest.expect {
url: '/stores/1/products' # .json is cut off in setup
method: 'GET'
}, productsJSON
MockRequest.expect {
url: '/stores/1'
method: 'GET'
}, [
id: 1
name: 'Store One'
]
store = new @Store id: 1
products = store.get('products')
delay ->
deepEqual (products.map (product) -> product.toJSON()), productsJSON
productJSON =
product:
name: 'PI:NAME:<NAME>END_PI'
id: 10
asyncTest 'updating in storage: should serialize array data without indicies', 1, ->
MockRequest.expect
url: '/products'
method: 'POST'
data: "product%5Bname%5D%5B%5D=a&product%5Bname%5D%5B%5D=b"
, productJSON
MockRequest.expect
url: '/products/10'
method: 'PUT'
data: "product%5Bid%5D=10&product%5Bname%5D=test&product%5Bcost%5D=10"
, product:
name: 'test'
cost: 10
MockRequest.expect
url: '/products/10'
method: 'GET'
, product:
name: 'PI:NAME:<NAME>END_PI'
cost: 10
product = new @Product(name: ["a", "b"])
@adapter.perform 'create', product, {}, (err, createdRecord) =>
throw err if err
product.set('cost', 10)
@adapter.perform 'update', product, {}, (err, updatedProduct) =>
throw err if err
@adapter.perform 'read', product, {}, (err, readProduct) ->
throw err if err
equal readProduct.get('cost', 10), 10
QUnit.start()
|
[
{
"context": "ll\n @update()\n \n update: ->\n i18nKey = 'code.'+@word\n translation = @componentTranslations[@w",
"end": 2613,
"score": 0.9771827459335327,
"start": 2607,
"tag": "KEY",
"value": "code.'"
}
] | app/views/play/level/tome/SpellTranslationView.coffee | cihatislamdede/codecombat | 4,858 | CocoView = require 'views/core/CocoView'
LevelComponent = require 'models/LevelComponent'
template = require 'templates/play/level/tome/spell_translation'
ace = require('lib/aceContainer')
Range = ace.require('ace/range').Range
TokenIterator = ace.require('ace/token_iterator').TokenIterator
utils = require 'core/utils'
module.exports = class SpellTranslationView extends CocoView
className: 'spell-translation-view'
template: template
events:
'mousemove': ->
@$el.hide()
constructor: (options) ->
super options
@ace = options.ace
levelComponents = @supermodel.getModels LevelComponent
@componentTranslations = levelComponents.reduce((acc, lc) ->
for doc in (lc.get('propertyDocumentation') ? [])
translated = utils.i18n(doc, 'name', null, false)
acc[doc.name] = translated if translated isnt doc.name
acc
, {})
@onMouseMove = _.throttle @onMouseMove, 25
afterRender: ->
super()
@ace.on 'mousemove', @onMouseMove
setTooltipText: (text) =>
@$el.find('code').text text
@$el.show().css(@pos)
isIdentifier: (t) ->
t and (_.any([/identifier/, /keyword/], (regex) -> regex.test(t.type)) or t.value is 'this')
onMouseMove: (e) =>
return if @destroyed
pos = e.getDocumentPosition()
it = new TokenIterator e.editor.session, pos.row, pos.column
endOfLine = it.getCurrentToken()?.index is it.$rowTokens.length - 1
while it.getCurrentTokenRow() is pos.row and not @isIdentifier(token = it.getCurrentToken())
break if endOfLine or not token # Don't iterate beyond end or beginning of line
it.stepBackward()
unless @isIdentifier(token)
@word = null
@update()
return
try
# Ace was breaking under some (?) conditions, dependent on mouse location.
# with $rowTokens = [] (but should have things)
start = it.getCurrentTokenColumn()
catch error
start = 0
end = start + token.value.length
if @isIdentifier(token)
@word = token.value
@markerRange = new Range pos.row, start, pos.row, end
@reposition(e.domEvent)
@update()
reposition: (e) ->
offsetX = e.offsetX ? e.clientX - $(e.target).offset().left
offsetY = e.offsetY ? e.clientY - $(e.target).offset().top
w = $(document).width() - 20
offsetX = w - $(e.target).offset().left - @$el.width() if e.clientX + @$el.width() > w
@pos = {left: offsetX + 80, top: offsetY - 20}
@$el.css(@pos)
onMouseOut: ->
@word = null
@markerRange = null
@update()
update: ->
i18nKey = 'code.'+@word
translation = @componentTranslations[@word] or $.t(i18nKey)
if @word and translation and translation not in [i18nKey, @word]
@setTooltipText translation
else
@$el.hide()
destroy: ->
@ace?.removeEventListener 'mousemove', @onMouseMove
super()
| 71294 | CocoView = require 'views/core/CocoView'
LevelComponent = require 'models/LevelComponent'
template = require 'templates/play/level/tome/spell_translation'
ace = require('lib/aceContainer')
Range = ace.require('ace/range').Range
TokenIterator = ace.require('ace/token_iterator').TokenIterator
utils = require 'core/utils'
module.exports = class SpellTranslationView extends CocoView
className: 'spell-translation-view'
template: template
events:
'mousemove': ->
@$el.hide()
constructor: (options) ->
super options
@ace = options.ace
levelComponents = @supermodel.getModels LevelComponent
@componentTranslations = levelComponents.reduce((acc, lc) ->
for doc in (lc.get('propertyDocumentation') ? [])
translated = utils.i18n(doc, 'name', null, false)
acc[doc.name] = translated if translated isnt doc.name
acc
, {})
@onMouseMove = _.throttle @onMouseMove, 25
afterRender: ->
super()
@ace.on 'mousemove', @onMouseMove
setTooltipText: (text) =>
@$el.find('code').text text
@$el.show().css(@pos)
isIdentifier: (t) ->
t and (_.any([/identifier/, /keyword/], (regex) -> regex.test(t.type)) or t.value is 'this')
onMouseMove: (e) =>
return if @destroyed
pos = e.getDocumentPosition()
it = new TokenIterator e.editor.session, pos.row, pos.column
endOfLine = it.getCurrentToken()?.index is it.$rowTokens.length - 1
while it.getCurrentTokenRow() is pos.row and not @isIdentifier(token = it.getCurrentToken())
break if endOfLine or not token # Don't iterate beyond end or beginning of line
it.stepBackward()
unless @isIdentifier(token)
@word = null
@update()
return
try
# Ace was breaking under some (?) conditions, dependent on mouse location.
# with $rowTokens = [] (but should have things)
start = it.getCurrentTokenColumn()
catch error
start = 0
end = start + token.value.length
if @isIdentifier(token)
@word = token.value
@markerRange = new Range pos.row, start, pos.row, end
@reposition(e.domEvent)
@update()
reposition: (e) ->
offsetX = e.offsetX ? e.clientX - $(e.target).offset().left
offsetY = e.offsetY ? e.clientY - $(e.target).offset().top
w = $(document).width() - 20
offsetX = w - $(e.target).offset().left - @$el.width() if e.clientX + @$el.width() > w
@pos = {left: offsetX + 80, top: offsetY - 20}
@$el.css(@pos)
onMouseOut: ->
@word = null
@markerRange = null
@update()
update: ->
i18nKey = '<KEY>+@word
translation = @componentTranslations[@word] or $.t(i18nKey)
if @word and translation and translation not in [i18nKey, @word]
@setTooltipText translation
else
@$el.hide()
destroy: ->
@ace?.removeEventListener 'mousemove', @onMouseMove
super()
| true | CocoView = require 'views/core/CocoView'
LevelComponent = require 'models/LevelComponent'
template = require 'templates/play/level/tome/spell_translation'
ace = require('lib/aceContainer')
Range = ace.require('ace/range').Range
TokenIterator = ace.require('ace/token_iterator').TokenIterator
utils = require 'core/utils'
module.exports = class SpellTranslationView extends CocoView
className: 'spell-translation-view'
template: template
events:
'mousemove': ->
@$el.hide()
constructor: (options) ->
super options
@ace = options.ace
levelComponents = @supermodel.getModels LevelComponent
@componentTranslations = levelComponents.reduce((acc, lc) ->
for doc in (lc.get('propertyDocumentation') ? [])
translated = utils.i18n(doc, 'name', null, false)
acc[doc.name] = translated if translated isnt doc.name
acc
, {})
@onMouseMove = _.throttle @onMouseMove, 25
afterRender: ->
super()
@ace.on 'mousemove', @onMouseMove
setTooltipText: (text) =>
@$el.find('code').text text
@$el.show().css(@pos)
isIdentifier: (t) ->
t and (_.any([/identifier/, /keyword/], (regex) -> regex.test(t.type)) or t.value is 'this')
onMouseMove: (e) =>
return if @destroyed
pos = e.getDocumentPosition()
it = new TokenIterator e.editor.session, pos.row, pos.column
endOfLine = it.getCurrentToken()?.index is it.$rowTokens.length - 1
while it.getCurrentTokenRow() is pos.row and not @isIdentifier(token = it.getCurrentToken())
break if endOfLine or not token # Don't iterate beyond end or beginning of line
it.stepBackward()
unless @isIdentifier(token)
@word = null
@update()
return
try
# Ace was breaking under some (?) conditions, dependent on mouse location.
# with $rowTokens = [] (but should have things)
start = it.getCurrentTokenColumn()
catch error
start = 0
end = start + token.value.length
if @isIdentifier(token)
@word = token.value
@markerRange = new Range pos.row, start, pos.row, end
@reposition(e.domEvent)
@update()
reposition: (e) ->
offsetX = e.offsetX ? e.clientX - $(e.target).offset().left
offsetY = e.offsetY ? e.clientY - $(e.target).offset().top
w = $(document).width() - 20
offsetX = w - $(e.target).offset().left - @$el.width() if e.clientX + @$el.width() > w
@pos = {left: offsetX + 80, top: offsetY - 20}
@$el.css(@pos)
onMouseOut: ->
@word = null
@markerRange = null
@update()
update: ->
i18nKey = 'PI:KEY:<KEY>END_PI+@word
translation = @componentTranslations[@word] or $.t(i18nKey)
if @word and translation and translation not in [i18nKey, @word]
@setTooltipText translation
else
@$el.hide()
destroy: ->
@ace?.removeEventListener 'mousemove', @onMouseMove
super()
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999140501022339,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/beatmap-discussions/message-length-counter.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div } from 'react-dom-factories'
bn = 'beatmap-discussion-message-length-counter'
export MessageLengthCounter = ({message, isTimeline}) ->
return null if !isTimeline
maxLength = BeatmapDiscussionHelper.MAX_LENGTH_TIMELINE
counterClass = bn
if message.length > maxLength
counterClass += " #{bn}--over"
else if message.length > (maxLength * 0.95)
counterClass += " #{bn}--almost-over"
div
className: counterClass
"#{message.length} / #{maxLength}"
| 80514 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div } from 'react-dom-factories'
bn = 'beatmap-discussion-message-length-counter'
export MessageLengthCounter = ({message, isTimeline}) ->
return null if !isTimeline
maxLength = BeatmapDiscussionHelper.MAX_LENGTH_TIMELINE
counterClass = bn
if message.length > maxLength
counterClass += " #{bn}--over"
else if message.length > (maxLength * 0.95)
counterClass += " #{bn}--almost-over"
div
className: counterClass
"#{message.length} / #{maxLength}"
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import * as React from 'react'
import { div } from 'react-dom-factories'
bn = 'beatmap-discussion-message-length-counter'
export MessageLengthCounter = ({message, isTimeline}) ->
return null if !isTimeline
maxLength = BeatmapDiscussionHelper.MAX_LENGTH_TIMELINE
counterClass = bn
if message.length > maxLength
counterClass += " #{bn}--over"
else if message.length > (maxLength * 0.95)
counterClass += " #{bn}--almost-over"
div
className: counterClass
"#{message.length} / #{maxLength}"
|
[
{
"context": "all products owned by the specified user.\r\n@author Nathan Klick\r\n@copyright QRef 2012\r\n###\r\nclass UserProductsRou",
"end": 679,
"score": 0.9998611807823181,
"start": 667,
"tag": "NAME",
"value": "Nathan Klick"
}
] | Workspace/QRef/NodeServer/src/router/routes/ajax/user/UserProductsRoute.coffee | qrefdev/qref | 0 | AjaxRoute = require('../../../AjaxRoute')
AjaxResponse = require('../../../../serialization/AjaxResponse')
UserAuth = require('../../../../security/UserAuth')
QRefDatabase = require('../../../../db/QRefDatabase')
###
Service route that allows the retrieval of all products owned by a specific user.
@example Service Methods
Request Format: application/json
Response Format: application/json
GET /services/ajax/user/:userId/products?token=:token
:userId - (Required) The user for which to get a list of owned products.
:token - (Required) A valid authentication token.
Retrieves all products owned by the specified user.
@author Nathan Klick
@copyright QRef 2012
###
class UserProductsRoute extends AjaxRoute
constructor: () ->
super [{ method: 'POST', path: ':userId/products' }, { method: 'GET', path: ':userId/products' }]
get: (req, res) =>
if not @.isValidRequest(req)
resp = new AjaxResponse()
resp.failure('Bad Request', 400)
res.json(resp, 200)
return
db = QRefDatabase.instance()
token = req.param('token')
userId = req.params.userId
UserAuth.validateToken(token, (err, isTokenValid) ->
if err? or not isTokenValid == true
resp = new AjaxResponse()
resp.failure('Not Authorized', 403)
res.json(resp, 200)
return
db.User.findById(userId, (err, usr) ->
if err?
resp = new AjaxResponse()
resp.failure(err, 500)
res.json(resp, 200)
return
if not usr?
resp = new AjaxResponse()
resp.failure('Not Found', 404)
res.json(resp, 200)
return
query = db.UserProduct.find()
query = query.where('user').equals(userId).populate('product')
if req.query?.pageSize? and req.query?.page?
query = query.skip(req.query.page * req.query.pageSize).limit(req.query.pageSize)
else if req.query?.pageSize? and not req.query?.page?
query = query.limit(req.query.pageSize)
else if not req.query?.pageSize? and req.query?.page?
query = query.skip(req.query.page * 25).limit(25)
query.exec((err, arrObjs) ->
if err?
resp = new AjaxResponse()
resp.failure('Internal Error', 500)
res.json(resp, 200)
return
db.UserProduct.where('user')
.equals(userId)
.count((err, count) ->
if err?
resp = new AjaxResponse()
resp.failure('Internal Error', 500)
res.json(resp, 200)
return
resp = new AjaxResponse()
arrProducts = []
for uProd in arrObjs
arrProducts.push(uProd.product.toObject())
mgr = new ProductManager()
mgr.expandAll(arrProducts, (err, eArrProducts) ->
if err?
resp.failure(err, 500)
res.json(resp, 200)
return
resp.addRecords(eArrProducts)
resp.setTotal(count)
res.json(resp, 200)
)
)
)
)
)
post: (req, res) =>
resp = new AjaxResponse()
resp.failure('Forbidden', 403)
res.json(resp, 200)
return
isValidRequest: (req) ->
if (req.query? and req.query?.token? and req.params?.userId?)
true
else
false
module.exports = new UserProductsRoute() | 121159 | AjaxRoute = require('../../../AjaxRoute')
AjaxResponse = require('../../../../serialization/AjaxResponse')
UserAuth = require('../../../../security/UserAuth')
QRefDatabase = require('../../../../db/QRefDatabase')
###
Service route that allows the retrieval of all products owned by a specific user.
@example Service Methods
Request Format: application/json
Response Format: application/json
GET /services/ajax/user/:userId/products?token=:token
:userId - (Required) The user for which to get a list of owned products.
:token - (Required) A valid authentication token.
Retrieves all products owned by the specified user.
@author <NAME>
@copyright QRef 2012
###
class UserProductsRoute extends AjaxRoute
constructor: () ->
super [{ method: 'POST', path: ':userId/products' }, { method: 'GET', path: ':userId/products' }]
get: (req, res) =>
if not @.isValidRequest(req)
resp = new AjaxResponse()
resp.failure('Bad Request', 400)
res.json(resp, 200)
return
db = QRefDatabase.instance()
token = req.param('token')
userId = req.params.userId
UserAuth.validateToken(token, (err, isTokenValid) ->
if err? or not isTokenValid == true
resp = new AjaxResponse()
resp.failure('Not Authorized', 403)
res.json(resp, 200)
return
db.User.findById(userId, (err, usr) ->
if err?
resp = new AjaxResponse()
resp.failure(err, 500)
res.json(resp, 200)
return
if not usr?
resp = new AjaxResponse()
resp.failure('Not Found', 404)
res.json(resp, 200)
return
query = db.UserProduct.find()
query = query.where('user').equals(userId).populate('product')
if req.query?.pageSize? and req.query?.page?
query = query.skip(req.query.page * req.query.pageSize).limit(req.query.pageSize)
else if req.query?.pageSize? and not req.query?.page?
query = query.limit(req.query.pageSize)
else if not req.query?.pageSize? and req.query?.page?
query = query.skip(req.query.page * 25).limit(25)
query.exec((err, arrObjs) ->
if err?
resp = new AjaxResponse()
resp.failure('Internal Error', 500)
res.json(resp, 200)
return
db.UserProduct.where('user')
.equals(userId)
.count((err, count) ->
if err?
resp = new AjaxResponse()
resp.failure('Internal Error', 500)
res.json(resp, 200)
return
resp = new AjaxResponse()
arrProducts = []
for uProd in arrObjs
arrProducts.push(uProd.product.toObject())
mgr = new ProductManager()
mgr.expandAll(arrProducts, (err, eArrProducts) ->
if err?
resp.failure(err, 500)
res.json(resp, 200)
return
resp.addRecords(eArrProducts)
resp.setTotal(count)
res.json(resp, 200)
)
)
)
)
)
post: (req, res) =>
resp = new AjaxResponse()
resp.failure('Forbidden', 403)
res.json(resp, 200)
return
isValidRequest: (req) ->
if (req.query? and req.query?.token? and req.params?.userId?)
true
else
false
module.exports = new UserProductsRoute() | true | AjaxRoute = require('../../../AjaxRoute')
AjaxResponse = require('../../../../serialization/AjaxResponse')
UserAuth = require('../../../../security/UserAuth')
QRefDatabase = require('../../../../db/QRefDatabase')
###
Service route that allows the retrieval of all products owned by a specific user.
@example Service Methods
Request Format: application/json
Response Format: application/json
GET /services/ajax/user/:userId/products?token=:token
:userId - (Required) The user for which to get a list of owned products.
:token - (Required) A valid authentication token.
Retrieves all products owned by the specified user.
@author PI:NAME:<NAME>END_PI
@copyright QRef 2012
###
class UserProductsRoute extends AjaxRoute
constructor: () ->
super [{ method: 'POST', path: ':userId/products' }, { method: 'GET', path: ':userId/products' }]
get: (req, res) =>
if not @.isValidRequest(req)
resp = new AjaxResponse()
resp.failure('Bad Request', 400)
res.json(resp, 200)
return
db = QRefDatabase.instance()
token = req.param('token')
userId = req.params.userId
UserAuth.validateToken(token, (err, isTokenValid) ->
if err? or not isTokenValid == true
resp = new AjaxResponse()
resp.failure('Not Authorized', 403)
res.json(resp, 200)
return
db.User.findById(userId, (err, usr) ->
if err?
resp = new AjaxResponse()
resp.failure(err, 500)
res.json(resp, 200)
return
if not usr?
resp = new AjaxResponse()
resp.failure('Not Found', 404)
res.json(resp, 200)
return
query = db.UserProduct.find()
query = query.where('user').equals(userId).populate('product')
if req.query?.pageSize? and req.query?.page?
query = query.skip(req.query.page * req.query.pageSize).limit(req.query.pageSize)
else if req.query?.pageSize? and not req.query?.page?
query = query.limit(req.query.pageSize)
else if not req.query?.pageSize? and req.query?.page?
query = query.skip(req.query.page * 25).limit(25)
query.exec((err, arrObjs) ->
if err?
resp = new AjaxResponse()
resp.failure('Internal Error', 500)
res.json(resp, 200)
return
db.UserProduct.where('user')
.equals(userId)
.count((err, count) ->
if err?
resp = new AjaxResponse()
resp.failure('Internal Error', 500)
res.json(resp, 200)
return
resp = new AjaxResponse()
arrProducts = []
for uProd in arrObjs
arrProducts.push(uProd.product.toObject())
mgr = new ProductManager()
mgr.expandAll(arrProducts, (err, eArrProducts) ->
if err?
resp.failure(err, 500)
res.json(resp, 200)
return
resp.addRecords(eArrProducts)
resp.setTotal(count)
res.json(resp, 200)
)
)
)
)
)
post: (req, res) =>
resp = new AjaxResponse()
resp.failure('Forbidden', 403)
res.json(resp, 200)
return
isValidRequest: (req) ->
if (req.query? and req.query?.token? and req.params?.userId?)
true
else
false
module.exports = new UserProductsRoute() |
[
{
"context": "ats.coffee\n# lotto-ionic\n# v0.0.2\n# Copyright 2016 Andreja Tonevski, https://github.com/atonevski/lotto-ionic\n# For l",
"end": 73,
"score": 0.9998828768730164,
"start": 57,
"tag": "NAME",
"value": "Andreja Tonevski"
},
{
"context": "pyright 2016 Andreja Tonevski, ht... | www/coffee/stats.coffee | atonevski/lotto-ionic | 0 | #
# stats.coffee
# lotto-ionic
# v0.0.2
# Copyright 2016 Andreja Tonevski, https://github.com/atonevski/lotto-ionic
# For license information see LICENSE in the repository
#
angular.module 'app.stats', []
.controller 'LottoStats', ($scope, $http, $ionicLoading) ->
$scope.hideChart = true
$scope.sbarChart = { }
$scope.sbarChart.title = 'Bar chart title'
$scope.sbarChart.width = $scope.width
$scope.sbarChart.height = $scope.height
# A: draw #, B: date, P..W: winning column lotto, X: winning column joker
query = """SELECT A, B, P, Q, R, S, T, U, V, W
ORDER BY B"""
$ionicLoading.show()
$http.get $scope.qurl(query)
.success (data, status) ->
res = $scope.to_json data
$scope.winColumns = res.table.rows.map (r) ->
a = $scope.eval_row r
{
draw: a[0]
date: a[1]
lotto: [ a[2], a[3], a[4], a[5], a[6], a[7], a[8], a[9] ]
}
$buildLottoFreqs()
$ionicLoading.hide()
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитните комбинации. Пробај подоцна."
duration: 3000
})
# change this method to have a parameter 'all', 'stresa', 'venus'
# and to produce different tables
$buildLottoFreqs = () ->
arr = [0..34].map (e) -> ([0..7].map () -> 0)
for row in $scope.winColumns
for i, n of row.lotto
arr[n][i]++
for i, a of arr
arr[i].push a.reduce (t, e) -> t + e
$scope.freqs = arr[1..-1].map (a, i) ->
{
number: (i + 1)
'1ви': a[0]
'2ри': a[1]
'3ти': a[2]
'4ти': a[3]
'5ти': a[4]
'6ти': a[5]
'7ми': a[6]
'доп.': a[7]
total: a[8]
}
$scope.sbarChart.data = $scope.freqs
$scope.sbarChart.labels = 'number'
$scope.sbarChart.labVals = [1..34].filter (v) -> v%2 isnt 0
$scope.sbarChart.categories = [
'1ви', '2ри', '3ти', '4ти', '5ти', '6ти', '7ми', 'доп.'
]
.controller 'JokerStats', ($scope, $http, $ionicLoading) ->
$scope.hideChart = true
$scope.sbarChart = { }
$scope.sbarChart.title = 'Bar chart title'
$scope.sbarChart.width = $scope.width
$scope.sbarChart.height = $scope.height
# A: draw #, B: date, P..W: winning column lotto, X: winning column joker
query = """SELECT A, B, X
ORDER BY B"""
$ionicLoading.show()
$http.get $scope.qurl(query)
.success (data, status) ->
res = $scope.to_json data
$scope.winColumns = res.table.rows.map (r) ->
a = $scope.eval_row r
{
draw: a[0]
date: a[1]
joker: a[2].split ''
}
$buildJokerFreqs()
$ionicLoading.hide()
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитните комбинации. Пробај подоцна."
duration: 3000
})
# change this method to have a parameter 'all', 'stresa', 'venus'
# and to produce different tables
$buildJokerFreqs = () ->
arr = [0..9].map (e) -> ([0..5].map () -> 0)
for row in $scope.winColumns
for i, n of row.joker
arr[n][i]++
for i, a of arr
arr[i].push a.reduce (t, e) -> t + e
$scope.freqs = arr.map (a, i) ->
{
number: i
'1ви': a[0]
'2ри': a[1]
'3ти': a[2]
'4ти': a[3]
'5ти': a[4]
'6ти': a[5]
total: a[6]
}
$scope.sbarChart.data = $scope.freqs
$scope.sbarChart.labels = 'number'
# $scope.sbarChart.labVals = [1..34].filter (v) -> v%2 isnt 0
$scope.sbarChart.categories = [
'1ви', '2ри', '3ти', '4ти', '5ти', '6ти' ]
.controller 'WinnersStats', ($scope, $http, $ionicLoading, $ionicPosition, $ionicScrollDelegate) ->
$scope.bubbleVisible = no
$scope.bubble = d3.select '#stats-list'
.append 'div'
.attr 'class', 'bubble bubble-left'
.attr 'id', 'winners-bubble'
.on('click', ()->
$scope.bubble.transition().duration 1000
.style 'opacity', 0
$scope.bubbleVisible = no
)
.style 'opacity', 0
$scope.showBubble = (event, idx) ->
return if $scope.bubbleVisible
event.stopPropagation()
$scope.bubbleVisible = yes
t = """
<div class='row row-no-padding'>
<div class='col col-offset-80 text-right positive'>
<small><i class='ion-close-round'></i></small>
</div>
</div>
<dl class='dl-horizontal'>
<dt>Година:</dt>
<dd>#{ $scope.winners[idx].year }</dd>
<dt>Вкупно кола:</dt>
<dd>#{ $scope.winners[idx].draws }</dd>
<hr />
<dt>Лото:</dt><dd></dd>
<hr />
<dt>Најмала уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].min }</dd>
<dt>Просечна уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].avg }</dd>
<dt>Најголема уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].max }</dd>
<hr />
<dt>Џокер:</dt><dd></dd>
<hr />
<dt>Најмала уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].jmin }</dd>
<dt>Просечна уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].javg }</dd>
<dt>Најголема уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].jmax }</dd>
</dl>
"""
el = angular.element document.querySelector "\#winners-#{$scope.winners[idx].year}"
offset = $ionicPosition.offset el
new_top = offset.top + $ionicScrollDelegate.getScrollPosition().top
$scope.bubble.html t
.style 'left', (event.pageX + 10) + 'px'
.style 'top', (new_top - 60) + 'px'
.style 'opacity', 1
# load data
$ionicLoading.show()
# count x7
qx7 = """
SELECT
YEAR(B), COUNT(D)
WHERE D > 0
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(qx7)
.success (data, status) ->
res = $scope.to_json data
$scope.winX7 = { }
res.table.rows.forEach (r) ->
a = $scope.eval_row r
$scope.winX7[a[0]] = a[1]
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитници x7. Пробај подоцна."
duration: 3000
})
# count x6p
qx6p= """
SELECT
YEAR(B), COUNT(E)
WHERE E > 0
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(qx6p)
.success (data, status) ->
res = $scope.to_json data
$scope.winX6p = { }
res.table.rows.forEach (r) ->
a = $scope.eval_row r
$scope.winX6p[a[0]] = a[1]
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитници x6+1. Пробај подоцна."
duration: 3000
})
# count x6
qx6 = """
SELECT
YEAR(B), COUNT(F)
WHERE F > 0
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(qx6)
.success (data, status) ->
res = $scope.to_json data
$scope.winX6 = { }
res.table.rows.forEach (r) ->
a = $scope.eval_row r
$scope.winX6[a[0]] = a[1]
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитници x6. Пробај подоцна."
duration: 3000
})
# A: draw #, B: date, P..W: winning column lotto, X: winning column joker
query = """
SELECT
YEAR(B), COUNT(A), MIN(C), MAX(C), AVG(C),
SUM(D), SUM(E), AVG(F), AVG(G), AVG(H),
MIN(I), MAX(I), AVG(I)
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(query)
.success (data, status) ->
res = $scope.to_json data
$scope.winners = res.table.rows.map (r) ->
a = $scope.eval_row r
{
year: a[0]
draws: a[1]
min: a[2]
max: a[3]
avg: Math.round a[4]
jmin: a[10]
jmax: a[11]
javg: Math.round a[12]
x7: a[5]
'x6+1': a[6]
x6: Math.round a[7]
x5: Math.round a[8]
x4: Math.round a[9]
}
$ionicLoading.hide()
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчита статистика на добитници. Пробај подоцна."
duration: 3000
})
| 136995 | #
# stats.coffee
# lotto-ionic
# v0.0.2
# Copyright 2016 <NAME>, https://github.com/atonevski/lotto-ionic
# For license information see LICENSE in the repository
#
angular.module 'app.stats', []
.controller 'LottoStats', ($scope, $http, $ionicLoading) ->
$scope.hideChart = true
$scope.sbarChart = { }
$scope.sbarChart.title = 'Bar chart title'
$scope.sbarChart.width = $scope.width
$scope.sbarChart.height = $scope.height
# A: draw #, B: date, P..W: winning column lotto, X: winning column joker
query = """SELECT A, B, P, Q, R, S, T, U, V, W
ORDER BY B"""
$ionicLoading.show()
$http.get $scope.qurl(query)
.success (data, status) ->
res = $scope.to_json data
$scope.winColumns = res.table.rows.map (r) ->
a = $scope.eval_row r
{
draw: a[0]
date: a[1]
lotto: [ a[2], a[3], a[4], a[5], a[6], a[7], a[8], a[9] ]
}
$buildLottoFreqs()
$ionicLoading.hide()
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитните комбинации. Пробај подоцна."
duration: 3000
})
# change this method to have a parameter 'all', 'stresa', 'venus'
# and to produce different tables
$buildLottoFreqs = () ->
arr = [0..34].map (e) -> ([0..7].map () -> 0)
for row in $scope.winColumns
for i, n of row.lotto
arr[n][i]++
for i, a of arr
arr[i].push a.reduce (t, e) -> t + e
$scope.freqs = arr[1..-1].map (a, i) ->
{
number: (i + 1)
'1ви': a[0]
'2ри': a[1]
'3ти': a[2]
'4ти': a[3]
'5ти': a[4]
'6ти': a[5]
'7ми': a[6]
'доп.': a[7]
total: a[8]
}
$scope.sbarChart.data = $scope.freqs
$scope.sbarChart.labels = 'number'
$scope.sbarChart.labVals = [1..34].filter (v) -> v%2 isnt 0
$scope.sbarChart.categories = [
'1ви', '2ри', '3ти', '4ти', '5ти', '6ти', '7ми', 'доп.'
]
  # JokerStats: loads every winning Joker number and charts how often each
  # digit occurred in each of its positions (stacked bar chart).
  .controller 'JokerStats', ($scope, $http, $ionicLoading) ->
    $scope.hideChart = true
    # Config object consumed by the stacked-bar chart directive.
    $scope.sbarChart = { }
    $scope.sbarChart.title = 'Bar chart title'
    $scope.sbarChart.width = $scope.width
    $scope.sbarChart.height = $scope.height
    # A: draw #, B: date, P..W: winning column lotto, X: winning column joker
    query = """SELECT A, B, X
               ORDER BY B"""
    $ionicLoading.show()
    $http.get $scope.qurl(query)
    .success (data, status) ->
      res = $scope.to_json data
      # One record per draw; the joker value is split into its characters
      # so each position can be counted separately.
      $scope.winColumns = res.table.rows.map (r) ->
        a = $scope.eval_row r
        {
          draw: a[0]
          date: a[1]
          joker: a[2].split ''
        }
      $buildJokerFreqs()
      $ionicLoading.hide()
    .error (err) ->
      # Replace the spinner with a self-dismissing error toast.
      $ionicLoading.show({
        template: "Не може да се вчитаат добитните комбинации. Пробај подоцна."
        duration: 3000
      })
    # change this method to have a parameter 'all', 'stresa', 'venus'
    # and to produce different tables
    $buildJokerFreqs = () ->
      # arr[digit][position]: 10 digit rows x 6 position counters
      arr = [0..9].map (e) -> ([0..5].map () -> 0)
      for row in $scope.winColumns
        for i, n of row.joker
          arr[n][i]++
      # append a per-digit total as the 7th entry of every row
      for i, a of arr
        arr[i].push a.reduce (t, e) -> t + e
      $scope.freqs = arr.map (a, i) ->
        {
          number: i
          '1ви': a[0]
          '2ри': a[1]
          '3ти': a[2]
          '4ти': a[3]
          '5ти': a[4]
          '6ти': a[5]
          total: a[6]
        }
      $scope.sbarChart.data = $scope.freqs
      $scope.sbarChart.labels = 'number'
      # $scope.sbarChart.labVals = [1..34].filter (v) -> v%2 isnt 0
      $scope.sbarChart.categories = [
        '1ви', '2ри', '3ти', '4ти', '5ти', '6ти' ]
  # WinnersStats: yearly winner/payment statistics plus a floating "bubble"
  # tooltip (a d3-managed div) showing the details of a tapped year.
  .controller 'WinnersStats', ($scope, $http, $ionicLoading, $ionicPosition, $ionicScrollDelegate) ->
    $scope.bubbleVisible = no
    # Tooltip element appended once to #stats-list; tapping the bubble
    # itself fades it out again.
    $scope.bubble = d3.select '#stats-list'
      .append 'div'
      .attr 'class', 'bubble bubble-left'
      .attr 'id', 'winners-bubble'
      .on('click', ()->
        $scope.bubble.transition().duration 1000
        .style 'opacity', 0
        $scope.bubbleVisible = no
      )
      .style 'opacity', 0
    # Fill the bubble with the stats of year $scope.winners[idx] and show it
    # next to the tapped list row.
    $scope.showBubble = (event, idx) ->
      return if $scope.bubbleVisible
      event.stopPropagation()
      $scope.bubbleVisible = yes
      t = """
        <div class='row row-no-padding'>
          <div class='col col-offset-80 text-right positive'>
            <small><i class='ion-close-round'></i></small>
          </div>
        </div>
        <dl class='dl-horizontal'>
          <dt>Година:</dt>
          <dd>#{ $scope.winners[idx].year }</dd>
          <dt>Вкупно кола:</dt>
          <dd>#{ $scope.winners[idx].draws }</dd>
          <hr />
          <dt>Лото:</dt><dd></dd>
          <hr />
          <dt>Најмала уплата:</dt>
          <dd>#{ $scope.thou_sep $scope.winners[idx].min }</dd>
          <dt>Просечна уплата:</dt>
          <dd>#{ $scope.thou_sep $scope.winners[idx].avg }</dd>
          <dt>Најголема уплата:</dt>
          <dd>#{ $scope.thou_sep $scope.winners[idx].max }</dd>
          <hr />
          <dt>Џокер:</dt><dd></dd>
          <hr />
          <dt>Најмала уплата:</dt>
          <dd>#{ $scope.thou_sep $scope.winners[idx].jmin }</dd>
          <dt>Просечна уплата:</dt>
          <dd>#{ $scope.thou_sep $scope.winners[idx].javg }</dd>
          <dt>Најголема уплата:</dt>
          <dd>#{ $scope.thou_sep $scope.winners[idx].jmax }</dd>
        </dl>
      """
      # Position relative to the tapped row, compensating for the current
      # ion-scroll offset.
      el = angular.element document.querySelector "\#winners-#{$scope.winners[idx].year}"
      offset = $ionicPosition.offset el
      new_top = offset.top + $ionicScrollDelegate.getScrollPosition().top
      $scope.bubble.html t
      .style 'left', (event.pageX + 10) + 'px'
      .style 'top', (new_top - 60) + 'px'
      .style 'opacity', 1
    # load data
    $ionicLoading.show()
    # count x7
    # NOTE(review): columns C..I appear to hold payments and per-tier winner
    # counts (D = 7-hit, E = 6+1, F = 6-hit, I = joker payment) — inferred
    # from the mapping below; confirm against the spreadsheet schema.
    qx7 = """
      SELECT
        YEAR(B), COUNT(D)
      WHERE D > 0
      GROUP BY YEAR(B)
      ORDER BY YEAR(B)
    """
    $http.get $scope.qurl(qx7)
    .success (data, status) ->
      # Map year -> number of draws with at least one 7-hit winner.
      res = $scope.to_json data
      $scope.winX7 = { }
      res.table.rows.forEach (r) ->
        a = $scope.eval_row r
        $scope.winX7[a[0]] = a[1]
    .error (err) ->
      $ionicLoading.show({
        template: "Не може да се вчитаат добитници x7. Пробај подоцна."
        duration: 3000
      })
    # count x6p
    qx6p= """
      SELECT
        YEAR(B), COUNT(E)
      WHERE E > 0
      GROUP BY YEAR(B)
      ORDER BY YEAR(B)
    """
    $http.get $scope.qurl(qx6p)
    .success (data, status) ->
      # Map year -> number of draws with a 6+1 winner.
      res = $scope.to_json data
      $scope.winX6p = { }
      res.table.rows.forEach (r) ->
        a = $scope.eval_row r
        $scope.winX6p[a[0]] = a[1]
    .error (err) ->
      $ionicLoading.show({
        template: "Не може да се вчитаат добитници x6+1. Пробај подоцна."
        duration: 3000
      })
    # count x6
    qx6 = """
      SELECT
        YEAR(B), COUNT(F)
      WHERE F > 0
      GROUP BY YEAR(B)
      ORDER BY YEAR(B)
    """
    $http.get $scope.qurl(qx6)
    .success (data, status) ->
      # Map year -> number of draws with a 6-hit winner.
      res = $scope.to_json data
      $scope.winX6 = { }
      res.table.rows.forEach (r) ->
        a = $scope.eval_row r
        $scope.winX6[a[0]] = a[1]
    .error (err) ->
      $ionicLoading.show({
        template: "Не може да се вчитаат добитници x6. Пробај подоцна."
        duration: 3000
      })
    # A: draw #, B: date, P..W: winning column lotto, X: winning column joker
    # Yearly aggregates: draw count, min/max/avg lotto payment, per-tier
    # winner sums/averages and min/max/avg joker payment.
    query = """
      SELECT
        YEAR(B), COUNT(A), MIN(C), MAX(C), AVG(C),
        SUM(D), SUM(E), AVG(F), AVG(G), AVG(H),
        MIN(I), MAX(I), AVG(I)
      GROUP BY YEAR(B)
      ORDER BY YEAR(B)
    """
    $http.get $scope.qurl(query)
    .success (data, status) ->
      res = $scope.to_json data
      # One record per year; averages are rounded for display.
      $scope.winners = res.table.rows.map (r) ->
        a = $scope.eval_row r
        {
          year: a[0]
          draws: a[1]
          min: a[2]
          max: a[3]
          avg: Math.round a[4]
          jmin: a[10]
          jmax: a[11]
          javg: Math.round a[12]
          x7: a[5]
          'x6+1': a[6]
          x6: Math.round a[7]
          x5: Math.round a[8]
          x4: Math.round a[9]
        }
      $ionicLoading.hide()
    .error (err) ->
      $ionicLoading.show({
        template: "Не може да се вчита статистика на добитници. Пробај подоцна."
        duration: 3000
      })
| true | #
# stats.coffee
# lotto-ionic
# v0.0.2
# Copyright 2016 PI:NAME:<NAME>END_PI, https://github.com/atonevski/lotto-ionic
# For license information see LICENSE in the repository
#
angular.module 'app.stats', []
  # LottoStats: loads every winning lotto combination and charts, for each
  # ball number, how often it appeared in each winning position.
  .controller 'LottoStats', ($scope, $http, $ionicLoading) ->
    $scope.hideChart = true
    # Config object consumed by the stacked-bar chart directive.
    $scope.sbarChart = { }
    $scope.sbarChart.title = 'Bar chart title'
    $scope.sbarChart.width = $scope.width
    $scope.sbarChart.height = $scope.height
    # A: draw #, B: date, P..W: winning column lotto, X: winning column joker
    query = """SELECT A, B, P, Q, R, S, T, U, V, W
               ORDER BY B"""
    $ionicLoading.show()
    $http.get $scope.qurl(query)
    .success (data, status) ->
      res = $scope.to_json data
      # One record per draw: the 8 drawn numbers (7 regular + bonus).
      $scope.winColumns = res.table.rows.map (r) ->
        a = $scope.eval_row r
        {
          draw: a[0]
          date: a[1]
          lotto: [ a[2], a[3], a[4], a[5], a[6], a[7], a[8], a[9] ]
        }
      $buildLottoFreqs()
      $ionicLoading.hide()
    .error (err) ->
      # Replace the spinner with a self-dismissing error toast.
      $ionicLoading.show({
        template: "Не може да се вчитаат добитните комбинации. Пробај подоцна."
        duration: 3000
      })
    # change this method to have a parameter 'all', 'stresa', 'venus'
    # and to produce different tables
    $buildLottoFreqs = () ->
      # arr[ball][position]: 35 rows x 8 position counters (row 0 unused,
      # assumes ball numbers 1..34 — confirm against the game rules)
      arr = [0..34].map (e) -> ([0..7].map () -> 0)
      for row in $scope.winColumns
        for i, n of row.lotto
          arr[n][i]++
      # append a per-number total as the 9th entry of every row
      for i, a of arr
        arr[i].push a.reduce (t, e) -> t + e
      # drop the unused 0 row; one record per ball number
      $scope.freqs = arr[1..-1].map (a, i) ->
        {
          number: (i + 1)
          '1ви': a[0]
          '2ри': a[1]
          '3ти': a[2]
          '4ти': a[3]
          '5ти': a[4]
          '6ти': a[5]
          '7ми': a[6]
          'доп.': a[7]
          total: a[8]
        }
      $scope.sbarChart.data = $scope.freqs
      $scope.sbarChart.labels = 'number'
      # label only the odd numbers so the axis stays readable
      $scope.sbarChart.labVals = [1..34].filter (v) -> v%2 isnt 0
      $scope.sbarChart.categories = [
        '1ви', '2ри', '3ти', '4ти', '5ти', '6ти', '7ми', 'доп.'
      ]
.controller 'JokerStats', ($scope, $http, $ionicLoading) ->
$scope.hideChart = true
$scope.sbarChart = { }
$scope.sbarChart.title = 'Bar chart title'
$scope.sbarChart.width = $scope.width
$scope.sbarChart.height = $scope.height
# A: draw #, B: date, P..W: winning column lotto, X: winning column joker
query = """SELECT A, B, X
ORDER BY B"""
$ionicLoading.show()
$http.get $scope.qurl(query)
.success (data, status) ->
res = $scope.to_json data
$scope.winColumns = res.table.rows.map (r) ->
a = $scope.eval_row r
{
draw: a[0]
date: a[1]
joker: a[2].split ''
}
$buildJokerFreqs()
$ionicLoading.hide()
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитните комбинации. Пробај подоцна."
duration: 3000
})
# change this method to have a parameter 'all', 'stresa', 'venus'
# and to produce different tables
$buildJokerFreqs = () ->
arr = [0..9].map (e) -> ([0..5].map () -> 0)
for row in $scope.winColumns
for i, n of row.joker
arr[n][i]++
for i, a of arr
arr[i].push a.reduce (t, e) -> t + e
$scope.freqs = arr.map (a, i) ->
{
number: i
'1ви': a[0]
'2ри': a[1]
'3ти': a[2]
'4ти': a[3]
'5ти': a[4]
'6ти': a[5]
total: a[6]
}
$scope.sbarChart.data = $scope.freqs
$scope.sbarChart.labels = 'number'
# $scope.sbarChart.labVals = [1..34].filter (v) -> v%2 isnt 0
$scope.sbarChart.categories = [
'1ви', '2ри', '3ти', '4ти', '5ти', '6ти' ]
.controller 'WinnersStats', ($scope, $http, $ionicLoading, $ionicPosition, $ionicScrollDelegate) ->
$scope.bubbleVisible = no
$scope.bubble = d3.select '#stats-list'
.append 'div'
.attr 'class', 'bubble bubble-left'
.attr 'id', 'winners-bubble'
.on('click', ()->
$scope.bubble.transition().duration 1000
.style 'opacity', 0
$scope.bubbleVisible = no
)
.style 'opacity', 0
$scope.showBubble = (event, idx) ->
return if $scope.bubbleVisible
event.stopPropagation()
$scope.bubbleVisible = yes
t = """
<div class='row row-no-padding'>
<div class='col col-offset-80 text-right positive'>
<small><i class='ion-close-round'></i></small>
</div>
</div>
<dl class='dl-horizontal'>
<dt>Година:</dt>
<dd>#{ $scope.winners[idx].year }</dd>
<dt>Вкупно кола:</dt>
<dd>#{ $scope.winners[idx].draws }</dd>
<hr />
<dt>Лото:</dt><dd></dd>
<hr />
<dt>Најмала уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].min }</dd>
<dt>Просечна уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].avg }</dd>
<dt>Најголема уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].max }</dd>
<hr />
<dt>Џокер:</dt><dd></dd>
<hr />
<dt>Најмала уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].jmin }</dd>
<dt>Просечна уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].javg }</dd>
<dt>Најголема уплата:</dt>
<dd>#{ $scope.thou_sep $scope.winners[idx].jmax }</dd>
</dl>
"""
el = angular.element document.querySelector "\#winners-#{$scope.winners[idx].year}"
offset = $ionicPosition.offset el
new_top = offset.top + $ionicScrollDelegate.getScrollPosition().top
$scope.bubble.html t
.style 'left', (event.pageX + 10) + 'px'
.style 'top', (new_top - 60) + 'px'
.style 'opacity', 1
# load data
$ionicLoading.show()
# count x7
qx7 = """
SELECT
YEAR(B), COUNT(D)
WHERE D > 0
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(qx7)
.success (data, status) ->
res = $scope.to_json data
$scope.winX7 = { }
res.table.rows.forEach (r) ->
a = $scope.eval_row r
$scope.winX7[a[0]] = a[1]
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитници x7. Пробај подоцна."
duration: 3000
})
# count x6p
qx6p= """
SELECT
YEAR(B), COUNT(E)
WHERE E > 0
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(qx6p)
.success (data, status) ->
res = $scope.to_json data
$scope.winX6p = { }
res.table.rows.forEach (r) ->
a = $scope.eval_row r
$scope.winX6p[a[0]] = a[1]
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитници x6+1. Пробај подоцна."
duration: 3000
})
# count x6
qx6 = """
SELECT
YEAR(B), COUNT(F)
WHERE F > 0
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(qx6)
.success (data, status) ->
res = $scope.to_json data
$scope.winX6 = { }
res.table.rows.forEach (r) ->
a = $scope.eval_row r
$scope.winX6[a[0]] = a[1]
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчитаат добитници x6. Пробај подоцна."
duration: 3000
})
# A: draw #, B: date, P..W: winning column lotto, X: winning column joker
query = """
SELECT
YEAR(B), COUNT(A), MIN(C), MAX(C), AVG(C),
SUM(D), SUM(E), AVG(F), AVG(G), AVG(H),
MIN(I), MAX(I), AVG(I)
GROUP BY YEAR(B)
ORDER BY YEAR(B)
"""
$http.get $scope.qurl(query)
.success (data, status) ->
res = $scope.to_json data
$scope.winners = res.table.rows.map (r) ->
a = $scope.eval_row r
{
year: a[0]
draws: a[1]
min: a[2]
max: a[3]
avg: Math.round a[4]
jmin: a[10]
jmax: a[11]
javg: Math.round a[12]
x7: a[5]
'x6+1': a[6]
x6: Math.round a[7]
x5: Math.round a[8]
x4: Math.round a[9]
}
$ionicLoading.hide()
.error (err) ->
$ionicLoading.show({
template: "Не може да се вчита статистика на добитници. Пробај подоцна."
duration: 3000
})
|
[
{
"context": ">\n icon:\n name: \"ma-person\"\n pw: \"ma-vpn_key\"\n activeLabel: \"active\"\n initClass:\n mo",
"end": 242,
"score": 0.3864552974700928,
"start": 238,
"tag": "PASSWORD",
"value": "vpn_"
}
] | src/materialize.coffee | ceri-widgets/ceri-login-modal | 0 | module.exports =
mixins: [
require("ceri-progress/mixin")(require("ceri-progress/materialize"))
require("ceri-toaster/mixin")(require("ceri-toaster/materialize"))
]
data: ->
icon:
name: "ma-person"
pw: "ma-vpn_key"
activeLabel: "active"
initClass:
modal: "materialize login-modal"
content: "modal-content"
header: "center-align"
nameContainer: "input-field col s12"
nameIcon: "prefix"
pwContainer: "input-field col s12"
pwIcon: "prefix"
btnContainer: "col s6 m4 push-s3 push-m4 center-align"
btn: "btn" | 184173 | module.exports =
mixins: [
require("ceri-progress/mixin")(require("ceri-progress/materialize"))
require("ceri-toaster/mixin")(require("ceri-toaster/materialize"))
]
data: ->
icon:
name: "ma-person"
pw: "ma-<PASSWORD>key"
activeLabel: "active"
initClass:
modal: "materialize login-modal"
content: "modal-content"
header: "center-align"
nameContainer: "input-field col s12"
nameIcon: "prefix"
pwContainer: "input-field col s12"
pwIcon: "prefix"
btnContainer: "col s6 m4 push-s3 push-m4 center-align"
btn: "btn" | true | module.exports =
mixins: [
require("ceri-progress/mixin")(require("ceri-progress/materialize"))
require("ceri-toaster/mixin")(require("ceri-toaster/materialize"))
]
data: ->
icon:
name: "ma-person"
pw: "ma-PI:PASSWORD:<PASSWORD>END_PIkey"
activeLabel: "active"
initClass:
modal: "materialize login-modal"
content: "modal-content"
header: "center-align"
nameContainer: "input-field col s12"
nameIcon: "prefix"
pwContainer: "input-field col s12"
pwIcon: "prefix"
btnContainer: "col s6 m4 push-s3 push-m4 center-align"
btn: "btn" |
[
{
"context": "uires source-map-support\n@requires restify\n@author Isaac Johnston <isaac.johnston@joukou.com>\n###\n\nrequire( 'source",
"end": 673,
"score": 0.9998772740364075,
"start": 659,
"tag": "NAME",
"value": "Isaac Johnston"
},
{
"context": "support\n@requires restify\n@autho... | src/server.coffee | joukou/joukou-api | 0 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
@module joukou-api/server
@requires source-map-support
@requires restify
@author Isaac Johnston <isaac.johnston@joukou.com>
###
require( 'source-map-support' ).install()
restify = require( 'restify' )
authn = require( './authn' )
cors = require( './cors' )
env = require( './env' )
hal = require( './hal' )
routes = require( './routes' )
LoggerFactory = require( './log/LoggerFactory' )
module.exports = server = restify.createServer(
name: env.getServerName()
version: env.getVersion()
formatters:
'application/json': hal.formatter
log: LoggerFactory.getLogger( name: 'server' )
acceptable: [
'application/json'
'application/hal+json'
]
)
server.pre( cors.preflight )
server.use( cors.actual )
server.use( restify.acceptParser( server.acceptable ) )
server.use( restify.dateParser() )
server.use( restify.queryParser() )
server.use( restify.jsonp() )
server.use( restify.gzipResponse() )
server.use( restify.bodyParser( mapParams: false ) )
server.use( authn.middleware( ) )
server.use( hal.link( ) )
server.on( 'after', restify.auditLogger(
log: LoggerFactory.getLogger( name: 'audit' )
) )
routes.registerRoutes( server )
server.listen(
# Port 2101 is for develop/staging, 2201 is for production!
process.env.JOUKOU_API_PORT or 2101,
process.env.JOUKOU_API_HOST or 'localhost',
->
server.log.info(
'%s-%s listening at %s',
server.name,
env.getVersion(),
server.url
)
)
| 99347 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
@module joukou-api/server
@requires source-map-support
@requires restify
@author <NAME> <<EMAIL>>
###
require( 'source-map-support' ).install()
restify = require( 'restify' )
authn = require( './authn' )
cors = require( './cors' )
env = require( './env' )
hal = require( './hal' )
routes = require( './routes' )
LoggerFactory = require( './log/LoggerFactory' )
module.exports = server = restify.createServer(
name: env.getServerName()
version: env.getVersion()
formatters:
'application/json': hal.formatter
log: LoggerFactory.getLogger( name: 'server' )
acceptable: [
'application/json'
'application/hal+json'
]
)
server.pre( cors.preflight )
server.use( cors.actual )
server.use( restify.acceptParser( server.acceptable ) )
server.use( restify.dateParser() )
server.use( restify.queryParser() )
server.use( restify.jsonp() )
server.use( restify.gzipResponse() )
server.use( restify.bodyParser( mapParams: false ) )
server.use( authn.middleware( ) )
server.use( hal.link( ) )
server.on( 'after', restify.auditLogger(
log: LoggerFactory.getLogger( name: 'audit' )
) )
routes.registerRoutes( server )
server.listen(
# Port 2101 is for develop/staging, 2201 is for production!
process.env.JOUKOU_API_PORT or 2101,
process.env.JOUKOU_API_HOST or 'localhost',
->
server.log.info(
'%s-%s listening at %s',
server.name,
env.getVersion(),
server.url
)
)
| true | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
@module joukou-api/server
@requires source-map-support
@requires restify
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
require( 'source-map-support' ).install()
restify = require( 'restify' )
authn = require( './authn' )
cors = require( './cors' )
env = require( './env' )
hal = require( './hal' )
routes = require( './routes' )
LoggerFactory = require( './log/LoggerFactory' )
module.exports = server = restify.createServer(
name: env.getServerName()
version: env.getVersion()
formatters:
'application/json': hal.formatter
log: LoggerFactory.getLogger( name: 'server' )
acceptable: [
'application/json'
'application/hal+json'
]
)
server.pre( cors.preflight )
server.use( cors.actual )
server.use( restify.acceptParser( server.acceptable ) )
server.use( restify.dateParser() )
server.use( restify.queryParser() )
server.use( restify.jsonp() )
server.use( restify.gzipResponse() )
server.use( restify.bodyParser( mapParams: false ) )
server.use( authn.middleware( ) )
server.use( hal.link( ) )
server.on( 'after', restify.auditLogger(
log: LoggerFactory.getLogger( name: 'audit' )
) )
routes.registerRoutes( server )
server.listen(
# Port 2101 is for develop/staging, 2201 is for production!
process.env.JOUKOU_API_PORT or 2101,
process.env.JOUKOU_API_HOST or 'localhost',
->
server.log.info(
'%s-%s listening at %s',
server.name,
env.getVersion(),
server.url
)
)
|
[
{
"context": "#!vanilla\n\n# See: http://www.loria.fr/~rougier/teaching/numpy/scripts/gray-scott.py\n\nclass ",
"end": 41,
"score": 0.962641716003418,
"start": 39,
"tag": "USERNAME",
"value": "ro"
},
{
"context": "#!vanilla\n\n# See: http://www.loria.fr/~rougier/teaching/numpy/script... | main.coffee | stemblab/gray-scott | 4 | #!vanilla
# See: http://www.loria.fr/~rougier/teaching/numpy/scripts/gray-scott.py
class Model
N: 62
r: 20
rep = (x, n) -> ((x for [1..n]) for [1..n])
zeros = (n) -> rep 0, n
constructor: (@F=0.06, @k=0.062, @Du=0.19, @Dv=0.05) ->
z = => zeros(@N+2)
@U = z()
@V = z()
@du = z()
@dv = z()
@reset @F, @k
reset: (@F, @k) ->
@initCond(@U, 1, 0.5)
@initCond(@V, 0, 0.25)
initCond: (X, outVal, inVal) ->
for m in [1..@N]
for n in [1..@N]
X[m][n] = outVal
R = [@N/2-@r..@N/2+@r-1]
for m in R
for n in R
X[m][n] = inVal + 0.4*Math.random()
L: (X, m, n) ->
X[m][n-1] + X[m][n+1] + X[m-1][n] + X[m+1][n] - 4*X[m][n]
inc: (X, dx) ->
for m in [1..@N]
for n in [1..@N]
X[m][n] += dx[m][n]
step: (iterations) ->
for i in [1..iterations]
for m in [1..@N]
for n in [1..@N]
Lu = @L(@U, m, n)
Lv = @L(@V, m, n)
UVV = @U[m][n]*@V[m][n]*@V[m][n]
@du[m][n] = @Du*Lu - UVV + @F*(1-@U[m][n])
@dv[m][n] = @Dv*Lv + UVV - (@F+@k)*@V[m][n]
@inc @U, @du
@inc @V, @dv
class Heatmap
px: 4
constructor: (@id, @V) ->
@N = @V.length - 2
@initData()
$("#container").empty()
@heat = simpleheat(@id)
.data(@data)
.max(0.75)
.radius(@px, @px)
initData: ->
@data = []
for m in [0..@N+1]
for n in [0..@N+1]
@data.push [n*@px, m*@px, @V[m][n]]
mapModel: ->
i = 0
for m in [0..@N+1]
for n in [0..@N+1]
@data[i][2] = @V[m][n]
i++
draw: ->
@mapModel()
@heat.draw()
class Simulation
canvasId: "canvas"
iterationsPerSnapshot: 20
delay: 2000
tSnapshot: 40
pFamily: [
{s: "alpha", F: 0.02, k: 0.05}
{s: "epsilon", F: 0.02, k: 0.06}
{s: "kappa",F: 0.04, k: 0.06}
]
constructor: (@numSnapshots=100) ->
@clear()
@pIndex = 0
@setParams()
@model = new Model @F, @k
@heatmap = new Heatmap @canvasId, @model.V
@heatmap.draw()
@initButton()
$blab.simTId = setTimeout (=> @start()), @delay
clear: ->
clearTimeout $blab.simTId if $blab.simTId
initButton: ->
canvas = $("##{@canvasId}")
canvas.unbind()
canvas.click =>
@pIndex++
@pIndex = 0 if @pIndex > @pFamily.length-1
@setParams()
@reset @F, @k
setParams: ->
params = @pFamily[@pIndex]
@F = params.F
@k = params.k
$("#param_text").html "F=#{@F}, k=#{@k}"
start: ->
@snapshot = 0
@run()
reset: (@F, @k) ->
@clear()
@model.reset @F, @k
@start()
run: ->
return if @snapshot++ > @numSnapshots
@model.step @iterationsPerSnapshot
@heatmap.draw()
$blab.simTId = setTimeout (=> @run()), @tSnapshot # Recursion
new Simulation
| 181383 | #!vanilla
# See: http://www.loria.fr/~ro<NAME>/teaching/numpy/scripts/gray-scott.py
class Model
N: 62
r: 20
rep = (x, n) -> ((x for [1..n]) for [1..n])
zeros = (n) -> rep 0, n
constructor: (@F=0.06, @k=0.062, @Du=0.19, @Dv=0.05) ->
z = => zeros(@N+2)
@U = z()
@V = z()
@du = z()
@dv = z()
@reset @F, @k
reset: (@F, @k) ->
@initCond(@U, 1, 0.5)
@initCond(@V, 0, 0.25)
initCond: (X, outVal, inVal) ->
for m in [1..@N]
for n in [1..@N]
X[m][n] = outVal
R = [@N/2-@r..@N/2+@r-1]
for m in R
for n in R
X[m][n] = inVal + 0.4*Math.random()
L: (X, m, n) ->
X[m][n-1] + X[m][n+1] + X[m-1][n] + X[m+1][n] - 4*X[m][n]
inc: (X, dx) ->
for m in [1..@N]
for n in [1..@N]
X[m][n] += dx[m][n]
step: (iterations) ->
for i in [1..iterations]
for m in [1..@N]
for n in [1..@N]
Lu = @L(@U, m, n)
Lv = @L(@V, m, n)
UVV = @U[m][n]*@V[m][n]*@V[m][n]
@du[m][n] = @Du*Lu - UVV + @F*(1-@U[m][n])
@dv[m][n] = @Dv*Lv + UVV - (@F+@k)*@V[m][n]
@inc @U, @du
@inc @V, @dv
class Heatmap
px: 4
constructor: (@id, @V) ->
@N = @V.length - 2
@initData()
$("#container").empty()
@heat = simpleheat(@id)
.data(@data)
.max(0.75)
.radius(@px, @px)
initData: ->
@data = []
for m in [0..@N+1]
for n in [0..@N+1]
@data.push [n*@px, m*@px, @V[m][n]]
mapModel: ->
i = 0
for m in [0..@N+1]
for n in [0..@N+1]
@data[i][2] = @V[m][n]
i++
draw: ->
@mapModel()
@heat.draw()
class Simulation
canvasId: "canvas"
iterationsPerSnapshot: 20
delay: 2000
tSnapshot: 40
pFamily: [
{s: "alpha", F: 0.02, k: 0.05}
{s: "epsilon", F: 0.02, k: 0.06}
{s: "kappa",F: 0.04, k: 0.06}
]
constructor: (@numSnapshots=100) ->
@clear()
@pIndex = 0
@setParams()
@model = new Model @F, @k
@heatmap = new Heatmap @canvasId, @model.V
@heatmap.draw()
@initButton()
$blab.simTId = setTimeout (=> @start()), @delay
clear: ->
clearTimeout $blab.simTId if $blab.simTId
initButton: ->
canvas = $("##{@canvasId}")
canvas.unbind()
canvas.click =>
@pIndex++
@pIndex = 0 if @pIndex > @pFamily.length-1
@setParams()
@reset @F, @k
setParams: ->
params = @pFamily[@pIndex]
@F = params.F
@k = params.k
$("#param_text").html "F=#{@F}, k=#{@k}"
start: ->
@snapshot = 0
@run()
reset: (@F, @k) ->
@clear()
@model.reset @F, @k
@start()
run: ->
return if @snapshot++ > @numSnapshots
@model.step @iterationsPerSnapshot
@heatmap.draw()
$blab.simTId = setTimeout (=> @run()), @tSnapshot # Recursion
new Simulation
| true | #!vanilla
# See: http://www.loria.fr/~roPI:NAME:<NAME>END_PI/teaching/numpy/scripts/gray-scott.py
class Model
N: 62
r: 20
rep = (x, n) -> ((x for [1..n]) for [1..n])
zeros = (n) -> rep 0, n
constructor: (@F=0.06, @k=0.062, @Du=0.19, @Dv=0.05) ->
z = => zeros(@N+2)
@U = z()
@V = z()
@du = z()
@dv = z()
@reset @F, @k
reset: (@F, @k) ->
@initCond(@U, 1, 0.5)
@initCond(@V, 0, 0.25)
initCond: (X, outVal, inVal) ->
for m in [1..@N]
for n in [1..@N]
X[m][n] = outVal
R = [@N/2-@r..@N/2+@r-1]
for m in R
for n in R
X[m][n] = inVal + 0.4*Math.random()
L: (X, m, n) ->
X[m][n-1] + X[m][n+1] + X[m-1][n] + X[m+1][n] - 4*X[m][n]
inc: (X, dx) ->
for m in [1..@N]
for n in [1..@N]
X[m][n] += dx[m][n]
step: (iterations) ->
for i in [1..iterations]
for m in [1..@N]
for n in [1..@N]
Lu = @L(@U, m, n)
Lv = @L(@V, m, n)
UVV = @U[m][n]*@V[m][n]*@V[m][n]
@du[m][n] = @Du*Lu - UVV + @F*(1-@U[m][n])
@dv[m][n] = @Dv*Lv + UVV - (@F+@k)*@V[m][n]
@inc @U, @du
@inc @V, @dv
class Heatmap
px: 4
constructor: (@id, @V) ->
@N = @V.length - 2
@initData()
$("#container").empty()
@heat = simpleheat(@id)
.data(@data)
.max(0.75)
.radius(@px, @px)
initData: ->
@data = []
for m in [0..@N+1]
for n in [0..@N+1]
@data.push [n*@px, m*@px, @V[m][n]]
mapModel: ->
i = 0
for m in [0..@N+1]
for n in [0..@N+1]
@data[i][2] = @V[m][n]
i++
draw: ->
@mapModel()
@heat.draw()
class Simulation
canvasId: "canvas"
iterationsPerSnapshot: 20
delay: 2000
tSnapshot: 40
pFamily: [
{s: "alpha", F: 0.02, k: 0.05}
{s: "epsilon", F: 0.02, k: 0.06}
{s: "kappa",F: 0.04, k: 0.06}
]
constructor: (@numSnapshots=100) ->
@clear()
@pIndex = 0
@setParams()
@model = new Model @F, @k
@heatmap = new Heatmap @canvasId, @model.V
@heatmap.draw()
@initButton()
$blab.simTId = setTimeout (=> @start()), @delay
clear: ->
clearTimeout $blab.simTId if $blab.simTId
initButton: ->
canvas = $("##{@canvasId}")
canvas.unbind()
canvas.click =>
@pIndex++
@pIndex = 0 if @pIndex > @pFamily.length-1
@setParams()
@reset @F, @k
setParams: ->
params = @pFamily[@pIndex]
@F = params.F
@k = params.k
$("#param_text").html "F=#{@F}, k=#{@k}"
start: ->
@snapshot = 0
@run()
reset: (@F, @k) ->
@clear()
@model.reset @F, @k
@start()
run: ->
return if @snapshot++ > @numSnapshots
@model.step @iterationsPerSnapshot
@heatmap.draw()
$blab.simTId = setTimeout (=> @run()), @tSnapshot # Recursion
new Simulation
|
[
{
"context": "console.log \"Hi Folks!\"\n\n$(\".dropdown\").click ->\n $(this).find(\"ul\").t",
"end": 21,
"score": 0.9493117332458496,
"start": 16,
"tag": "NAME",
"value": "Folks"
}
] | src/coffee/stones.coffee | michaelgenesini/Stones-framework | 1 | console.log "Hi Folks!"
$(".dropdown").click ->
$(this).find("ul").toggleClass "open"
return | 84472 | console.log "Hi <NAME>!"
$(".dropdown").click ->
$(this).find("ul").toggleClass "open"
return | true | console.log "Hi PI:NAME:<NAME>END_PI!"
$(".dropdown").click ->
$(this).find("ul").toggleClass "open"
return |
[
{
"context": " \"use\":\"sig\",\n \"alg\":\"RS256\",\n \"n\":\"AJ4bmyK/fLoEMPuiR6uHOWlhjJRQFPunVxWHsG8uwPneJmPxCGPbboyVlCGtD1xsfHtygIu7zhfNbb1AiHW5pc3bi1k8udM3CHQUTuneudNtMkIODGm/pTV3nQ1TH1tr9ebquT360DTEhkmjv/5LZwsnOA0HAf/3GG9fu8gl55mhpKnyhWpkbrHryuh8cx8hUzLwi5Rr5gA1IrhQP9SFX2y68suSS0wp7HoQTIie6EX... | test/anvil-connect.angular.coffee | bauglir/connect-js | 9 | 'use strict'
describe 'Anvil Connect', ->
{Anvil,AnvilProvider,uri,nonce,$httpBackend,promise,jwk} = {}
config =
issuer: 'https://accounts.anvil.io'
client_id: 'uuid'
redirect_uri: 'https://my.app.com'
scope: ['other']
display: 'popup'
jwk:
"kty":"RSA",
"use":"sig",
"alg":"RS256",
"n":"AJ4bmyK/fLoEMPuiR6uHOWlhjJRQFPunVxWHsG8uwPneJmPxCGPbboyVlCGtD1xsfHtygIu7zhfNbb1AiHW5pc3bi1k8udM3CHQUTuneudNtMkIODGm/pTV3nQ1TH1tr9ebquT360DTEhkmjv/5LZwsnOA0HAf/3GG9fu8gl55mhpKnyhWpkbrHryuh8cx8hUzLwi5Rr5gA1IrhQP9SFX2y68suSS0wp7HoQTIie6EXy/G2OJi7kqJS0UjkXK7ZPqf56OGBm+TlYBmwyXdWZ3bBglnlPjBb67exSMiXmi+yeeFa52tWLZlOqNf6CWb2XrNf6PWCxt0NZ7V3HPOrjOmM=",
"e":"AQAB"
beforeEach module 'anvil'
beforeEach module ($injector) ->
AnvilProvider = $injector.get 'AnvilProvider'
AnvilProvider.configure config
#console.log('AFTER CONFIG', AnvilProvider)
beforeEach inject ($injector) ->
$httpBackend = $injector.get '$httpBackend'
Anvil = $injector.get 'Anvil'
describe 'setJWK', ->
describe 'with empty argument', ->
beforeEach ->
jwk =
kid: 'empty'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
localStorage['anvil.connect.jwk'] = JSON.stringify jwk
AnvilProvider.setJWK()
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toEqual JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toEqual jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'with object argument', ->
beforeEach ->
jwk =
kid: 'object'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
AnvilProvider.setJWK(jwk)
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toBe JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toBe jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'with array argument', ->
beforeEach ->
jwk =
kid: 'object'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
AnvilProvider.setJWK([jwk])
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toBe JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toBe jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'configure provider', ->
it 'should set the issuer', ->
expect(AnvilProvider.issuer).toBe config.issuer
it 'should set the default response type', ->
expect(AnvilProvider.params.response_type).toBe 'id_token token'
it 'should set the client id', ->
expect(AnvilProvider.params.client_id).toBe config.client_id
it 'should set the redirect uri', ->
expect(AnvilProvider.params.redirect_uri).toBe config.redirect_uri
it 'should set the default scope', ->
expect(AnvilProvider.params.scope).toContain 'openid'
expect(AnvilProvider.params.scope).toContain 'profile'
it 'should set additional scope', ->
expect(AnvilProvider.params.scope).toContain 'other'
it 'should set the display', ->
expect(AnvilProvider.display).toBe 'popup'
it 'should set the default display', ->
AnvilProvider.configure {}
expect(AnvilProvider.display).toBe 'page'
describe 'toFormUrlEncoded', ->
it 'should encode a string from an object', ->
encoded = Anvil.toFormUrlEncoded(AnvilProvider.params)
expect(encoded).toContain 'response_type=id_token%20token'
expect(encoded).toContain '&redirect_uri=https%3A%2F%2Fmy.app.com'
expect(encoded).toContain '&scope=openid%20profile%20other'
describe 'parseFormUrlEncoded', ->
it 'should decode and parse an encoded object', ->
decoded = Anvil.parseFormUrlEncoded('a=b%20c&d=e')
expect(decoded.a).toBe 'b c'
expect(decoded.d).toBe 'e'
describe 'parseUriFragment', ->
it 'should return a fragment value from a Url', ->
fragment = Anvil.getUrlFragment('https://host:port/path#a=b&c=d')
expect(fragment).toBe 'a=b&c=d'
describe 'popup', ->
it 'should return parameters for a popup window', ->
popup = Anvil.popup(700, 500)
expect(popup).toContain 'width=700,'
expect(popup).toContain 'height=500,'
expect(popup).toContain 'dialog=yes,'
expect(popup).toContain 'dependent=yes,'
expect(popup).toContain 'scrollbars=yes,'
expect(popup).toContain 'location=yes'
describe 'serialize', ->
beforeEach ->
delete localStorage['anvil.connect']
Anvil.session.access_token = 'random'
Anvil.serialize()
it 'should store the current session in localStorage', ->
expect(localStorage['anvil.connect']).toBeDefined()
describe 'deserialize', ->
it 'should retrieve and parse the current session from localStorage'
describe 'reset', ->
it 'should delete the current session from localStorage'
it 'should reset the session object'
it 'should remove the cookie value'
describe 'uri with "authorize" endpoint', ->
beforeEach ->
uri = Anvil.uri()
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/authorize?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "signin" endpoint', ->
beforeEach ->
uri = Anvil.uri('signin')
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/signin?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "signup" endpoint', ->
beforeEach ->
uri = Anvil.uri('signup')
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/signup?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "connect" endpoint', ->
it 'should contain issuer'
it 'should contain endpoint'
it 'should contain response type'
it 'should contain client id'
it 'should contain redirect uri'
it 'should contain scope'
it 'should contain nonce'
describe 'nonce without argument', ->
it 'should return a base64url encoded sha256 hash of a random value', ->
expect(Anvil.nonce().length).toBe 43
it 'should store the nonce in localStorage', ->
nonce = Anvil.nonce()
expect(localStorage['nonce'].length).toBe 10
describe 'nonce with argument', ->
beforeEach ->
nonce = Anvil.nonce()
it 'should verify an argument matching a hash of the value in localStorage', ->
expect(Anvil.nonce(nonce)).toBe true
it 'should not verify a mismatching argument', ->
expect(Anvil.nonce('WRONG')).toBe false
describe 'sha256url', ->
it 'should base64url encode the SHA 256 hash of a provided string'
describe 'headers', ->
it 'should add a bearer token Authorization header to an object', ->
Anvil.session = { access_token: 'random' }
expect(Anvil.headers()['Authorization']).toContain 'Bearer random'
describe 'request', ->
it 'should add a bearer token to an HTTP request', ->
uri = config.issuer + '/userinfo'
Anvil.session.access_token = 'random'
headers =
'Authorization': "Bearer #{Anvil.session.access_token}",
'Accept': 'application/json, text/plain, */*'
$httpBackend.expectGET(uri, headers).respond(200, {})
Anvil.request({ method: 'GET', url: uri })
$httpBackend.flush()
describe 'userInfo', ->
it 'should request user info from the provider', ->
uri = config.issuer + '/userinfo'
Anvil.session.access_token = 'random'
headers =
'Authorization': "Bearer #{Anvil.session.access_token}",
'Accept': 'application/json, text/plain, */*'
$httpBackend.expectGET(uri, headers).respond(200, {})
Anvil.userInfo()
$httpBackend.flush()
describe 'callback with error response', ->
beforeEach ->
localStorage['anvil.connect'] = '{}'
promise = Anvil.callback({ error: 'invalid' })
it 'should return a promise', ->
expect(promise.then).toBeDefined()
it 'should clear the session', ->
Anvil.callback({ error: 'invalid' })
expect(localStorage['anvil.connect']).toBeUndefined()
it 'should reject the promise'
describe 'callback with authorization response', ->
beforeEach ->
promise = Anvil.callback({ access_token: "eyJhbGciOiJSUzI1NiJ9.eyJqdGkiOiI0NTM1MDk5ZjY1NzBiOTBjZTE5ZiIsImlzcyI6Imh0dHA6Ly9sb2NhbGhvc3Q6MzAwMCIsInN1YiI6IjQwNzZmNDEyLTM3NGYtNGJjNi05MDlhLTFkOGViMWFhMjMzYyIsImF1ZCI6IjU4MTQ4YjcwLTg1YWEtNDcyNi1hZjdkLTQyYmQxMDlkY2M0OSIsImV4cCI6MTQxMzk0NDc1ODMzNSwiaWF0IjoxNDEzOTQxMTU4MzM1LCJzY29wZSI6Im9wZW5pZCBwcm9maWxlIn0.QuBrm0kb0NeVigV1vm_p6-xnGj0J0F_26PHUILtMhsa5-K2-W-0JtQ7o0xcoa7WKlBX66mkGDBKJSpA3kLi4lYEkSUUOo5utxwtrAaIS7wYlq--ECHhdpfHoYgdx4W06YBfmSekbQiVmtnBMOWJt2J6gmTphhwiE5ytL4fggU79LTg30mb-X9FJ_nRnFh_9EmnOLOpej8Jxw4gAQN6FEfcQGRomQ-rplP4cAs1i8Pt-3qYEmQSrjL_w8LqT69-MErhbCVknq7BgQqGcbJgYKOoQuRxWudkSWQljOaVmSdbjLeYwLilIlwkgWcsIuFuSSPtaCNmNhdn13ink4S5UuOQ" })
it 'should return a promise', ->
expect(promise.then).toBeDefined()
it 'should set session property on the service', ->
expect(Anvil.session.access_token).toBe "eyJhbGciOiJSUzI1NiJ9.eyJqdGkiOiI0NTM1MDk5ZjY1NzBiOTBjZTE5ZiIsImlzcyI6Imh0dHA6Ly9sb2NhbGhvc3Q6MzAwMCIsInN1YiI6IjQwNzZmNDEyLTM3NGYtNGJjNi05MDlhLTFkOGViMWFhMjMzYyIsImF1ZCI6IjU4MTQ4YjcwLTg1YWEtNDcyNi1hZjdkLTQyYmQxMDlkY2M0OSIsImV4cCI6MTQxMzk0NDc1ODMzNSwiaWF0IjoxNDEzOTQxMTU4MzM1LCJzY29wZSI6Im9wZW5pZCBwcm9maWxlIn0.QuBrm0kb0NeVigV1vm_p6-xnGj0J0F_26PHUILtMhsa5-K2-W-0JtQ7o0xcoa7WKlBX66mkGDBKJSpA3kLi4lYEkSUUOo5utxwtrAaIS7wYlq--ECHhdpfHoYgdx4W06YBfmSekbQiVmtnBMOWJt2J6gmTphhwiE5ytL4fggU79LTg30mb-X9FJ_nRnFh_9EmnOLOpej8Jxw4gAQN6FEfcQGRomQ-rplP4cAs1i8Pt-3qYEmQSrjL_w8LqT69-MErhbCVknq7BgQqGcbJgYKOoQuRxWudkSWQljOaVmSdbjLeYwLilIlwkgWcsIuFuSSPtaCNmNhdn13ink4S5UuOQ"
#it 'should serialize the session'
#it 'should resolve the promise'
describe 'authorize with location fragment', ->
it 'should invoke the callback with the parsed authorization response'
describe 'authorize with page display', ->
it 'should navigate to the authorize endpoint'
describe 'authorize with popup display', ->
it 'should open a new window'
it 'should attach a listener'
it 'should return a promise'
describe 'listener', ->
it 'should invoke the callback with parsed event data'
it 'should remove the listener'
describe 'connect', ->
describe 'signout', ->
| 221000 | 'use strict'
describe 'Anvil Connect', ->
{Anvil,AnvilProvider,uri,nonce,$httpBackend,promise,jwk} = {}
config =
issuer: 'https://accounts.anvil.io'
client_id: 'uuid'
redirect_uri: 'https://my.app.com'
scope: ['other']
display: 'popup'
jwk:
"kty":"RSA",
"use":"sig",
"alg":"RS256",
"n":"<KEY>
"e":"AQAB"
beforeEach module 'anvil'
beforeEach module ($injector) ->
AnvilProvider = $injector.get 'AnvilProvider'
AnvilProvider.configure config
#console.log('AFTER CONFIG', AnvilProvider)
beforeEach inject ($injector) ->
$httpBackend = $injector.get '$httpBackend'
Anvil = $injector.get 'Anvil'
describe 'setJWK', ->
describe 'with empty argument', ->
beforeEach ->
jwk =
kid: 'empty'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
localStorage['anvil.connect.jwk'] = JSON.stringify jwk
AnvilProvider.setJWK()
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toEqual JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toEqual jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'with object argument', ->
beforeEach ->
jwk =
kid: 'object'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
AnvilProvider.setJWK(jwk)
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toBe JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toBe jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'with array argument', ->
beforeEach ->
jwk =
kid: 'object'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
AnvilProvider.setJWK([jwk])
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toBe JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toBe jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'configure provider', ->
it 'should set the issuer', ->
expect(AnvilProvider.issuer).toBe config.issuer
it 'should set the default response type', ->
expect(AnvilProvider.params.response_type).toBe 'id_token token'
it 'should set the client id', ->
expect(AnvilProvider.params.client_id).toBe config.client_id
it 'should set the redirect uri', ->
expect(AnvilProvider.params.redirect_uri).toBe config.redirect_uri
it 'should set the default scope', ->
expect(AnvilProvider.params.scope).toContain 'openid'
expect(AnvilProvider.params.scope).toContain 'profile'
it 'should set additional scope', ->
expect(AnvilProvider.params.scope).toContain 'other'
it 'should set the display', ->
expect(AnvilProvider.display).toBe 'popup'
it 'should set the default display', ->
AnvilProvider.configure {}
expect(AnvilProvider.display).toBe 'page'
describe 'toFormUrlEncoded', ->
it 'should encode a string from an object', ->
encoded = Anvil.toFormUrlEncoded(AnvilProvider.params)
expect(encoded).toContain 'response_type=id_token%20token'
expect(encoded).toContain '&redirect_uri=https%3A%2F%2Fmy.app.com'
expect(encoded).toContain '&scope=openid%20profile%20other'
describe 'parseFormUrlEncoded', ->
it 'should decode and parse an encoded object', ->
decoded = Anvil.parseFormUrlEncoded('a=b%20c&d=e')
expect(decoded.a).toBe 'b c'
expect(decoded.d).toBe 'e'
describe 'parseUriFragment', ->
it 'should return a fragment value from a Url', ->
fragment = Anvil.getUrlFragment('https://host:port/path#a=b&c=d')
expect(fragment).toBe 'a=b&c=d'
describe 'popup', ->
it 'should return parameters for a popup window', ->
popup = Anvil.popup(700, 500)
expect(popup).toContain 'width=700,'
expect(popup).toContain 'height=500,'
expect(popup).toContain 'dialog=yes,'
expect(popup).toContain 'dependent=yes,'
expect(popup).toContain 'scrollbars=yes,'
expect(popup).toContain 'location=yes'
describe 'serialize', ->
beforeEach ->
delete localStorage['anvil.connect']
Anvil.session.access_token = '<PASSWORD>'
Anvil.serialize()
it 'should store the current session in localStorage', ->
expect(localStorage['anvil.connect']).toBeDefined()
describe 'deserialize', ->
it 'should retrieve and parse the current session from localStorage'
describe 'reset', ->
it 'should delete the current session from localStorage'
it 'should reset the session object'
it 'should remove the cookie value'
describe 'uri with "authorize" endpoint', ->
beforeEach ->
uri = Anvil.uri()
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/authorize?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "signin" endpoint', ->
beforeEach ->
uri = Anvil.uri('signin')
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/signin?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "signup" endpoint', ->
beforeEach ->
uri = Anvil.uri('signup')
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/signup?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "connect" endpoint', ->
it 'should contain issuer'
it 'should contain endpoint'
it 'should contain response type'
it 'should contain client id'
it 'should contain redirect uri'
it 'should contain scope'
it 'should contain nonce'
describe 'nonce without argument', ->
it 'should return a base64url encoded sha256 hash of a random value', ->
expect(Anvil.nonce().length).toBe 43
it 'should store the nonce in localStorage', ->
nonce = Anvil.nonce()
expect(localStorage['nonce'].length).toBe 10
describe 'nonce with argument', ->
beforeEach ->
nonce = Anvil.nonce()
it 'should verify an argument matching a hash of the value in localStorage', ->
expect(Anvil.nonce(nonce)).toBe true
it 'should not verify a mismatching argument', ->
expect(Anvil.nonce('WRONG')).toBe false
describe 'sha256url', ->
it 'should base64url encode the SHA 256 hash of a provided string'
describe 'headers', ->
it 'should add a bearer token Authorization header to an object', ->
Anvil.session = { access_token: '<PASSWORD>' }
expect(Anvil.headers()['Authorization']).toContain 'Bearer random'
describe 'request', ->
it 'should add a bearer token to an HTTP request', ->
uri = config.issuer + '/userinfo'
Anvil.session.access_token = '<PASSWORD>'
headers =
'Authorization': "Bearer #{Anvil.session.access_token}",
'Accept': 'application/json, text/plain, */*'
$httpBackend.expectGET(uri, headers).respond(200, {})
Anvil.request({ method: 'GET', url: uri })
$httpBackend.flush()
describe 'userInfo', ->
it 'should request user info from the provider', ->
uri = config.issuer + '/userinfo'
Anvil.session.access_token = 'random'
headers =
'Authorization': "Bearer #{Anvil.session.access_token}",
'Accept': 'application/json, text/plain, */*'
$httpBackend.expectGET(uri, headers).respond(200, {})
Anvil.userInfo()
$httpBackend.flush()
describe 'callback with error response', ->
beforeEach ->
localStorage['anvil.connect'] = '{}'
promise = Anvil.callback({ error: 'invalid' })
it 'should return a promise', ->
expect(promise.then).toBeDefined()
it 'should clear the session', ->
Anvil.callback({ error: 'invalid' })
expect(localStorage['anvil.connect']).toBeUndefined()
it 'should reject the promise'
describe 'callback with authorization response', ->
beforeEach ->
promise = Anvil.callback({ access_token: "<KEY>" })
it 'should return a promise', ->
expect(promise.then).toBeDefined()
it 'should set session property on the service', ->
expect(Anvil.session.access_token).toBe "<KEY>"
#it 'should serialize the session'
#it 'should resolve the promise'
describe 'authorize with location fragment', ->
it 'should invoke the callback with the parsed authorization response'
describe 'authorize with page display', ->
it 'should navigate to the authorize endpoint'
describe 'authorize with popup display', ->
it 'should open a new window'
it 'should attach a listener'
it 'should return a promise'
describe 'listener', ->
it 'should invoke the callback with parsed event data'
it 'should remove the listener'
describe 'connect', ->
describe 'signout', ->
| true | 'use strict'
describe 'Anvil Connect', ->
{Anvil,AnvilProvider,uri,nonce,$httpBackend,promise,jwk} = {}
config =
issuer: 'https://accounts.anvil.io'
client_id: 'uuid'
redirect_uri: 'https://my.app.com'
scope: ['other']
display: 'popup'
jwk:
"kty":"RSA",
"use":"sig",
"alg":"RS256",
"n":"PI:KEY:<KEY>END_PI
"e":"AQAB"
beforeEach module 'anvil'
beforeEach module ($injector) ->
AnvilProvider = $injector.get 'AnvilProvider'
AnvilProvider.configure config
#console.log('AFTER CONFIG', AnvilProvider)
beforeEach inject ($injector) ->
$httpBackend = $injector.get '$httpBackend'
Anvil = $injector.get 'Anvil'
describe 'setJWK', ->
describe 'with empty argument', ->
beforeEach ->
jwk =
kid: 'empty'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
localStorage['anvil.connect.jwk'] = JSON.stringify jwk
AnvilProvider.setJWK()
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toEqual JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toEqual jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'with object argument', ->
beforeEach ->
jwk =
kid: 'object'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
AnvilProvider.setJWK(jwk)
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toBe JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toBe jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'with array argument', ->
beforeEach ->
jwk =
kid: 'object'
kty: 'TEST'
use: 'sig'
alg: 'WTF'
n: 'h3xh3xh3x'
e: 'h3x'
AnvilProvider.setJWK([jwk])
it 'should serialize the JWK in localStorage', ->
expect(localStorage['anvil.connect.jwk']).toBe JSON.stringify(jwk)
it 'should set the jwk on the provider', ->
expect(AnvilProvider.jwk).toBe jwk
it 'should set the modulus on the provider', ->
expect(AnvilProvider.hN).toBe b64tohex(jwk.n)
it 'should set the exponent on the provider', ->
expect(AnvilProvider.hE).toBe b64tohex(jwk.e)
describe 'configure provider', ->
it 'should set the issuer', ->
expect(AnvilProvider.issuer).toBe config.issuer
it 'should set the default response type', ->
expect(AnvilProvider.params.response_type).toBe 'id_token token'
it 'should set the client id', ->
expect(AnvilProvider.params.client_id).toBe config.client_id
it 'should set the redirect uri', ->
expect(AnvilProvider.params.redirect_uri).toBe config.redirect_uri
it 'should set the default scope', ->
expect(AnvilProvider.params.scope).toContain 'openid'
expect(AnvilProvider.params.scope).toContain 'profile'
it 'should set additional scope', ->
expect(AnvilProvider.params.scope).toContain 'other'
it 'should set the display', ->
expect(AnvilProvider.display).toBe 'popup'
it 'should set the default display', ->
AnvilProvider.configure {}
expect(AnvilProvider.display).toBe 'page'
describe 'toFormUrlEncoded', ->
it 'should encode a string from an object', ->
encoded = Anvil.toFormUrlEncoded(AnvilProvider.params)
expect(encoded).toContain 'response_type=id_token%20token'
expect(encoded).toContain '&redirect_uri=https%3A%2F%2Fmy.app.com'
expect(encoded).toContain '&scope=openid%20profile%20other'
describe 'parseFormUrlEncoded', ->
it 'should decode and parse an encoded object', ->
decoded = Anvil.parseFormUrlEncoded('a=b%20c&d=e')
expect(decoded.a).toBe 'b c'
expect(decoded.d).toBe 'e'
describe 'parseUriFragment', ->
it 'should return a fragment value from a Url', ->
fragment = Anvil.getUrlFragment('https://host:port/path#a=b&c=d')
expect(fragment).toBe 'a=b&c=d'
describe 'popup', ->
it 'should return parameters for a popup window', ->
popup = Anvil.popup(700, 500)
expect(popup).toContain 'width=700,'
expect(popup).toContain 'height=500,'
expect(popup).toContain 'dialog=yes,'
expect(popup).toContain 'dependent=yes,'
expect(popup).toContain 'scrollbars=yes,'
expect(popup).toContain 'location=yes'
describe 'serialize', ->
beforeEach ->
delete localStorage['anvil.connect']
Anvil.session.access_token = 'PI:PASSWORD:<PASSWORD>END_PI'
Anvil.serialize()
it 'should store the current session in localStorage', ->
expect(localStorage['anvil.connect']).toBeDefined()
describe 'deserialize', ->
it 'should retrieve and parse the current session from localStorage'
describe 'reset', ->
it 'should delete the current session from localStorage'
it 'should reset the session object'
it 'should remove the cookie value'
describe 'uri with "authorize" endpoint', ->
beforeEach ->
uri = Anvil.uri()
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/authorize?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "signin" endpoint', ->
beforeEach ->
uri = Anvil.uri('signin')
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/signin?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "signup" endpoint', ->
beforeEach ->
uri = Anvil.uri('signup')
it 'should contain issuer', ->
expect(uri).toContain config.issuer
it 'should contain endpoint', ->
expect(uri).toContain '/signup?'
it 'should contain response type', ->
expect(uri).toContain 'id_token%20token'
it 'should contain client id', ->
expect(uri).toContain config.client_id
it 'should contain redirect uri', ->
expect(uri).toContain encodeURIComponent(config.redirect_uri)
it 'should contain scope', ->
expect(uri).toContain encodeURIComponent('openid profile other')
it 'should contain nonce', ->
expect(uri).toContain '&nonce='
describe 'uri with "connect" endpoint', ->
it 'should contain issuer'
it 'should contain endpoint'
it 'should contain response type'
it 'should contain client id'
it 'should contain redirect uri'
it 'should contain scope'
it 'should contain nonce'
describe 'nonce without argument', ->
it 'should return a base64url encoded sha256 hash of a random value', ->
expect(Anvil.nonce().length).toBe 43
it 'should store the nonce in localStorage', ->
nonce = Anvil.nonce()
expect(localStorage['nonce'].length).toBe 10
describe 'nonce with argument', ->
beforeEach ->
nonce = Anvil.nonce()
it 'should verify an argument matching a hash of the value in localStorage', ->
expect(Anvil.nonce(nonce)).toBe true
it 'should not verify a mismatching argument', ->
expect(Anvil.nonce('WRONG')).toBe false
describe 'sha256url', ->
it 'should base64url encode the SHA 256 hash of a provided string'
describe 'headers', ->
it 'should add a bearer token Authorization header to an object', ->
Anvil.session = { access_token: 'PI:KEY:<PASSWORD>END_PI' }
expect(Anvil.headers()['Authorization']).toContain 'Bearer random'
describe 'request', ->
it 'should add a bearer token to an HTTP request', ->
uri = config.issuer + '/userinfo'
Anvil.session.access_token = 'PI:PASSWORD:<PASSWORD>END_PI'
headers =
'Authorization': "Bearer #{Anvil.session.access_token}",
'Accept': 'application/json, text/plain, */*'
$httpBackend.expectGET(uri, headers).respond(200, {})
Anvil.request({ method: 'GET', url: uri })
$httpBackend.flush()
describe 'userInfo', ->
it 'should request user info from the provider', ->
uri = config.issuer + '/userinfo'
Anvil.session.access_token = 'random'
headers =
'Authorization': "Bearer #{Anvil.session.access_token}",
'Accept': 'application/json, text/plain, */*'
$httpBackend.expectGET(uri, headers).respond(200, {})
Anvil.userInfo()
$httpBackend.flush()
describe 'callback with error response', ->
beforeEach ->
localStorage['anvil.connect'] = '{}'
promise = Anvil.callback({ error: 'invalid' })
it 'should return a promise', ->
expect(promise.then).toBeDefined()
it 'should clear the session', ->
Anvil.callback({ error: 'invalid' })
expect(localStorage['anvil.connect']).toBeUndefined()
it 'should reject the promise'
describe 'callback with authorization response', ->
beforeEach ->
promise = Anvil.callback({ access_token: "PI:KEY:<KEY>END_PI" })
it 'should return a promise', ->
expect(promise.then).toBeDefined()
it 'should set session property on the service', ->
expect(Anvil.session.access_token).toBe "PI:KEY:<KEY>END_PI"
#it 'should serialize the session'
#it 'should resolve the promise'
describe 'authorize with location fragment', ->
it 'should invoke the callback with the parsed authorization response'
describe 'authorize with page display', ->
it 'should navigate to the authorize endpoint'
describe 'authorize with popup display', ->
it 'should open a new window'
it 'should attach a listener'
it 'should return a promise'
describe 'listener', ->
it 'should invoke the callback with parsed event data'
it 'should remove the listener'
describe 'connect', ->
describe 'signout', ->
|
[
{
"context": " hubot flip - Hubot flips a table\n#\n# Author:\n# jjasghar\n# Jason Dixon\n\nmodule.exports = (robot) ->\n\n r",
"end": 173,
"score": 0.9996629953384399,
"start": 165,
"tag": "USERNAME",
"value": "jjasghar"
},
{
"context": "- Hubot flips a table\n#\n# Author:\n# ... | scripts/fliptable.coffee | Drewzar/hayt | 5 | # Description:
# Make hubot flip a table
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot flip - Hubot flips a table
#
# Author:
# jjasghar
# Jason Dixon
module.exports = (robot) ->
robot.respond /flip\b/i, (msg) ->
flips = [
'(╯°□°)╯︵ ┻━┻',
'┬─┬ ノ( ゜-゜ノ)',
'(ノ ゜Д゜)ノ ︵ ┻━┻',
'(╯°□°)╯︵ ┻━┻ ︵ ╯(°□° ╯)',
'┬─┬ ︵ /(.□. \)',
'(ノಥ益ಥ)ノ ┻━┻',
'(ノ^_^)ノ┻━┻ ┬─┬ ノ( ^_^ノ)',
'(╯°Д°)╯︵ /(.□ . \)',
"(╯'□')╯︵ ┻━┻",
'(ノಥДಥ)ノ︵┻━┻・/',
'(/ .□.)\ ︵╰(゜Д゜)╯︵ /(.□. \)',
'(._.) ~ ︵ ┻━┻',
'ʕノ•ᴥ•ʔノ ︵ ┻━┻',
'(/¯◡ ‿ ◡)/¯ ~ ┻━┻',
'(/¯◡ ‿ ◡)/¯ ~ ┻━┻',
'┻━┻ ︵ ლ(⌒-⌒ლ)',
]
sets = [
'┬─┬ ノ( ゜-゜ノ)',
'┬─┬ ︵ /(.□. \)',
]
if /geckomuerto/i.test(msg.message.user.name.trim())
msg.send msg.random sets
else
msg.send msg.random flips
| 164394 | # Description:
# Make hubot flip a table
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot flip - Hubot flips a table
#
# Author:
# jjasghar
# <NAME>
module.exports = (robot) ->
robot.respond /flip\b/i, (msg) ->
flips = [
'(╯°□°)╯︵ ┻━┻',
'┬─┬ ノ( ゜-゜ノ)',
'(ノ ゜Д゜)ノ ︵ ┻━┻',
'(╯°□°)╯︵ ┻━┻ ︵ ╯(°□° ╯)',
'┬─┬ ︵ /(.□. \)',
'(ノಥ益ಥ)ノ ┻━┻',
'(ノ^_^)ノ┻━┻ ┬─┬ ノ( ^_^ノ)',
'(╯°Д°)╯︵ /(.□ . \)',
"(╯'□')╯︵ ┻━┻",
'(ノಥДಥ)ノ︵┻━┻・/',
'(/ .□.)\ ︵╰(゜Д゜)╯︵ /(.□. \)',
'(._.) ~ ︵ ┻━┻',
'ʕノ•ᴥ•ʔノ ︵ ┻━┻',
'(/¯◡ ‿ ◡)/¯ ~ ┻━┻',
'(/¯◡ ‿ ◡)/¯ ~ ┻━┻',
'┻━┻ ︵ ლ(⌒-⌒ლ)',
]
sets = [
'┬─┬ ノ( ゜-゜ノ)',
'┬─┬ ︵ /(.□. \)',
]
if /geckomuerto/i.test(msg.message.user.name.trim())
msg.send msg.random sets
else
msg.send msg.random flips
| true | # Description:
# Make hubot flip a table
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot flip - Hubot flips a table
#
# Author:
# jjasghar
# PI:NAME:<NAME>END_PI
module.exports = (robot) ->
robot.respond /flip\b/i, (msg) ->
flips = [
'(╯°□°)╯︵ ┻━┻',
'┬─┬ ノ( ゜-゜ノ)',
'(ノ ゜Д゜)ノ ︵ ┻━┻',
'(╯°□°)╯︵ ┻━┻ ︵ ╯(°□° ╯)',
'┬─┬ ︵ /(.□. \)',
'(ノಥ益ಥ)ノ ┻━┻',
'(ノ^_^)ノ┻━┻ ┬─┬ ノ( ^_^ノ)',
'(╯°Д°)╯︵ /(.□ . \)',
"(╯'□')╯︵ ┻━┻",
'(ノಥДಥ)ノ︵┻━┻・/',
'(/ .□.)\ ︵╰(゜Д゜)╯︵ /(.□. \)',
'(._.) ~ ︵ ┻━┻',
'ʕノ•ᴥ•ʔノ ︵ ┻━┻',
'(/¯◡ ‿ ◡)/¯ ~ ┻━┻',
'(/¯◡ ‿ ◡)/¯ ~ ┻━┻',
'┻━┻ ︵ ლ(⌒-⌒ლ)',
]
sets = [
'┬─┬ ノ( ゜-゜ノ)',
'┬─┬ ︵ /(.□. \)',
]
if /geckomuerto/i.test(msg.message.user.name.trim())
msg.send msg.random sets
else
msg.send msg.random flips
|
[
{
"context": "###\nCopyright (c) 2002-2013 \"Neo Technology,\"\nNetwork Engine for Objects in Lund AB [http://n",
"end": 43,
"score": 0.7343764305114746,
"start": 33,
"tag": "NAME",
"value": "Technology"
}
] | community/server/src/main/coffeescript/neo4j/webadmin/modules/console/models/HttpConsole.coffee | rebaze/neo4j | 1 | ###
Copyright (c) 2002-2013 "Neo Technology,"
Network Engine for Objects in Lund AB [http://neotechnology.com]
This file is part of Neo4j.
Neo4j is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
###
define ['./Console'], (Console) ->
class HttpConsole extends Console
statementRegex : /^((GET)|(PUT)|(POST)|(DELETE)) ([^ ]+)( (.+))?$/i
initialize : (opts) =>
@server = opts.server
@lang = opts.lang
@setPromptPrefix "#{@lang}> "
@set {"showPrompt":true},{silent:true}
executeStatement : (statement) ->
if @statementRegex.test statement
result = @statementRegex.exec statement
[method, url, data] = [result[1], result[6], result[8]]
if data
try
@server.web.ajax method, url, JSON.parse(data), @callSucceeded, @callFailed
catch e
@setResult ["Invalid JSON data."]
else
@server.web.ajax method, url, @callSucceeded, @callFailed
else
@setResult ["Invalid statement."]
setResult : (lines) ->
@set {"showPrompt":true},{silent:true}
@pushLines lines
callSucceeded : (responseData, type, response) =>
status = [response.status + " " + response.statusText]
lines = response.responseText.split "\n"
@setResult status.concat lines
callFailed : (response) =>
@callSucceeded null, null, arguments[0].req
| 114491 | ###
Copyright (c) 2002-2013 "Neo <NAME>,"
Network Engine for Objects in Lund AB [http://neotechnology.com]
This file is part of Neo4j.
Neo4j is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
###
define ['./Console'], (Console) ->
class HttpConsole extends Console
statementRegex : /^((GET)|(PUT)|(POST)|(DELETE)) ([^ ]+)( (.+))?$/i
initialize : (opts) =>
@server = opts.server
@lang = opts.lang
@setPromptPrefix "#{@lang}> "
@set {"showPrompt":true},{silent:true}
executeStatement : (statement) ->
if @statementRegex.test statement
result = @statementRegex.exec statement
[method, url, data] = [result[1], result[6], result[8]]
if data
try
@server.web.ajax method, url, JSON.parse(data), @callSucceeded, @callFailed
catch e
@setResult ["Invalid JSON data."]
else
@server.web.ajax method, url, @callSucceeded, @callFailed
else
@setResult ["Invalid statement."]
setResult : (lines) ->
@set {"showPrompt":true},{silent:true}
@pushLines lines
callSucceeded : (responseData, type, response) =>
status = [response.status + " " + response.statusText]
lines = response.responseText.split "\n"
@setResult status.concat lines
callFailed : (response) =>
@callSucceeded null, null, arguments[0].req
| true | ###
Copyright (c) 2002-2013 "Neo PI:NAME:<NAME>END_PI,"
Network Engine for Objects in Lund AB [http://neotechnology.com]
This file is part of Neo4j.
Neo4j is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
###
define ['./Console'], (Console) ->
class HttpConsole extends Console
statementRegex : /^((GET)|(PUT)|(POST)|(DELETE)) ([^ ]+)( (.+))?$/i
initialize : (opts) =>
@server = opts.server
@lang = opts.lang
@setPromptPrefix "#{@lang}> "
@set {"showPrompt":true},{silent:true}
executeStatement : (statement) ->
if @statementRegex.test statement
result = @statementRegex.exec statement
[method, url, data] = [result[1], result[6], result[8]]
if data
try
@server.web.ajax method, url, JSON.parse(data), @callSucceeded, @callFailed
catch e
@setResult ["Invalid JSON data."]
else
@server.web.ajax method, url, @callSucceeded, @callFailed
else
@setResult ["Invalid statement."]
setResult : (lines) ->
@set {"showPrompt":true},{silent:true}
@pushLines lines
callSucceeded : (responseData, type, response) =>
status = [response.status + " " + response.statusText]
lines = response.responseText.split "\n"
@setResult status.concat lines
callFailed : (response) =>
@callSucceeded null, null, arguments[0].req
|
[
{
"context": "\n\n onJsdoit = document.domain is 'jsrun.it'\n\n imgURL = if not onJsdoit t",
"end": 1929,
"score": 0.7361804246902466,
"start": 1927,
"tag": "EMAIL",
"value": "it"
}
] | js/main2.coffee | edom18/S3D | 0 | do (win = window, doc = window.document, exports = window) ->
#Import
{tan, cos, sin, PI} = Math
{Face2, Object3D, Line, Color, AmbientLight, DirectionalLight, Plate, Face, Cube, Texture, Triangle, Matrix4, Camera, Renderer, Scene, Vector3, Particle} = window.S3D
$ = (selector) ->
doc.querySelector selector
requestAnimFrame = do ->
return win.requestAnimationFrame or
win.webkitRequestAnimationFrame or
win.mozRequestAnimationFrame or
win.msRequestAnimationFrame or
(callback) ->
setTimeout callback, 16
DEG_TO_RAD = PI / 180
isTouch = 'ontouchstart' of window
MOUSE_DOWN = if isTouch then 'touchstart' else 'mousedown'
MOUSE_MOVE = if isTouch then 'touchmove' else 'mousemove'
MOUSE_UP = if isTouch then 'touchend' else 'mouseup'
textureImage = null
logoImage = null
photoImage = null
rotX = 0
rotY = 0
renderer = null
camera = null
scene = null
getVideo = ->
video = doc.getElementById 'video'
video.autoplay = true
video.loop = true
return video
init = ->
video = getVideo()
cv = doc.getElementById 'canvas'
ctx = cv.getContext '2d'
w = cv.width = win.innerWidth
h = cv.height = win.innerHeight
fov = 60
aspect = w / h
camera = new Camera 40, aspect, 0.1, 10000
camera.position.x = 10
camera.position.y = 20
camera.position.z = 200
#camera.up = new Vector3 1, 0, 0
camera.lookAt new Vector3 0, 50, 0
camera.lookAtLock = true
scene = new Scene
renderer = new Renderer cv, '#111'
#renderer.fog = false
#renderer.lighting = false
#renderer.wireframe = true
create = ->
onJsdoit = document.domain is 'jsrun.it'
imgURL = if not onJsdoit then 'img/aXjiA.png' else 'http://jsrun.it/assets/y/r/A/V/yrAVl.jpg'
imgHtml5LogoURL = if not onJsdoit then 'img/HTML5_Logo_512.png' else 'http://jsrun.it/assets/z/1/2/9/z129U.png'
imgPhotoURL = if not onJsdoit then 'img/photo.jpg' else 'http://jsrun.it/assets/k/M/J/J/kMJJS.png'
materials1 = [
imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL,
imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL
]
materials2 = [
video, video, video, video, video, video,
video, video, video, video, video, video
]
materials3 = [
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
]
cube1 = new Cube 50, 20, 20, 1, 1, 1, materials2
cube1.position.z = -50
cube1.position.y = 50
cube1.rotation.z = 30
cube1.scale.set(0.5, 0.5, 0.5)
cube2 = new Cube 20, 20, 20, 1, 1, 1, materials1
cube2.position.z = -150
cube2.position.y = 50
cube2.position.x = 50
cube3 = new Cube 20, 20, 20, 1, 1, 1, materials3
cube3.position.z = -350
cube3.position.x = 50
cube3.position.y = 80
plate1 = new Plate 50, 50, 1, 1, imgHtml5LogoURL, imgHtml5LogoURL
plate1.position.set -50, 10, -300
plate2 = new Plate 50, 50, 1, 1, video, video
plate2.position.set 0, 100, -500
line1 = new Line(0, 0, -200, 0, 0, 200, new Color(255, 0, 0, 0.3))
line2 = new Line(-200, 0, 0, 200, 0, 0, new Color(0, 255, 0, 0.3))
line3 = new Line(0, 200, 0, 0, -200, 0, new Color(0, 0, 255, 0.3))
particle1 = new Particle(new Vector3(50, 50, 30), 2000)
particle2 = new Particle(new Vector3(150, 50, 0), 3000)
particle3 = new Particle(new Vector3(250, 30, -150), 2500)
particle4 = new Particle(new Vector3(-150, 150, -250), 4000)
particle5 = new Particle(new Vector3(-250, 250, 50), 3500)
size = 500
container = new Object3D
container.position.x = -(size * 0.5)
container.position.z = -(size * 0.5)
for i in [0..(size / 10)]
z = i * 10
line = new Line(0, 0, z, size, 0, z, new Color(255, 255, 255, 0.3))
container.add line
for i in [0..(size / 10)]
x = i * 10
line = new Line(x, 0, 0, x, 0, size, new Color(255, 255, 255, 0.3))
container.add line
ambLight = new AmbientLight(0.1)
dirLight = new DirectionalLight(1.0, (new Vector3(0, 0, 1)).normalize())
scene.add ambLight
scene.add dirLight
scene.add particle1
scene.add particle2
scene.add particle3
scene.add particle4
scene.add particle5
scene.add plate1
scene.add plate2
scene.add container
scene.add cube1
scene.add cube2
scene.add cube3
scene.add line1
scene.add line2
scene.add line3
angle = 0
do _loop = ->
angle = (++angle % 360)
plate1.rotation.z = angle
plate2.rotation.x = angle * 3
cube1.rotation.z = angle
cube2.rotation.x = angle * 2
cube3.rotation.x = angle * 3
cube3.rotation.y = angle * 3
cube3.rotation.z = angle * 3
s = 1 + sin(angle * DEG_TO_RAD)
cube3.scale.set(s, s, s)
renderer.render scene, camera
requestAnimFrame _loop
create()
dragging = false
prevX = 0
prevY = 0
# Events
win.addEventListener 'mousewheel', (e) ->
camera.position.z += (e.wheelDelta / 10)
renderer.render scene, camera
e.preventDefault()
, false
base = 100
startZoom = 0
document.addEventListener 'gesturechange', (e) ->
num = e.scale * base - base
camera.position.z = startZoom - num
renderer.render scene, camera
e.preventDefault()
, false
document.addEventListener 'gesturestart', ->
startZoom = camera.position.z
, false
doc.addEventListener 'touchstart', (e) ->
e.preventDefault()
, false
doc.addEventListener MOUSE_DOWN, (e) ->
dragging = true
prevX = if isTouch then e.touches[0].pageX else e.pageX
prevY = if isTouch then e.touches[0].pageY else e.pageY
, false
moveX = camera.position.x
moveY = camera.position.y
doc.addEventListener MOUSE_MOVE, (e) ->
return if dragging is false
pageX = if isTouch then e.touches[0].pageX else e.pageX
pageY = if isTouch then e.touches[0].pageY else e.pageY
moveX -= (prevX - pageX) * 3
moveY += (prevY - pageY) * 3
camera.position.y = moveY
camera.position.x = moveX
prevX = pageX
prevY = pageY
renderer.render scene, camera
, false
doc.addEventListener MOUSE_UP, (e) ->
dragging = false
, false
# コントロール
btnFog = $('#fog')
btnLight = $('#light')
btnWire = $('#wire')
fog = true
light = true
wire = false
btnFog.addEventListener MOUSE_DOWN, ->
fog = !fog
type = if fog then 'ON' else 'OFF'
btnFog.value = "フォグ[#{type}]"
renderer.fog = fog
, false
btnLight.addEventListener MOUSE_DOWN, ->
light = !light
type = if light then 'ON' else 'OFF'
btnLight.value = "ライティング[#{type}]"
renderer.lighting = light
, false
btnWire.addEventListener MOUSE_DOWN, ->
wire = !wire
type = if wire then 'ON' else 'OFF'
btnWire.value = "ワイヤーフレーム[#{type}]"
renderer.wireframe = wire
, false
doc.addEventListener 'DOMContentLoaded', init, false
| 49798 | do (win = window, doc = window.document, exports = window) ->
#Import
{tan, cos, sin, PI} = Math
{Face2, Object3D, Line, Color, AmbientLight, DirectionalLight, Plate, Face, Cube, Texture, Triangle, Matrix4, Camera, Renderer, Scene, Vector3, Particle} = window.S3D
$ = (selector) ->
doc.querySelector selector
requestAnimFrame = do ->
return win.requestAnimationFrame or
win.webkitRequestAnimationFrame or
win.mozRequestAnimationFrame or
win.msRequestAnimationFrame or
(callback) ->
setTimeout callback, 16
DEG_TO_RAD = PI / 180
isTouch = 'ontouchstart' of window
MOUSE_DOWN = if isTouch then 'touchstart' else 'mousedown'
MOUSE_MOVE = if isTouch then 'touchmove' else 'mousemove'
MOUSE_UP = if isTouch then 'touchend' else 'mouseup'
textureImage = null
logoImage = null
photoImage = null
rotX = 0
rotY = 0
renderer = null
camera = null
scene = null
getVideo = ->
video = doc.getElementById 'video'
video.autoplay = true
video.loop = true
return video
init = ->
video = getVideo()
cv = doc.getElementById 'canvas'
ctx = cv.getContext '2d'
w = cv.width = win.innerWidth
h = cv.height = win.innerHeight
fov = 60
aspect = w / h
camera = new Camera 40, aspect, 0.1, 10000
camera.position.x = 10
camera.position.y = 20
camera.position.z = 200
#camera.up = new Vector3 1, 0, 0
camera.lookAt new Vector3 0, 50, 0
camera.lookAtLock = true
scene = new Scene
renderer = new Renderer cv, '#111'
#renderer.fog = false
#renderer.lighting = false
#renderer.wireframe = true
create = ->
onJsdoit = document.domain is 'jsrun.<EMAIL>'
imgURL = if not onJsdoit then 'img/aXjiA.png' else 'http://jsrun.it/assets/y/r/A/V/yrAVl.jpg'
imgHtml5LogoURL = if not onJsdoit then 'img/HTML5_Logo_512.png' else 'http://jsrun.it/assets/z/1/2/9/z129U.png'
imgPhotoURL = if not onJsdoit then 'img/photo.jpg' else 'http://jsrun.it/assets/k/M/J/J/kMJJS.png'
materials1 = [
imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL,
imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL
]
materials2 = [
video, video, video, video, video, video,
video, video, video, video, video, video
]
materials3 = [
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
]
cube1 = new Cube 50, 20, 20, 1, 1, 1, materials2
cube1.position.z = -50
cube1.position.y = 50
cube1.rotation.z = 30
cube1.scale.set(0.5, 0.5, 0.5)
cube2 = new Cube 20, 20, 20, 1, 1, 1, materials1
cube2.position.z = -150
cube2.position.y = 50
cube2.position.x = 50
cube3 = new Cube 20, 20, 20, 1, 1, 1, materials3
cube3.position.z = -350
cube3.position.x = 50
cube3.position.y = 80
plate1 = new Plate 50, 50, 1, 1, imgHtml5LogoURL, imgHtml5LogoURL
plate1.position.set -50, 10, -300
plate2 = new Plate 50, 50, 1, 1, video, video
plate2.position.set 0, 100, -500
line1 = new Line(0, 0, -200, 0, 0, 200, new Color(255, 0, 0, 0.3))
line2 = new Line(-200, 0, 0, 200, 0, 0, new Color(0, 255, 0, 0.3))
line3 = new Line(0, 200, 0, 0, -200, 0, new Color(0, 0, 255, 0.3))
particle1 = new Particle(new Vector3(50, 50, 30), 2000)
particle2 = new Particle(new Vector3(150, 50, 0), 3000)
particle3 = new Particle(new Vector3(250, 30, -150), 2500)
particle4 = new Particle(new Vector3(-150, 150, -250), 4000)
particle5 = new Particle(new Vector3(-250, 250, 50), 3500)
size = 500
container = new Object3D
container.position.x = -(size * 0.5)
container.position.z = -(size * 0.5)
for i in [0..(size / 10)]
z = i * 10
line = new Line(0, 0, z, size, 0, z, new Color(255, 255, 255, 0.3))
container.add line
for i in [0..(size / 10)]
x = i * 10
line = new Line(x, 0, 0, x, 0, size, new Color(255, 255, 255, 0.3))
container.add line
ambLight = new AmbientLight(0.1)
dirLight = new DirectionalLight(1.0, (new Vector3(0, 0, 1)).normalize())
scene.add ambLight
scene.add dirLight
scene.add particle1
scene.add particle2
scene.add particle3
scene.add particle4
scene.add particle5
scene.add plate1
scene.add plate2
scene.add container
scene.add cube1
scene.add cube2
scene.add cube3
scene.add line1
scene.add line2
scene.add line3
angle = 0
do _loop = ->
angle = (++angle % 360)
plate1.rotation.z = angle
plate2.rotation.x = angle * 3
cube1.rotation.z = angle
cube2.rotation.x = angle * 2
cube3.rotation.x = angle * 3
cube3.rotation.y = angle * 3
cube3.rotation.z = angle * 3
s = 1 + sin(angle * DEG_TO_RAD)
cube3.scale.set(s, s, s)
renderer.render scene, camera
requestAnimFrame _loop
create()
dragging = false
prevX = 0
prevY = 0
# Events
win.addEventListener 'mousewheel', (e) ->
camera.position.z += (e.wheelDelta / 10)
renderer.render scene, camera
e.preventDefault()
, false
base = 100
startZoom = 0
document.addEventListener 'gesturechange', (e) ->
num = e.scale * base - base
camera.position.z = startZoom - num
renderer.render scene, camera
e.preventDefault()
, false
document.addEventListener 'gesturestart', ->
startZoom = camera.position.z
, false
doc.addEventListener 'touchstart', (e) ->
e.preventDefault()
, false
doc.addEventListener MOUSE_DOWN, (e) ->
dragging = true
prevX = if isTouch then e.touches[0].pageX else e.pageX
prevY = if isTouch then e.touches[0].pageY else e.pageY
, false
moveX = camera.position.x
moveY = camera.position.y
doc.addEventListener MOUSE_MOVE, (e) ->
return if dragging is false
pageX = if isTouch then e.touches[0].pageX else e.pageX
pageY = if isTouch then e.touches[0].pageY else e.pageY
moveX -= (prevX - pageX) * 3
moveY += (prevY - pageY) * 3
camera.position.y = moveY
camera.position.x = moveX
prevX = pageX
prevY = pageY
renderer.render scene, camera
, false
doc.addEventListener MOUSE_UP, (e) ->
dragging = false
, false
# コントロール
btnFog = $('#fog')
btnLight = $('#light')
btnWire = $('#wire')
fog = true
light = true
wire = false
btnFog.addEventListener MOUSE_DOWN, ->
fog = !fog
type = if fog then 'ON' else 'OFF'
btnFog.value = "フォグ[#{type}]"
renderer.fog = fog
, false
btnLight.addEventListener MOUSE_DOWN, ->
light = !light
type = if light then 'ON' else 'OFF'
btnLight.value = "ライティング[#{type}]"
renderer.lighting = light
, false
btnWire.addEventListener MOUSE_DOWN, ->
wire = !wire
type = if wire then 'ON' else 'OFF'
btnWire.value = "ワイヤーフレーム[#{type}]"
renderer.wireframe = wire
, false
doc.addEventListener 'DOMContentLoaded', init, false
| true | do (win = window, doc = window.document, exports = window) ->
#Import
{tan, cos, sin, PI} = Math
{Face2, Object3D, Line, Color, AmbientLight, DirectionalLight, Plate, Face, Cube, Texture, Triangle, Matrix4, Camera, Renderer, Scene, Vector3, Particle} = window.S3D
$ = (selector) ->
doc.querySelector selector
requestAnimFrame = do ->
return win.requestAnimationFrame or
win.webkitRequestAnimationFrame or
win.mozRequestAnimationFrame or
win.msRequestAnimationFrame or
(callback) ->
setTimeout callback, 16
DEG_TO_RAD = PI / 180
isTouch = 'ontouchstart' of window
MOUSE_DOWN = if isTouch then 'touchstart' else 'mousedown'
MOUSE_MOVE = if isTouch then 'touchmove' else 'mousemove'
MOUSE_UP = if isTouch then 'touchend' else 'mouseup'
textureImage = null
logoImage = null
photoImage = null
rotX = 0
rotY = 0
renderer = null
camera = null
scene = null
getVideo = ->
video = doc.getElementById 'video'
video.autoplay = true
video.loop = true
return video
init = ->
video = getVideo()
cv = doc.getElementById 'canvas'
ctx = cv.getContext '2d'
w = cv.width = win.innerWidth
h = cv.height = win.innerHeight
fov = 60
aspect = w / h
camera = new Camera 40, aspect, 0.1, 10000
camera.position.x = 10
camera.position.y = 20
camera.position.z = 200
#camera.up = new Vector3 1, 0, 0
camera.lookAt new Vector3 0, 50, 0
camera.lookAtLock = true
scene = new Scene
renderer = new Renderer cv, '#111'
#renderer.fog = false
#renderer.lighting = false
#renderer.wireframe = true
create = ->
onJsdoit = document.domain is 'jsrun.PI:EMAIL:<EMAIL>END_PI'
imgURL = if not onJsdoit then 'img/aXjiA.png' else 'http://jsrun.it/assets/y/r/A/V/yrAVl.jpg'
imgHtml5LogoURL = if not onJsdoit then 'img/HTML5_Logo_512.png' else 'http://jsrun.it/assets/z/1/2/9/z129U.png'
imgPhotoURL = if not onJsdoit then 'img/photo.jpg' else 'http://jsrun.it/assets/k/M/J/J/kMJJS.png'
materials1 = [
imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL,
imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL, imgPhotoURL
]
materials2 = [
video, video, video, video, video, video,
video, video, video, video, video, video
]
materials3 = [
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
new Color(200, 0, 0, 1)
]
cube1 = new Cube 50, 20, 20, 1, 1, 1, materials2
cube1.position.z = -50
cube1.position.y = 50
cube1.rotation.z = 30
cube1.scale.set(0.5, 0.5, 0.5)
cube2 = new Cube 20, 20, 20, 1, 1, 1, materials1
cube2.position.z = -150
cube2.position.y = 50
cube2.position.x = 50
cube3 = new Cube 20, 20, 20, 1, 1, 1, materials3
cube3.position.z = -350
cube3.position.x = 50
cube3.position.y = 80
plate1 = new Plate 50, 50, 1, 1, imgHtml5LogoURL, imgHtml5LogoURL
plate1.position.set -50, 10, -300
plate2 = new Plate 50, 50, 1, 1, video, video
plate2.position.set 0, 100, -500
line1 = new Line(0, 0, -200, 0, 0, 200, new Color(255, 0, 0, 0.3))
line2 = new Line(-200, 0, 0, 200, 0, 0, new Color(0, 255, 0, 0.3))
line3 = new Line(0, 200, 0, 0, -200, 0, new Color(0, 0, 255, 0.3))
particle1 = new Particle(new Vector3(50, 50, 30), 2000)
particle2 = new Particle(new Vector3(150, 50, 0), 3000)
particle3 = new Particle(new Vector3(250, 30, -150), 2500)
particle4 = new Particle(new Vector3(-150, 150, -250), 4000)
particle5 = new Particle(new Vector3(-250, 250, 50), 3500)
size = 500
container = new Object3D
container.position.x = -(size * 0.5)
container.position.z = -(size * 0.5)
for i in [0..(size / 10)]
z = i * 10
line = new Line(0, 0, z, size, 0, z, new Color(255, 255, 255, 0.3))
container.add line
for i in [0..(size / 10)]
x = i * 10
line = new Line(x, 0, 0, x, 0, size, new Color(255, 255, 255, 0.3))
container.add line
ambLight = new AmbientLight(0.1)
dirLight = new DirectionalLight(1.0, (new Vector3(0, 0, 1)).normalize())
scene.add ambLight
scene.add dirLight
scene.add particle1
scene.add particle2
scene.add particle3
scene.add particle4
scene.add particle5
scene.add plate1
scene.add plate2
scene.add container
scene.add cube1
scene.add cube2
scene.add cube3
scene.add line1
scene.add line2
scene.add line3
angle = 0
do _loop = ->
angle = (++angle % 360)
plate1.rotation.z = angle
plate2.rotation.x = angle * 3
cube1.rotation.z = angle
cube2.rotation.x = angle * 2
cube3.rotation.x = angle * 3
cube3.rotation.y = angle * 3
cube3.rotation.z = angle * 3
s = 1 + sin(angle * DEG_TO_RAD)
cube3.scale.set(s, s, s)
renderer.render scene, camera
requestAnimFrame _loop
create()
dragging = false
prevX = 0
prevY = 0
# Events
win.addEventListener 'mousewheel', (e) ->
camera.position.z += (e.wheelDelta / 10)
renderer.render scene, camera
e.preventDefault()
, false
base = 100
startZoom = 0
document.addEventListener 'gesturechange', (e) ->
num = e.scale * base - base
camera.position.z = startZoom - num
renderer.render scene, camera
e.preventDefault()
, false
document.addEventListener 'gesturestart', ->
startZoom = camera.position.z
, false
doc.addEventListener 'touchstart', (e) ->
e.preventDefault()
, false
doc.addEventListener MOUSE_DOWN, (e) ->
dragging = true
prevX = if isTouch then e.touches[0].pageX else e.pageX
prevY = if isTouch then e.touches[0].pageY else e.pageY
, false
moveX = camera.position.x
moveY = camera.position.y
doc.addEventListener MOUSE_MOVE, (e) ->
return if dragging is false
pageX = if isTouch then e.touches[0].pageX else e.pageX
pageY = if isTouch then e.touches[0].pageY else e.pageY
moveX -= (prevX - pageX) * 3
moveY += (prevY - pageY) * 3
camera.position.y = moveY
camera.position.x = moveX
prevX = pageX
prevY = pageY
renderer.render scene, camera
, false
doc.addEventListener MOUSE_UP, (e) ->
dragging = false
, false
# コントロール
btnFog = $('#fog')
btnLight = $('#light')
btnWire = $('#wire')
fog = true
light = true
wire = false
btnFog.addEventListener MOUSE_DOWN, ->
fog = !fog
type = if fog then 'ON' else 'OFF'
btnFog.value = "フォグ[#{type}]"
renderer.fog = fog
, false
btnLight.addEventListener MOUSE_DOWN, ->
light = !light
type = if light then 'ON' else 'OFF'
btnLight.value = "ライティング[#{type}]"
renderer.lighting = light
, false
btnWire.addEventListener MOUSE_DOWN, ->
wire = !wire
type = if wire then 'ON' else 'OFF'
btnWire.value = "ワイヤーフレーム[#{type}]"
renderer.wireframe = wire
, false
doc.addEventListener 'DOMContentLoaded', init, false
|
[
{
"context": "rfc3339date.js library\n# https://github.com/tardate/rfc3339date.js\n# Copyright (c) 2010 Paul GALLAGHE",
"end": 1028,
"score": 0.5992943644523621,
"start": 1025,
"tag": "USERNAME",
"value": "ate"
},
{
"context": "ub.com/tardate/rfc3339date.js\n# Copyright (c) 2010 Pa... | lib/iso8601.coffee | Do/iso8601.js | 10 | # iso8601.js
#
# Partial ECMAScript 5.1 Date object polyfill to support the ISO 8601 format
# specified in section 15.9.1.15 in Date.parse (section 15.9.4.2) and
# Date.prototype.toISOString (section 15.9.5.43). ISO 8601 formats from RFC 3339
# and the W3C Date and Time Formats NOTE (http://www.w3.org/TR/NOTE-datetime)
# are also supported.
#
# Adds string parsing and formatting functions directly to the native Date
# object and prototype. Uses native functionality where available.
#
# Examples
#
# Date.parse("2010-07-20T15:00:00Z")
# // => 1307834445456
#
# date = Date.parseISO8601("2010-07-20T15:00:00Z")
# // => Tue Jul 20 2010 08:00:00 GMT-0700 (PDT)
#
# date.toISOString()
# // => "2010-07-20T15:00:00.000Z"
#
# date.toISO8601String(true)
# // => "2010-07-20T08:00:00.000-07:00"
#
# Note: Avoid using "new Date(...)" to parse ISO 8601 strings since this library
# does not polyfill the Date constructor.
#
#
# Originally based on Paul Gallagher's rfc3339date.js library
# https://github.com/tardate/rfc3339date.js
# Copyright (c) 2010 Paul GALLAGHER http://tardate.com
#
# Additional modifications by the Do team
# Copyright (c) 2011 Do http://do.com
#
# Licensed under the MIT license
# http://www.opensource.org/licenses/mit-license.php
#
# Helper function to left-pad numbers to the specified length
pad = (number, length = 2) ->
result = number.toString()
while result.length < length
result = '0' + result
result
# Unit test to check native ISO 8601 parsing support
supportsISOParsing = Date.parse?('2011-06-11T23:20:45.456-0700') is 1307859645456
# Date.prototype.toISO8601String
#
# Format the date in ISO 8601 / RFC 3339 with custom rules. With no parameters,
# output is equivalent to the ECMAScript 5.1 defined Date.prototype.toISOString.
#
# localTimezone - Use local timezone or UTC offset? (default: false, i.e. UTC)
# separators - Include date/time separators? (default: true)
# milliseconds - Include milliseconds? (default: true)
#
# Examples
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
Date::toISO8601String = (localTimezone = false, separators = true, milliseconds = true) ->
# Raise RangeError for invalid dates
timet = @getTime()
if timet != timet # isNaN
throw new RangeError 'Invalid date'
dateSeparator = if separators then '-' else ''
timeSeparator = if separators then ':' else ''
result =
if localTimezone
@getFullYear().toString() + dateSeparator +
pad(@getMonth() + 1) + dateSeparator +
pad(@getDate()) + 'T' +
pad(@getHours()) + timeSeparator +
pad(@getMinutes()) + timeSeparator +
pad(@getSeconds())
else
@getUTCFullYear().toString() + dateSeparator +
pad(@getUTCMonth() + 1) + dateSeparator +
pad(@getUTCDate()) + 'T' +
pad(@getUTCHours()) + timeSeparator +
pad(@getUTCMinutes()) + timeSeparator +
pad(@getUTCSeconds())
if milliseconds
result += '.' +
pad (if localTimezone then @getMilliseconds() else @getUTCMilliseconds()), 3
if localTimezone
tzOffset = @getTimezoneOffset()
if tzOffset >= 0
result += '-'
else
result += '+'
tzOffset *= -1
result + pad(tzOffset / 60) + timeSeparator + pad(tzOffset % 60)
else
result + 'Z'
# Date.prototype.toISOString
#
# Format the date in UTC ISO 8601 / RFC 3339.
#
# Defined in ECMAScript 5.1 15.9.5.43. An implementation is set only if the
# browser lacks native support.
#
# Examples
#
# new Date().toISOString()
# // => "2010-07-25T11:51:31.427Z"
#
unless Date::toISOString
Date::toISOString = Date::toISO8601String
# Date.prototype.toJSON
#
# Format the date in ISO 8601 UTC suitable for use by JSON.stringify. Returns
# null for invalid dates. See ECMAScript 5.1 15.9.5.44.
#
# Examples
#
# new Date().toJSON()
# // => "2010-07-25T11:51:31.427Z"
#
unless Date::toJSON
Date::toJSON = (key) ->
if isFinite(@valueOf())
@toISOString()
else
null # Spec requires returning null for non-finite values
# Date.parseISO8601
#
# Parses ISO8601 / RFC 3339 date strings to a Date object. Uses native browser
# parsing if available.
#
# input - The String to parse.
# useNative - Use browser native parsing if available? (default: true)
#
# Examples
#
# Date.parseISO8601("2010-07-20T15:00:00Z")
# // => ....
#
ISO8601_PATTERN = ///
(\d\d\d\d) # year
(-)?
(\d\d) # month
(-)?
(\d\d) # day
(T)?
(\d\d) # hour
(:)?
(\d\d)? # minute
(:)?
(\d\d)? # seconds
([\.,]\d+)? # milliseconds
(
$| # end of input = no timezone information
Z| # UTC
([+-]) # offset direction
(\d\d) # offset hours
(:)?
(\d\d)? # offset minutes
)
///i
DECIMAL_SEPARATOR = String(1.5).charAt(1)
parseISO8601 = (input) ->
type = Object::toString.call(input)
# Return the input if it's already a Date instance
return input if type == '[object Date]'
# Can only parse Strings
return undefined unless type == '[object String]'
if matches = input.match(ISO8601_PATTERN)
year = parseInt(matches[1], 10)
month = parseInt(matches[3], 10) - 1
day = parseInt(matches[5], 10)
hour = parseInt(matches[7], 10)
minutes = if matches[9] then parseInt(matches[9], 10) else 0
seconds = if matches[11] then parseInt(matches[11], 10) else 0
milliseconds =
if matches[12]
parseFloat(DECIMAL_SEPARATOR + matches[12][1..]) * 1000
else
0
result = Date.UTC(year, month, day, hour, minutes, seconds, milliseconds)
if matches[13] && matches[14] # Timezone adjustment
offset = matches[15] * 60
offset += parseInt(matches[17], 10) if (matches[17])
offset *= if matches[14] is '-' then -1 else 1
result -= offset * 60 * 1000
new Date(result)
if supportsISOParsing
Date.parseISO8601 = (input, useNative = true) ->
if useNative # Use the default Date constructor, we have native support.
new Date(input)
else # Force the polyfill.
parseISO8601 input
else # No native support, always use polyfill.
Date.parseISO8601 = parseISO8601
# Date.parse
#
# Parses date strings; returns time in milliseconds since the Unix epoch. See
# http://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Date/parse
#
# This polyfills the standard Date.parse to support ISO 8601 / RFC 3339 date
# strings only if the browser doesen't have native support.
#
# Examples
#
# Date.parse("2010-07-20T15:00:00Z")
# // => 1279638000000
#
if Object::toString.call(Date.parse) != '[object Function]'
# No built-in Date.parse, use our version.
Date.parse = (input) -> parseISO8601(input)?.getTime()
else if not supportsISOParsing
# No native ISO 8601 parsing, chain our version
oldDateParse = Date.parse
Date.parse = (input) ->
result = parseISO8601(input)?.getTime()
result = oldDateParse(input) if not result and oldDateParse
result
# Date.now
#
# Returns the Number value (milliseconds since the Unix epoch) of the current
# time. See ECMAScript 5.1 15.9.4.4.
#
# Examples
#
# Date.now()
# // => 1325174662624
#
unless Date.now
Date.now = -> new Date().getTime()
| 2391 | # iso8601.js
#
# Partial ECMAScript 5.1 Date object polyfill to support the ISO 8601 format
# specified in section 15.9.1.15 in Date.parse (section 15.9.4.2) and
# Date.prototype.toISOString (section 15.9.5.43). ISO 8601 formats from RFC 3339
# and the W3C Date and Time Formats NOTE (http://www.w3.org/TR/NOTE-datetime)
# are also supported.
#
# Adds string parsing and formatting functions directly to the native Date
# object and prototype. Uses native functionality where available.
#
# Examples
#
# Date.parse("2010-07-20T15:00:00Z")
# // => 1307834445456
#
# date = Date.parseISO8601("2010-07-20T15:00:00Z")
# // => Tue Jul 20 2010 08:00:00 GMT-0700 (PDT)
#
# date.toISOString()
# // => "2010-07-20T15:00:00.000Z"
#
# date.toISO8601String(true)
# // => "2010-07-20T08:00:00.000-07:00"
#
# Note: Avoid using "new Date(...)" to parse ISO 8601 strings since this library
# does not polyfill the Date constructor.
#
#
# Originally based on Paul Gallagher's rfc3339date.js library
# https://github.com/tardate/rfc3339date.js
# Copyright (c) 2010 <NAME> http://tardate.com
#
# Additional modifications by the Do team
# Copyright (c) 2011 Do http://do.com
#
# Licensed under the MIT license
# http://www.opensource.org/licenses/mit-license.php
#
# Helper function to left-pad numbers to the specified length
pad = (number, length = 2) ->
result = number.toString()
while result.length < length
result = '0' + result
result
# Unit test to check native ISO 8601 parsing support
supportsISOParsing = Date.parse?('2011-06-11T23:20:45.456-0700') is 1307859645456
# Date.prototype.toISO8601String
#
# Format the date in ISO 8601 / RFC 3339 with custom rules. With no parameters,
# output is equivalent to the ECMAScript 5.1 defined Date.prototype.toISOString.
#
# localTimezone - Use local timezone or UTC offset? (default: false, i.e. UTC)
# separators - Include date/time separators? (default: true)
# milliseconds - Include milliseconds? (default: true)
#
# Examples
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
Date::toISO8601String = (localTimezone = false, separators = true, milliseconds = true) ->
# Raise RangeError for invalid dates
timet = @getTime()
if timet != timet # isNaN
throw new RangeError 'Invalid date'
dateSeparator = if separators then '-' else ''
timeSeparator = if separators then ':' else ''
result =
if localTimezone
@getFullYear().toString() + dateSeparator +
pad(@getMonth() + 1) + dateSeparator +
pad(@getDate()) + 'T' +
pad(@getHours()) + timeSeparator +
pad(@getMinutes()) + timeSeparator +
pad(@getSeconds())
else
@getUTCFullYear().toString() + dateSeparator +
pad(@getUTCMonth() + 1) + dateSeparator +
pad(@getUTCDate()) + 'T' +
pad(@getUTCHours()) + timeSeparator +
pad(@getUTCMinutes()) + timeSeparator +
pad(@getUTCSeconds())
if milliseconds
result += '.' +
pad (if localTimezone then @getMilliseconds() else @getUTCMilliseconds()), 3
if localTimezone
tzOffset = @getTimezoneOffset()
if tzOffset >= 0
result += '-'
else
result += '+'
tzOffset *= -1
result + pad(tzOffset / 60) + timeSeparator + pad(tzOffset % 60)
else
result + 'Z'
# Date.prototype.toISOString
#
# Format the date in UTC ISO 8601 / RFC 3339.
#
# Defined in ECMAScript 5.1 192.168.3.11. An implementation is set only if the
# browser lacks native support.
#
# Examples
#
# new Date().toISOString()
# // => "2010-07-25T11:51:31.427Z"
#
unless Date::toISOString
Date::toISOString = Date::toISO8601String
# Date.prototype.toJSON
#
# Format the date in ISO 8601 UTC suitable for use by JSON.stringify. Returns
# null for invalid dates. See ECMAScript 5.1 172.16.17.32.
#
# Examples
#
# new Date().toJSON()
# // => "2010-07-25T11:51:31.427Z"
#
unless Date::toJSON
Date::toJSON = (key) ->
if isFinite(@valueOf())
@toISOString()
else
null # Spec requires returning null for non-finite values
# Date.parseISO8601
#
# Parses ISO8601 / RFC 3339 date strings to a Date object. Uses native browser
# parsing if available.
#
# input - The String to parse.
# useNative - Use browser native parsing if available? (default: true)
#
# Examples
#
# Date.parseISO8601("2010-07-20T15:00:00Z")
# // => ....
#
ISO8601_PATTERN = ///
(\d\d\d\d) # year
(-)?
(\d\d) # month
(-)?
(\d\d) # day
(T)?
(\d\d) # hour
(:)?
(\d\d)? # minute
(:)?
(\d\d)? # seconds
([\.,]\d+)? # milliseconds
(
$| # end of input = no timezone information
Z| # UTC
([+-]) # offset direction
(\d\d) # offset hours
(:)?
(\d\d)? # offset minutes
)
///i
DECIMAL_SEPARATOR = String(1.5).charAt(1)
parseISO8601 = (input) ->
type = Object::toString.call(input)
# Return the input if it's already a Date instance
return input if type == '[object Date]'
# Can only parse Strings
return undefined unless type == '[object String]'
if matches = input.match(ISO8601_PATTERN)
year = parseInt(matches[1], 10)
month = parseInt(matches[3], 10) - 1
day = parseInt(matches[5], 10)
hour = parseInt(matches[7], 10)
minutes = if matches[9] then parseInt(matches[9], 10) else 0
seconds = if matches[11] then parseInt(matches[11], 10) else 0
milliseconds =
if matches[12]
parseFloat(DECIMAL_SEPARATOR + matches[12][1..]) * 1000
else
0
result = Date.UTC(year, month, day, hour, minutes, seconds, milliseconds)
if matches[13] && matches[14] # Timezone adjustment
offset = matches[15] * 60
offset += parseInt(matches[17], 10) if (matches[17])
offset *= if matches[14] is '-' then -1 else 1
result -= offset * 60 * 1000
new Date(result)
if supportsISOParsing
Date.parseISO8601 = (input, useNative = true) ->
if useNative # Use the default Date constructor, we have native support.
new Date(input)
else # Force the polyfill.
parseISO8601 input
else # No native support, always use polyfill.
Date.parseISO8601 = parseISO8601
# Date.parse
#
# Parses date strings; returns time in milliseconds since the Unix epoch. See
# http://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Date/parse
#
# This polyfills the standard Date.parse to support ISO 8601 / RFC 3339 date
# strings only if the browser doesen't have native support.
#
# Examples
#
# Date.parse("2010-07-20T15:00:00Z")
# // => 1279638000000
#
if Object::toString.call(Date.parse) != '[object Function]'
# No built-in Date.parse, use our version.
Date.parse = (input) -> parseISO8601(input)?.getTime()
else if not supportsISOParsing
# No native ISO 8601 parsing, chain our version
oldDateParse = Date.parse
Date.parse = (input) ->
result = parseISO8601(input)?.getTime()
result = oldDateParse(input) if not result and oldDateParse
result
# Date.now
#
# Returns the Number value (milliseconds since the Unix epoch) of the current
# time. See ECMAScript 5.1 15.9.4.4.
#
# Examples
#
# Date.now()
# // => 1325174662624
#
unless Date.now
Date.now = -> new Date().getTime()
| true | # iso8601.js
#
# Partial ECMAScript 5.1 Date object polyfill to support the ISO 8601 format
# specified in section 15.9.1.15 in Date.parse (section 15.9.4.2) and
# Date.prototype.toISOString (section 15.9.5.43). ISO 8601 formats from RFC 3339
# and the W3C Date and Time Formats NOTE (http://www.w3.org/TR/NOTE-datetime)
# are also supported.
#
# Adds string parsing and formatting functions directly to the native Date
# object and prototype. Uses native functionality where available.
#
# Examples
#
# Date.parse("2010-07-20T15:00:00Z")
# // => 1307834445456
#
# date = Date.parseISO8601("2010-07-20T15:00:00Z")
# // => Tue Jul 20 2010 08:00:00 GMT-0700 (PDT)
#
# date.toISOString()
# // => "2010-07-20T15:00:00.000Z"
#
# date.toISO8601String(true)
# // => "2010-07-20T08:00:00.000-07:00"
#
# Note: Avoid using "new Date(...)" to parse ISO 8601 strings since this library
# does not polyfill the Date constructor.
#
#
# Originally based on Paul Gallagher's rfc3339date.js library
# https://github.com/tardate/rfc3339date.js
# Copyright (c) 2010 PI:NAME:<NAME>END_PI http://tardate.com
#
# Additional modifications by the Do team
# Copyright (c) 2011 Do http://do.com
#
# Licensed under the MIT license
# http://www.opensource.org/licenses/mit-license.php
#
# Helper function to left-pad numbers to the specified length
pad = (number, length = 2) ->
result = number.toString()
while result.length < length
result = '0' + result
result
# Unit test to check native ISO 8601 parsing support
supportsISOParsing = Date.parse?('2011-06-11T23:20:45.456-0700') is 1307859645456
# Date.prototype.toISO8601String
#
# Format the date in ISO 8601 / RFC 3339 with custom rules. With no parameters,
# output is equivalent to the ECMAScript 5.1 defined Date.prototype.toISOString.
#
# localTimezone - Use local timezone or UTC offset? (default: false, i.e. UTC)
# separators - Include date/time separators? (default: true)
# milliseconds - Include milliseconds? (default: true)
#
# Examples
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
# new Date().toISO8601String(true)
# // => "2010-07-25T19:51:31.427+08:00"
#
Date::toISO8601String = (localTimezone = false, separators = true, milliseconds = true) ->
# Raise RangeError for invalid dates
timet = @getTime()
if timet != timet # isNaN
throw new RangeError 'Invalid date'
dateSeparator = if separators then '-' else ''
timeSeparator = if separators then ':' else ''
result =
if localTimezone
@getFullYear().toString() + dateSeparator +
pad(@getMonth() + 1) + dateSeparator +
pad(@getDate()) + 'T' +
pad(@getHours()) + timeSeparator +
pad(@getMinutes()) + timeSeparator +
pad(@getSeconds())
else
@getUTCFullYear().toString() + dateSeparator +
pad(@getUTCMonth() + 1) + dateSeparator +
pad(@getUTCDate()) + 'T' +
pad(@getUTCHours()) + timeSeparator +
pad(@getUTCMinutes()) + timeSeparator +
pad(@getUTCSeconds())
if milliseconds
result += '.' +
pad (if localTimezone then @getMilliseconds() else @getUTCMilliseconds()), 3
if localTimezone
tzOffset = @getTimezoneOffset()
if tzOffset >= 0
result += '-'
else
result += '+'
tzOffset *= -1
result + pad(tzOffset / 60) + timeSeparator + pad(tzOffset % 60)
else
result + 'Z'
# Date.prototype.toISOString
#
# Format the date in UTC ISO 8601 / RFC 3339.
#
# Defined in ECMAScript 5.1 PI:IP_ADDRESS:192.168.3.11END_PI. An implementation is set only if the
# browser lacks native support.
#
# Examples
#
# new Date().toISOString()
# // => "2010-07-25T11:51:31.427Z"
#
unless Date::toISOString
Date::toISOString = Date::toISO8601String
# Date.prototype.toJSON
#
# Format the date in ISO 8601 UTC suitable for use by JSON.stringify. Returns
# null for invalid dates. See ECMAScript 5.1 PI:IP_ADDRESS:172.16.17.32END_PI.
#
# Examples
#
# new Date().toJSON()
# // => "2010-07-25T11:51:31.427Z"
#
unless Date::toJSON
Date::toJSON = (key) ->
if isFinite(@valueOf())
@toISOString()
else
null # Spec requires returning null for non-finite values
# Date.parseISO8601
#
# Parses ISO8601 / RFC 3339 date strings to a Date object. Uses native browser
# parsing if available.
#
# input - The String to parse.
# useNative - Use browser native parsing if available? (default: true)
#
# Examples
#
# Date.parseISO8601("2010-07-20T15:00:00Z")
# // => ....
#
ISO8601_PATTERN = ///
(\d\d\d\d) # year
(-)?
(\d\d) # month
(-)?
(\d\d) # day
(T)?
(\d\d) # hour
(:)?
(\d\d)? # minute
(:)?
(\d\d)? # seconds
([\.,]\d+)? # milliseconds
(
$| # end of input = no timezone information
Z| # UTC
([+-]) # offset direction
(\d\d) # offset hours
(:)?
(\d\d)? # offset minutes
)
///i
DECIMAL_SEPARATOR = String(1.5).charAt(1)
parseISO8601 = (input) ->
type = Object::toString.call(input)
# Return the input if it's already a Date instance
return input if type == '[object Date]'
# Can only parse Strings
return undefined unless type == '[object String]'
if matches = input.match(ISO8601_PATTERN)
year = parseInt(matches[1], 10)
month = parseInt(matches[3], 10) - 1
day = parseInt(matches[5], 10)
hour = parseInt(matches[7], 10)
minutes = if matches[9] then parseInt(matches[9], 10) else 0
seconds = if matches[11] then parseInt(matches[11], 10) else 0
milliseconds =
if matches[12]
parseFloat(DECIMAL_SEPARATOR + matches[12][1..]) * 1000
else
0
result = Date.UTC(year, month, day, hour, minutes, seconds, milliseconds)
if matches[13] && matches[14] # Timezone adjustment
offset = matches[15] * 60
offset += parseInt(matches[17], 10) if (matches[17])
offset *= if matches[14] is '-' then -1 else 1
result -= offset * 60 * 1000
new Date(result)
if supportsISOParsing
Date.parseISO8601 = (input, useNative = true) ->
if useNative # Use the default Date constructor, we have native support.
new Date(input)
else # Force the polyfill.
parseISO8601 input
else # No native support, always use polyfill.
Date.parseISO8601 = parseISO8601
# Date.parse
#
# Parses date strings; returns time in milliseconds since the Unix epoch. See
# http://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Date/parse
#
# This polyfills the standard Date.parse to support ISO 8601 / RFC 3339 date
# strings only if the browser doesen't have native support.
#
# Examples
#
# Date.parse("2010-07-20T15:00:00Z")
# // => 1279638000000
#
if Object::toString.call(Date.parse) != '[object Function]'
# No built-in Date.parse, use our version.
Date.parse = (input) -> parseISO8601(input)?.getTime()
else if not supportsISOParsing
# No native ISO 8601 parsing, chain our version
oldDateParse = Date.parse
Date.parse = (input) ->
result = parseISO8601(input)?.getTime()
result = oldDateParse(input) if not result and oldDateParse
result
# Date.now
#
# Returns the Number value (milliseconds since the Unix epoch) of the current
# time. See ECMAScript 5.1 15.9.4.4.
#
# Examples
#
# Date.now()
# // => 1325174662624
#
unless Date.now
Date.now = -> new Date().getTime()
|
[
{
"context": " \"765\": \"French (Suriname)\"\n \"815\": \"French (Svalbard and Jan Mayen)\"\n \"767\": \"French (Swaziland)\"\n",
"end": 13737,
"score": 0.7035489678382874,
"start": 13736,
"tag": "NAME",
"value": "b"
},
{
"context": "\"French (Suriname)\"\n \"815\": \"French (Sva... | src/commands/update/lists/languages.coffee | upisfree/medic | 2 | # export
module.exports =
"186": "Afrikaans"
"110": "Afrikaans (South Africa)"
"187": "Albanian"
"151": "Albanian (Albania)"
"188": "Amharic"
"111": "Amharic (Ethiopia)"
"8": "Arabic"
"266": "Arabic (Algeria)"
"265": "Arabic (Bahrain)"
"36": "Arabic (Egypt)"
"268": "Arabic (Iraq)"
"57": "Arabic (Israel)"
"269": "Arabic (Jordan)"
"270": "Arabic (Kuwait)"
"271": "Arabic (Lebanon)"
"444": "Arabic (Libya)"
"272": "Arabic (Mauritania)"
"445": "Arabic (Morocco)"
"273": "Arabic (Oman)"
"274": "Arabic (Palestinian Authority)"
"275": "Arabic (Qatar)"
"13": "Arabic (Saudi Arabia)"
"276": "Arabic (Syria)"
"446": "Arabic (Tunisia)"
"264": "Arabic (United Arab Emirates)"
"267": "Arabic (Western Sahara (Disputed))"
"277": "Arabic (Yemen)"
"189": "Armenian"
"125": "Armenian (Armenia)"
"190": "Assamese"
"45": "Assamese (India)"
"191": "Azerbaijani"
"447": "Azerbaijani (Arabic)"
"794": "Azerbaijani (Arabic) (Azerbaijan)"
"448": "Azerbaijani (Cyrillic)"
"795": "Azerbaijani (Cyrillic) (Azerbaijan)"
"482": "Azerbaijani (Latin)"
"112": "Azerbaijani (Latin) (Azerbaijan)"
"194": "Bangla"
"114": "Bangla (Bangladesh)"
"46": "Bangla (India)"
"518": "Bangla (Pakistan)"
"192": "Basque"
"119": "Basque (Basque)"
"193": "Belarusian"
"113": "Belarusian (Belarus)"
"195": "Bosnian"
"449": "Bosnian (Cyrillic)"
"450": "Bosnian (Cyrillic) (Bosnia and Herzegovina)"
"451": "Bosnian (Latin)"
"115": "Bosnian (Latin) (Bosnia and Herzegovina)"
"26": "Bulgarian"
"27": "Bulgarian (Bulgaria)"
"463": "Cajun French"
"466": "Cajun French (Latin)"
"197": "Catalan"
"116": "Catalan (Spain)"
"200": "Cherokee"
"253": "Cherokee (Cherokee)"
"118": "Cherokee (Cherokee) (United States)"
"452": "Cherokee (Latin)"
"99": "Cherokee (United States)"
"2": "Chinese"
"5": "Chinese (China)"
"43": "Chinese (Hong Kong SAR)"
"442": "Chinese (Macao SAR)"
"262": "Chinese (Simplified)"
"479": "Chinese (Simplified) (China)"
"477": "Chinese (Simplified) (Singapore)"
"176": "Chinese (Singapore)"
"88": "Chinese (Taiwan)"
"263": "Chinese (Traditional)"
"481": "Chinese (Traditional) (Hong Kong)"
"478": "Chinese (Traditional) (Macao SAR)"
"480": "Chinese (Traditional) (Taiwan)"
"31": "Croatian"
"475": "Croatian (Bosnia and Herzegovina)"
"32": "Croatian (Croatia)"
"198": "Czech"
"33": "Czech (Czech Republic)"
"34": "Danish"
"35": "Danish (Denmark)"
"199": "Dari"
"145": "Dari (Afghanistan)"
"476": "Dari (Arabic)"
"67": "Dutch"
"178": "Dutch (Belgium)"
"68": "Dutch (Netherlands)"
"1": "English"
"280": "English (Afghanistan)"
"290": "English (Åland Islands)"
"283": "English (Albania)"
"526": "English (Algeria)"
"288": "English (American Samoa)"
"519": "English (Andorra)"
"286": "English (Angola)"
"282": "English (Anguilla)"
"287": "English (Antarctica)"
"281": "English (Antigua and Barbuda)"
"284": "English (Armenia)"
"289": "English (Aruba)"
"824": "English (Australia and New Zealand)"
"24": "English (Australia)"
"291": "English (Azerbaijan)"
"301": "English (Bahama"
"520": "English (Bangladesh)"
"293": "English (Barbados)"
"305": "English (Belarus)"
"453": "English (Belize)"
"296": "English (Benin)"
"298": "English (Bermuda)"
"302": "English (Bhutan)"
"522": "English (Bolivia)"
"300": "English (Bonaire, Saint Eustatius and Saba)"
"292": "English (Bosnia and Herzegovina)"
"304": "English (Botswana)"
"303": "English (Bouvet Island)"
"339": "English (British Indian Ocean Territory)"
"415": "English (British Virgin Islands)"
"299": "English (Brunei)"
"521": "English (Bulgaria)"
"294": "English (Burkina Faso)"
"295": "English (Burundi)"
"344": "English (Cambodia)"
"312": "English (Cameroon)"
"28": "English (Canada)"
"523": "English (Cape Verde)"
"826": "English (Caribbean)"
"348": "English (Cayman Islands)"
"308": "English (Central African Republic)"
"400": "English (Chad)"
"314": "English (Christmas Island)"
"306": "English (Cocos (Keeling) Islands)"
"346": "English (Comoros)"
"307": "English (Congo (DRC))"
"309": "English (Congo)"
"311": "English (Cook Islands)"
"310": "English (Côte d'Ivoire)"
"531": "English (Croatia)"
"313": "English (Curaçao)"
"315": "English (Djibouti)"
"524": "English (Dominica)"
"525": "English (Dominican Republic)"
"829": "English (Eastern Africa)"
"331": "English (Equatorial Guinea)"
"316": "English (Eritrea)"
"527": "English (Estonia)"
"317": "English (Ethiopia)"
"319": "English (Falkland Islands (Islas Malvinas))"
"321": "English (Faroe Islands)"
"318": "English (Fiji)"
"324": "English (French Guiana)"
"379": "English (French Polynesia)"
"401": "English (French Southern and Antarctic Lands)"
"322": "English (Gabon)"
"329": "English (Gambia, The)"
"323": "English (Georgia)"
"528": "English (Germany)"
"326": "English (Ghana)"
"327": "English (Gibraltar)"
"328": "English (Greenland)"
"529": "English (Guadeloupe)"
"333": "English (Guam)"
"325": "English (Guernsey)"
"330": "English (Guinea)"
"334": "English (Guinea-Bissau)"
"335": "English (Guyana)"
"337": "English (Haiti)"
"336": "English (Heard Island and McDonald Islands)"
"530": "English (Honduras)"
"183": "English (Hong Kong SAR)"
"534": "English (Iceland)"
"44": "English (India)"
"454": "English (Indonesia)"
"533": "English (Iraq)"
"173": "English (Ireland)"
"338": "English (Isle of Man)"
"532": "English (Israel)"
"341": "English (Jamaica)"
"340": "English (Jersey)"
"535": "English (Jordan)"
"455": "English (Kazakhstan)"
"342": "English (Kenya)"
"345": "English (Kiribati)"
"343": "English (Kyrgyzstan)"
"349": "English (Laos)"
"537": "English (Latvia)"
"353": "English (Lesotho)"
"352": "English (Liberia)"
"354": "English (Libya)"
"536": "English (Lithuania)"
"538": "English (Macao SAR)"
"360": "English (Macedonia, FYRO)"
"358": "English (Madagascar)"
"369": "English (Malawi)"
"457": "English (Malaysia)"
"368": "English (Maldives)"
"361": "English (Mali)"
"456": "English (Malta)"
"359": "English (Marshall Islands)"
"365": "English (Martinique)"
"367": "English (Mauritius)"
"420": "English (Mayotte)"
"320": "English (Micronesia)"
"356": "English (Moldova)"
"355": "English (Monaco)"
"363": "English (Mongolia)"
"366": "English (Montserrat)"
"370": "English (Mozambique)"
"362": "English (Myanmar)"
"371": "English (Namibia)"
"377": "English (Nauru)"
"376": "English (Nepal)"
"285": "English (Netherlands Antilles (Former))"
"69": "English (Netherlands)"
"372": "English (New Caledonia)"
"174": "English (New Zealand)"
"540": "English (Nicaragua)"
"373": "English (Niger)"
"375": "English (Nigeria)"
"378": "English (Niue)"
"374": "English (Norfolk Island)"
"825": "English (Northern America)"
"364": "English (Northern Mariana Islands)"
"541": "English (Oman)"
"459": "English (Pakistan)"
"383": "English (Palau)"
"543": "English (Palestinian Authority)"
"380": "English (Papua New Guinea)"
"458": "English (Philippines)"
"382": "English (Pitcairn Islands)"
"542": "English (Puerto Rico)"
"544": "English (Qatar)"
"384": "English (Réunion)"
"545": "English (Romania)"
"385": "English (Rwanda)"
"297": "English (Saint Barthélemy)"
"389": "English (Saint Helena, Ascension and Tristan da Cunha)"
"347": "English (Saint Kitts and Nevis)"
"350": "English (Saint Lucia)"
"357": "English (Saint Martin (French Part))"
"381": "English (Saint Pierre and Miquelon)"
"414": "English (Saint Vincent and the Grenadines)"
"419": "English (Samoa)"
"392": "English (San Marino)"
"396": "English (São Tomé and Príncipe)"
"393": "English (Senegal)"
"387": "English (Seychelles)"
"391": "English (Sierra Leone)"
"181": "English (Singapore)"
"397": "English (Sint Maarten (Dutch Part))"
"547": "English (Slovakia)"
"546": "English (Slovenia)"
"386": "English (Solomon Islands)"
"394": "English (Somalia)"
"182": "English (South Africa)"
"332": "English (South Georgia and the South Sandwich Islands)"
"828": "English (Southern Africa)"
"351": "English (Sri Lanka)"
"388": "English (Sudan)"
"395": "English (Suriname)"
"390": "English (Svalbard and Jan Mayen)"
"398": "English (Swaziland)"
"403": "English (Tajikistan)"
"410": "English (Tanzania)"
"548": "English (Thailand)"
"405": "English (Timor-Leste)"
"402": "English (Togo)"
"404": "English (Tokelau)"
"407": "English (Tonga)"
"408": "English (Trinidad and Tobago)"
"549": "English (Turkey)"
"406": "English (Turkmenistan)"
"399": "English (Turks and Caicos Islands)"
"409": "English (Tuvalu)"
"796": "English (U.S. Minor Outlying Islands)"
"416": "English (U.S. Virgin Islands)"
"411": "English (Uganda)"
"95": "English (United Kingdom)"
"4": "English (United States)"
"412": "English (Uzbekistan)"
"417": "English (Vanuatu)"
"413": "English (Vatican City)"
"460": "English (Vietnam)"
"418": "English (Wallis and Futuna)"
"827": "English (Western Africa)"
"421": "English (Zambia)"
"422": "English (Zimbabwe)"
"37": "Estonian"
"38": "Estonian (Estonia)"
"201": "Filipino"
"461": "Filipino (Latin)"
"121": "Filipino (Philippines)"
"39": "Finnish"
"40": "Finnish (Finland)"
"471": "Franco-Provençal"
"472": "Franco-Provençal (Latin)"
"18": "French"
"809": "French (Åland Islands)"
"670": "French (Albania)"
"697": "French (Algeria)"
"673": "French (American Samoa)"
"667": "French (Andorra)"
"671": "French (Angola)"
"669": "French (Anguilla)"
"672": "French (Antarctica)"
"668": "French (Antigua and Barbuda)"
"674": "French (Aruba)"
"675": "French (Azerbaijan)"
"684": "French (Bahamas, The)"
"677": "French (Barbados)"
"175": "French (Belgium)"
"687": "French (Belize)"
"680": "French (Benin)"
"681": "French (Bermuda)"
"685": "French (Bhutan)"
"683": "French (Bolivia)"
"810": "French (Bonaire, Saint Eustatius and Saba)"
"676": "French (Bosnia and Herzegovina)"
"686": "French (Botswana)"
"811": "French (Bouvet Island)"
"812": "French (British Indian Ocean Territory)"
"780": "French (British Virgin Islands)"
"682": "French (Brunei)"
"678": "French (Burkina Faso)"
"679": "French (Burundi)"
"467": "French (Cameroon)"
"29": "French (Canada)"
"692": "French (Cape Verde)"
"833": "French (Caribbean)"
"725": "French (Cayman Islands)"
"689": "French (Central African Republic)"
"769": "French (Chad)"
"693": "French (Christmas Island)"
"688": "French (Cocos (Keeling) Islands)"
"723": "French (Comoros)"
"464": "French (Congo (DRC))"
"690": "French (Congo)"
"691": "French (Cook Islands)"
"465": "French (Côte d'Ivoire)"
"813": "French (Curaçao)"
"694": "French (Djibouti)"
"695": "French (Dominica)"
"696": "French (Dominican Republic)"
"714": "French (Equatorial Guinea)"
"698": "French (Eritrea)"
"700": "French (Falkland Islands (Islas Malvinas))"
"702": "French (Faroe Islands)"
"699": "French (Fiji)"
"19": "French (France)"
"706": "French (French Guiana)"
"751": "French (French Polynesia)"
"703": "French (Gabon)"
"711": "French (Gambia, The)"
"705": "French (Georgia)"
"708": "French (Ghana)"
"709": "French (Gibraltar)"
"710": "French (Greenland)"
"704": "French (Grenada)"
"713": "French (Guadeloupe)"
"715": "French (Guam)"
"707": "French (Guernsey)"
"712": "French (Guinea)"
"716": "French (Guinea-Bissau)"
"717": "French (Guyana)"
"468": "French (Haiti)"
"814": "French (Heard Island and McDonald Islands)"
"718": "French (Honduras)"
"720": "French (Jamaica)"
"719": "French (Jersey)"
"722": "French (Kiribati)"
"721": "French (Kyrgyzstan)"
"726": "French (Laos)"
"729": "French (Lesotho)"
"728": "French (Liberia)"
"730": "French (Libya)"
"179": "French (Luxembourg)"
"736": "French (Macao SAR)"
"732": "French (Madagascar)"
"742": "French (Malawi)"
"741": "French (Maldives)"
"470": "French (Mali)"
"733": "French (Marshall Islands)"
"738": "French (Martinique)"
"740": "French (Mauritius)"
"785": "French (Mayotte)"
"701": "French (Micronesia)"
"731": "French (Moldova)"
"469": "French (Monaco)"
"735": "French (Mongolia)"
"739": "French (Montserrat)"
"438": "French (Morocco)"
"743": "French (Mozambique)"
"734": "French (Myanmar)"
"744": "French (Namibia)"
"749": "French (Nauru)"
"745": "French (New Caledonia)"
"748": "French (Nicaragua)"
"746": "French (Niger)"
"750": "French (Niue)"
"747": "French (Norfolk Island)"
"462": "French (North Africa)"
"834": "French (Northern America)"
"737": "French (Northern Mariana Islands)"
"756": "French (Palau)"
"755": "French (Palestinian Authority)"
"752": "French (Papua New Guinea)"
"754": "French (Pitcairn Islands)"
"473": "French (Réunion)"
"757": "French (Rwanda)"
"816": "French (Saint Barthélemy)"
"760": "French (Saint Helena, Ascension and Tristan da Cunha)"
"724": "French (Saint Kitts and Nevis)"
"727": "French (Saint Lucia)"
"817": "French (Saint Martin (French Part))"
"753": "French (Saint Pierre and Miquelon)"
"779": "French (Saint Vincent and the Grenadines)"
"784": "French (Samoa)"
"762": "French (San Marino)"
"766": "French (São Tomé and Príncipe)"
"763": "French (Senegal)"
"759": "French (Seychelles)"
"761": "French (Sierra Leone)"
"818": "French (Sint Maarten (Dutch Part))"
"758": "French (Solomon Islands)"
"764": "French (Somalia)"
"819": "French (South Georgia and the South Sandwich Islands)"
"765": "French (Suriname)"
"815": "French (Svalbard and Jan Mayen)"
"767": "French (Swaziland)"
"87": "French (Switzerland)"
"771": "French (Tajikistan)"
"776": "French (Tanzania)"
"773": "French (Timor-Leste)"
"770": "French (Togo)"
"772": "French (Tokelau)"
"774": "French (Tonga)"
"439": "French (Tunisia)"
"768": "French (Turks and Caicos Islands)"
"775": "French (Tuvalu)"
"820": "French (U.S. Minor Outlying Islands)"
"781": "French (U.S. Virgin Islands)"
"777": "French (Uganda)"
"782": "French (Vanuatu)"
"778": "French (Vatican City)"
"783": "French (Wallis and Futuna)"
"835": "French (Western Africa)"
"832": "French (Western Europe)"
"786": "French (Zambia)"
"787": "French (Zimbabwe)"
"81": "Galician"
"82": "Galician (Spain)"
"202": "Georgian"
"130": "Georgian (Georgia)"
"9": "German"
"177": "German (Austria)"
"14": "German (Germany)"
"278": "German (Liechtenstein)"
"180": "German (Luxembourg)"
"86": "German (Switzerland)"
"41": "Greek"
"279": "Greek (Cyprus)"
"42": "Greek (Greece)"
"203": "Gujarati"
"48": "Gujarati (India)"
"254": "Hausa"
"204": "Hausa (Latin)"
"124": "Hausa (Latin) (Nigeria)"
"55": "Hebrew"
"56": "Hebrew (Israel)"
"11": "Hindi"
"16": "Hindi (India)"
"104": "Hungarian"
"105": "Hungarian (Hungary)"
"205": "Icelandic"
"128": "Icelandic (Iceland)"
"206": "Igbo"
"484": "Igbo (Latin)"
"127": "Igbo (Nigeria)"
"207": "Indonesian"
"126": "Indonesian (Indonesia)"
"196": "Inuktitut"
"22": "Inuktitut (Canada)"
"30": "Inuktitut (Canadian) Canada"
"255": "Inuktitut (Latin)"
"129": "Inuktitut (Latin) (Canada)"
"208": "Irish"
"122": "Irish (Ireland)"
"209": "isiXhosa"
"165": "isiXhosa (South Africa)"
"210": "isiZulu"
"167": "isiZulu (South Africa)"
"58": "Italian"
"59": "Italian (Italy)"
"443": "Italian (Switzerland)"
"12": "Japanese"
"17": "Japanese (Japan)"
"211": "Kannada"
"52": "Kannada (India)"
"212": "Kazakh"
"131": "Kazakh (Kazakhstan)"
"213": "Khmer"
"132": "Khmer (Cambodia)"
"516": "K'iche'"
"901": "K'iche' (Guatemala)"
"517": "K'iche' (Latin)"
"215": "Kinyarwanda"
"148": "Kinyarwanda (Rwanda)"
"216": "Kiswahili"
"154": "Kiswahili (Kenya)"
"217": "Konkani"
"54": "Konkani (India)"
"60": "Korean"
"61": "Korean (Korea)"
"218": "Kurdish"
"256": "Kurdish (Arabic)"
"133": "Kurdish (Iraq)"
"219": "Kyrgyz"
"485": "Kyrgyz (Cyrillic)"
"134": "Kyrgyz (Kyrgyzstan)"
"903": "Lao"
"902": "Lao (Laos)"
"62": "Latvian"
"63": "Latvian (Latvia)"
"64": "Lithuanian"
"65": "Lithuanian (Lithuania)"
"220": "Luxembourgish"
"135": "Luxembourgish (Luxembourg)"
"221": "Macedonian"
"137": "Macedonian (Macedonia, FYRO)"
"222": "Malay"
"490": "Malay (Brunei)"
"140": "Malay (Malaysia)"
"223": "Malayalam"
"53": "Malayalam (India)"
"224": "Maltese"
"141": "Maltese (Malta)"
"225": "Maori"
"486": "Maori (Latin)"
"136": "Maori (New Zealand)"
"226": "Marathi"
"139": "Marathi (India)"
"227": "Mongolian"
"487": "Mongolian (Cyrillic)"
"138": "Mongolian (Mongolia)"
"488": "Mongolian (Mongolian)"
"489": "Mongolian (Phags-pa)"
"228": "Nepali"
"142": "Nepali (Nepal)"
"491": "Norwegian"
"229": "Norwegian (Bokmål)"
"70": "Norwegian (Bokmål) (Norway)"
"492": "Norwegian (Norway)"
"230": "Norwegian (Nynorsk)"
"71": "Norwegian (Nynorsk) (Norway)"
"231": "Odia"
"49": "Odia (India)"
"232": "Persian/Farsi"
"120": "Persian/Farsi (Iran)"
"72": "Polish"
"73": "Polish (Poland)"
"25": "Portuguese"
"74": "Portuguese (Brazil)"
"75": "Portuguese (Portugal)"
"3": "Pseudo"
"6": "Pseudoloc"
"823": "Pseudoloc (Latin) (Selfhost)"
"900": "Pseudoloc East Asian-language"
"7": "Pseudoloc Mirrored"
"233": "Punjabi"
"257": "Punjabi (Arabic)"
"493": "Punjabi (Devanagari)"
"47": "Punjabi (India)"
"144": "Punjabi (Pakistan)"
"234": "Quechua"
"495": "Quechua (Bolivia)"
"496": "Quechua (Ecuador)"
"147": "Quechua (Peru)"
"76": "Romanian"
"77": "Romanian (Romania)"
"788": "Russian (Armenia)"
"789": "Russian (Belarus)"
"790": "Russian (Kazakhstan)"
"791": "Russian (Ukraine)"
"235": "Scottish Gaelic"
"474": "Scottish Gaelic (Latin)"
"123": "Scottish Gaelic (United Kingdom)"
"236": "Serbian"
"259": "Serbian (Cyrillic)"
"152": "Serbian (Cyrillic) (Bosnia and Herzegovina)"
"498": "Serbian (Cyrillic) (Montenegro)"
"440": "Serbian (Cyrillic) (Serbia )"
"153": "Serbian (Cyrillic) (Serbia and Montenegro, Former)"
"100": "Serbian (Latin)"
"499": "Serbian (Latin) (Bosnia and Herzegovina)"
"441": "Serbian (Latin) (Montenegro)"
"101": "Serbian (Latin) (Serbia and Montenegro (Former))"
"500": "Serbian (Latin) (Serbia)"
"237": "Sesotho sa Leboa"
"143": "Sesotho sa Leboa (South Africa)"
"238": "Setswana"
"507": "Setswana (Botswana)"
"158": "Setswana (South Africa)"
"239": "Sindhi"
"258": "Sindhi (Arabic)"
"497": "Sindhi (Devanagari)"
"149": "Sindhi (Pakistan)"
"240": "Sinhala"
"150": "Sinhala (Sri Lanka)"
"78": "Slovak"
"79": "Slovak (Slovakia)"
"102": "Slovenian"
"792": "Slovenian (Sierra Leone)"
"103": "Slovenian (Slovenia)"
"10": "Spanish"
"797": "Spanish (Åland Islands)"
"552": "Spanish (Albania)"
"555": "Spanish (American Samoa)"
"830": "Spanish (Americas)"
"423": "Spanish (Andorra)"
"553": "Spanish (Angola)"
"551": "Spanish (Anguilla)"
"554": "Spanish (Antarctica)"
"550": "Spanish (Antigua and Barbuda)"
"23": "Spanish (Argentina)"
"556": "Spanish (Aruba)"
"557": "Spanish (Azerbaijan)"
"565": "Spanish (Bahamas, The)"
"559": "Spanish (Barbados)"
"425": "Spanish (Belize)"
"562": "Spanish (Benin)"
"563": "Spanish (Bermuda)"
"566": "Spanish (Bhutan)"
"424": "Spanish (Bolivia)"
"798": "Spanish (Bonaire, Saint Eustatius and Saba)"
"558": "Spanish (Bosnia and Herzegovina)"
"567": "Spanish (Botswana)"
"799": "Spanish (Bouvet Island)"
"800": "Spanish (British Indian Ocean Territory)"
"659": "Spanish (British Virgin Islands)"
"564": "Spanish (Brunei)"
"560": "Spanish (Burkina Faso)"
"561": "Spanish (Burundi)"
"574": "Spanish (Cameroon)"
"80": "Spanish (Canada)"
"428": "Spanish (Cape Verde)"
"602": "Spanish (Cayman Islands)"
"570": "Spanish (Central African Republic)"
"648": "Spanish (Chad)"
"184": "Spanish (Chile)"
"575": "Spanish (Christmas Island)"
"568": "Spanish (Cocos (Keeling) Islands)"
"185": "Spanish (Colombia)"
"600": "Spanish (Comoros)"
"569": "Spanish (Congo (DRC))"
"571": "Spanish (Congo)"
"573": "Spanish (Cook Islands)"
"426": "Spanish (Costa Rica)"
"572": "Spanish (Côte d'Ivoire)"
"427": "Spanish (Cuba)"
"801": "Spanish (Curaçao)"
"576": "Spanish (Djibouti)"
"429": "Spanish (Dominica)"
"430": "Spanish (Dominican Republic)"
"169": "Spanish (Ecuador)"
"172": "Spanish (El Salvador)"
"591": "Spanish (Equatorial Guinea)"
"577": "Spanish (Eritrea)"
"83": "Spanish (EU)"
"579": "Spanish (Falkland Islands (Islas Malvinas))"
"581": "Spanish (Faroe Islands)"
"578": "Spanish (Fiji)"
"584": "Spanish (French Guiana)"
"629": "Spanish (French Polynesia)"
"582": "Spanish (Gabon)"
"589": "Spanish (Gambia, The)"
"583": "Spanish (Georgia)"
"586": "Spanish (Ghana)"
"587": "Spanish (Gibraltar)"
"588": "Spanish (Greenland)"
"431": "Spanish (Grenada)"
"432": "Spanish (Guadeloupe)"
"592": "Spanish (Guam)"
"171": "Spanish (Guatemala)"
"585": "Spanish (Guernsey)"
"590": "Spanish (Guinea)"
"593": "Spanish (Guinea-Bissau)"
"594": "Spanish (Guyana)"
"595": "Spanish (Haiti)"
"802": "Spanish (Heard Island and McDonald Islands)"
"170": "Spanish (Honduras)"
"597": "Spanish (Jamaica)"
"596": "Spanish (Jersey)"
"599": "Spanish (Kiribati)"
"598": "Spanish (Kyrgyzstan)"
"603": "Spanish (Laos)"
"831": "Spanish (Latin America and the Caribbean)"
"606": "Spanish (Lesotho)"
"605": "Spanish (Liberia)"
"607": "Spanish (Libya)"
"615": "Spanish (Macao SAR)"
"610": "Spanish (Madagascar)"
"621": "Spanish (Malawi)"
"620": "Spanish (Maldives)"
"612": "Spanish (Mali)"
"611": "Spanish (Marshall Islands)"
"617": "Spanish (Martinique)"
"619": "Spanish (Mauritius)"
"664": "Spanish (Mayotte)"
"66": "Spanish (Mexico)"
"580": "Spanish (Micronesia)"
"609": "Spanish (Moldova)"
"608": "Spanish (Monaco)"
"614": "Spanish (Mongolia)"
"618": "Spanish (Montserrat)"
"622": "Spanish (Mozambique)"
"613": "Spanish (Myanmar)"
"623": "Spanish (Namibia)"
"627": "Spanish (Nauru)"
"624": "Spanish (New Caledonia)"
"433": "Spanish (Nicaragua)"
"625": "Spanish (Niger)"
"628": "Spanish (Niue)"
"626": "Spanish (Norfolk Island)"
"616": "Spanish (Northern Mariana Islands)"
"634": "Spanish (Palau)"
"633": "Spanish (Palestinian Authority)"
"168": "Spanish (Panama)"
"630": "Spanish (Papua New Guinea)"
"435": "Spanish (Paraguay)"
"434": "Spanish (Peru)"
"632": "Spanish (Pitcairn Islands)"
"483": "Spanish (Puerto Rico)"
"635": "Spanish (Réunion)"
"636": "Spanish (Rwanda)"
"804": "Spanish (Saint Barthélemy)"
"639": "Spanish (Saint Helena, Ascension and Tristan da Cunha)"
"601": "Spanish (Saint Kitts and Nevis)"
"604": "Spanish (Saint Lucia)"
"805": "Spanish (Saint Martin (French Part))"
"631": "Spanish (Saint Pierre and Miquelon)"
"658": "Spanish (Saint Vincent and the Grenadines)"
"663": "Spanish (Samoa)"
"641": "Spanish (San Marino)"
"645": "Spanish (São Tomé and Príncipe)"
"642": "Spanish (Senegal)"
"638": "Spanish (Seychelles)"
"640": "Spanish (Sierra Leone)"
"806": "Spanish (Sint Maarten (Dutch Part))"
"637": "Spanish (Solomon Islands)"
"643": "Spanish (Somalia)"
"807": "Spanish (South Georgia and the South Sandwich Islands)"
"15": "Spanish (Spain)"
"644": "Spanish (Suriname)"
"803": "Spanish (Svalbard and Jan Mayen)"
"646": "Spanish (Swaziland)"
"650": "Spanish (Tajikistan)"
"655": "Spanish (Tanzania)"
"652": "Spanish (Timor-Leste)"
"649": "Spanish (Togo)"
"651": "Spanish (Tokelau)"
"653": "Spanish (Tonga)"
"647": "Spanish (Turks and Caicos Islands)"
"654": "Spanish (Tuvalu)"
"808": "Spanish (U.S. Minor Outlying Islands)"
"660": "Spanish (U.S. Virgin Islands)"
"656": "Spanish (Uganda)"
"98": "Spanish (United States)"
"436": "Spanish (Uruguay)"
"661": "Spanish (Vanuatu)"
"657": "Spanish (Vatican City)"
"437": "Spanish (Venezuela)"
"662": "Spanish (Wallis and Futuna)"
"665": "Spanish (Zambia)"
"666": "Spanish (Zimbabwe)"
"84": "Swedish"
"501": "Swedish (Finland)"
"85": "Swedish (Sweden)"
"241": "Tajik"
"502": "Tajik (Arabic)"
"260": "Tajik (Cyrillic)"
"503": "Tajik (Latin)"
"155": "Tajik (Tajikistan)"
"242": "Tamil"
"50": "Tamil (India)"
"243": "Tatar"
"508": "Tatar (Arabic)"
"509": "Tatar (Cyrillic)"
"510": "Tatar (Latin)"
"159": "Tatar (Russia)"
"244": "Telugu"
"51": "Telugu (India)"
"89": "Thai"
"90": "Thai (Thailand)"
"245": "Tigrinya"
"156": "Tigrinya (Ethiopia)"
"91": "Turkish"
"92": "Turkish (Turkey)"
"246": "Turkmen"
"504": "Turkmen (Cyrillic)"
"821": "Turkmen (Cyrillic) (Turkey)"
"505": "Turkmen (Latin)"
"822": "Turkmen (Latin) (Turkey)"
"506": "Turkmen (Turkey)"
"157": "Turkmen (Turkmenistan)"
"93": "Ukrainian"
"94": "Ukrainian (Ukraine)"
"247": "Urdu"
"161": "Urdu (Pakistan)"
"248": "Uyghur"
"511": "Uyghur (Arabic)"
"160": "Uyghur (China)"
"512": "Uyghur (Cyrillic)"
"513": "Uyghur (Latin)"
"249": "Uzbek"
"514": "Uzbek (Cyrillic)"
"261": "Uzbek (Latin)"
"162": "Uzbek (Latin) (Uzbekistan)"
"793": "Uzbek (Uzbekistan)"
"117": "Valencian"
"250": "Vietnamese"
"163": "Vietnamese (Vietnam)"
"96": "Welsh"
"97": "Welsh (United Kingdom)"
"251": "Wolof"
"164": "Wolof (Senegal)"
"252": "Yoruba"
"515": "Yoruba (Latin)"
"166": "Yoruba (Nigeria)" | 71801 | # export
module.exports =
"186": "Afrikaans"
"110": "Afrikaans (South Africa)"
"187": "Albanian"
"151": "Albanian (Albania)"
"188": "Amharic"
"111": "Amharic (Ethiopia)"
"8": "Arabic"
"266": "Arabic (Algeria)"
"265": "Arabic (Bahrain)"
"36": "Arabic (Egypt)"
"268": "Arabic (Iraq)"
"57": "Arabic (Israel)"
"269": "Arabic (Jordan)"
"270": "Arabic (Kuwait)"
"271": "Arabic (Lebanon)"
"444": "Arabic (Libya)"
"272": "Arabic (Mauritania)"
"445": "Arabic (Morocco)"
"273": "Arabic (Oman)"
"274": "Arabic (Palestinian Authority)"
"275": "Arabic (Qatar)"
"13": "Arabic (Saudi Arabia)"
"276": "Arabic (Syria)"
"446": "Arabic (Tunisia)"
"264": "Arabic (United Arab Emirates)"
"267": "Arabic (Western Sahara (Disputed))"
"277": "Arabic (Yemen)"
"189": "Armenian"
"125": "Armenian (Armenia)"
"190": "Assamese"
"45": "Assamese (India)"
"191": "Azerbaijani"
"447": "Azerbaijani (Arabic)"
"794": "Azerbaijani (Arabic) (Azerbaijan)"
"448": "Azerbaijani (Cyrillic)"
"795": "Azerbaijani (Cyrillic) (Azerbaijan)"
"482": "Azerbaijani (Latin)"
"112": "Azerbaijani (Latin) (Azerbaijan)"
"194": "Bangla"
"114": "Bangla (Bangladesh)"
"46": "Bangla (India)"
"518": "Bangla (Pakistan)"
"192": "Basque"
"119": "Basque (Basque)"
"193": "Belarusian"
"113": "Belarusian (Belarus)"
"195": "Bosnian"
"449": "Bosnian (Cyrillic)"
"450": "Bosnian (Cyrillic) (Bosnia and Herzegovina)"
"451": "Bosnian (Latin)"
"115": "Bosnian (Latin) (Bosnia and Herzegovina)"
"26": "Bulgarian"
"27": "Bulgarian (Bulgaria)"
"463": "Cajun French"
"466": "Cajun French (Latin)"
"197": "Catalan"
"116": "Catalan (Spain)"
"200": "Cherokee"
"253": "Cherokee (Cherokee)"
"118": "Cherokee (Cherokee) (United States)"
"452": "Cherokee (Latin)"
"99": "Cherokee (United States)"
"2": "Chinese"
"5": "Chinese (China)"
"43": "Chinese (Hong Kong SAR)"
"442": "Chinese (Macao SAR)"
"262": "Chinese (Simplified)"
"479": "Chinese (Simplified) (China)"
"477": "Chinese (Simplified) (Singapore)"
"176": "Chinese (Singapore)"
"88": "Chinese (Taiwan)"
"263": "Chinese (Traditional)"
"481": "Chinese (Traditional) (Hong Kong)"
"478": "Chinese (Traditional) (Macao SAR)"
"480": "Chinese (Traditional) (Taiwan)"
"31": "Croatian"
"475": "Croatian (Bosnia and Herzegovina)"
"32": "Croatian (Croatia)"
"198": "Czech"
"33": "Czech (Czech Republic)"
"34": "Danish"
"35": "Danish (Denmark)"
"199": "Dari"
"145": "Dari (Afghanistan)"
"476": "Dari (Arabic)"
"67": "Dutch"
"178": "Dutch (Belgium)"
"68": "Dutch (Netherlands)"
"1": "English"
"280": "English (Afghanistan)"
"290": "English (Åland Islands)"
"283": "English (Albania)"
"526": "English (Algeria)"
"288": "English (American Samoa)"
"519": "English (Andorra)"
"286": "English (Angola)"
"282": "English (Anguilla)"
"287": "English (Antarctica)"
"281": "English (Antigua and Barbuda)"
"284": "English (Armenia)"
"289": "English (Aruba)"
"824": "English (Australia and New Zealand)"
"24": "English (Australia)"
"291": "English (Azerbaijan)"
"301": "English (Bahamas, The)"
"520": "English (Bangladesh)"
"293": "English (Barbados)"
"305": "English (Belarus)"
"453": "English (Belize)"
"296": "English (Benin)"
"298": "English (Bermuda)"
"302": "English (Bhutan)"
"522": "English (Bolivia)"
"300": "English (Bonaire, Saint Eustatius and Saba)"
"292": "English (Bosnia and Herzegovina)"
"304": "English (Botswana)"
"303": "English (Bouvet Island)"
"339": "English (British Indian Ocean Territory)"
"415": "English (British Virgin Islands)"
"299": "English (Brunei)"
"521": "English (Bulgaria)"
"294": "English (Burkina Faso)"
"295": "English (Burundi)"
"344": "English (Cambodia)"
"312": "English (Cameroon)"
"28": "English (Canada)"
"523": "English (Cape Verde)"
"826": "English (Caribbean)"
"348": "English (Cayman Islands)"
"308": "English (Central African Republic)"
"400": "English (Chad)"
"314": "English (Christmas Island)"
"306": "English (Cocos (Keeling) Islands)"
"346": "English (Comoros)"
"307": "English (Congo (DRC))"
"309": "English (Congo)"
"311": "English (Cook Islands)"
"310": "English (Côte d'Ivoire)"
"531": "English (Croatia)"
"313": "English (Curaçao)"
"315": "English (Djibouti)"
"524": "English (Dominica)"
"525": "English (Dominican Republic)"
"829": "English (Eastern Africa)"
"331": "English (Equatorial Guinea)"
"316": "English (Eritrea)"
"527": "English (Estonia)"
"317": "English (Ethiopia)"
"319": "English (Falkland Islands (Islas Malvinas))"
"321": "English (Faroe Islands)"
"318": "English (Fiji)"
"324": "English (French Guiana)"
"379": "English (French Polynesia)"
"401": "English (French Southern and Antarctic Lands)"
"322": "English (Gabon)"
"329": "English (Gambia, The)"
"323": "English (Georgia)"
"528": "English (Germany)"
"326": "English (Ghana)"
"327": "English (Gibraltar)"
"328": "English (Greenland)"
"529": "English (Guadeloupe)"
"333": "English (Guam)"
"325": "English (Guernsey)"
"330": "English (Guinea)"
"334": "English (Guinea-Bissau)"
"335": "English (Guyana)"
"337": "English (Haiti)"
"336": "English (Heard Island and McDonald Islands)"
"530": "English (Honduras)"
"183": "English (Hong Kong SAR)"
"534": "English (Iceland)"
"44": "English (India)"
"454": "English (Indonesia)"
"533": "English (Iraq)"
"173": "English (Ireland)"
"338": "English (Isle of Man)"
"532": "English (Israel)"
"341": "English (Jamaica)"
"340": "English (Jersey)"
"535": "English (Jordan)"
"455": "English (Kazakhstan)"
"342": "English (Kenya)"
"345": "English (Kiribati)"
"343": "English (Kyrgyzstan)"
"349": "English (Laos)"
"537": "English (Latvia)"
"353": "English (Lesotho)"
"352": "English (Liberia)"
"354": "English (Libya)"
"536": "English (Lithuania)"
"538": "English (Macao SAR)"
"360": "English (Macedonia, FYRO)"
"358": "English (Madagascar)"
"369": "English (Malawi)"
"457": "English (Malaysia)"
"368": "English (Maldives)"
"361": "English (Mali)"
"456": "English (Malta)"
"359": "English (Marshall Islands)"
"365": "English (Martinique)"
"367": "English (Mauritius)"
"420": "English (Mayotte)"
"320": "English (Micronesia)"
"356": "English (Moldova)"
"355": "English (Monaco)"
"363": "English (Mongolia)"
"366": "English (Montserrat)"
"370": "English (Mozambique)"
"362": "English (Myanmar)"
"371": "English (Namibia)"
"377": "English (Nauru)"
"376": "English (Nepal)"
"285": "English (Netherlands Antilles (Former))"
"69": "English (Netherlands)"
"372": "English (New Caledonia)"
"174": "English (New Zealand)"
"540": "English (Nicaragua)"
"373": "English (Niger)"
"375": "English (Nigeria)"
"378": "English (Niue)"
"374": "English (Norfolk Island)"
"825": "English (Northern America)"
"364": "English (Northern Mariana Islands)"
"541": "English (Oman)"
"459": "English (Pakistan)"
"383": "English (Palau)"
"543": "English (Palestinian Authority)"
"380": "English (Papua New Guinea)"
"458": "English (Philippines)"
"382": "English (Pitcairn Islands)"
"542": "English (Puerto Rico)"
"544": "English (Qatar)"
"384": "English (Réunion)"
"545": "English (Romania)"
"385": "English (Rwanda)"
"297": "English (Saint Barthélemy)"
"389": "English (Saint Helena, Ascension and Tristan da Cunha)"
"347": "English (Saint Kitts and Nevis)"
"350": "English (Saint Lucia)"
"357": "English (Saint Martin (French Part))"
"381": "English (Saint Pierre and Miquelon)"
"414": "English (Saint Vincent and the Grenadines)"
"419": "English (Samoa)"
"392": "English (San Marino)"
"396": "English (São Tomé and Príncipe)"
"393": "English (Senegal)"
"387": "English (Seychelles)"
"391": "English (Sierra Leone)"
"181": "English (Singapore)"
"397": "English (Sint Maarten (Dutch Part))"
"547": "English (Slovakia)"
"546": "English (Slovenia)"
"386": "English (Solomon Islands)"
"394": "English (Somalia)"
"182": "English (South Africa)"
"332": "English (South Georgia and the South Sandwich Islands)"
"828": "English (Southern Africa)"
"351": "English (Sri Lanka)"
"388": "English (Sudan)"
"395": "English (Suriname)"
"390": "English (Svalbard and Jan Mayen)"
"398": "English (Swaziland)"
"403": "English (Tajikistan)"
"410": "English (Tanzania)"
"548": "English (Thailand)"
"405": "English (Timor-Leste)"
"402": "English (Togo)"
"404": "English (Tokelau)"
"407": "English (Tonga)"
"408": "English (Trinidad and Tobago)"
"549": "English (Turkey)"
"406": "English (Turkmenistan)"
"399": "English (Turks and Caicos Islands)"
"409": "English (Tuvalu)"
"796": "English (U.S. Minor Outlying Islands)"
"416": "English (U.S. Virgin Islands)"
"411": "English (Uganda)"
"95": "English (United Kingdom)"
"4": "English (United States)"
"412": "English (Uzbekistan)"
"417": "English (Vanuatu)"
"413": "English (Vatican City)"
"460": "English (Vietnam)"
"418": "English (Wallis and Futuna)"
"827": "English (Western Africa)"
"421": "English (Zambia)"
"422": "English (Zimbabwe)"
"37": "Estonian"
"38": "Estonian (Estonia)"
"201": "Filipino"
"461": "Filipino (Latin)"
"121": "Filipino (Philippines)"
"39": "Finnish"
"40": "Finnish (Finland)"
"471": "Franco-Provençal"
"472": "Franco-Provençal (Latin)"
"18": "French"
"809": "French (Åland Islands)"
"670": "French (Albania)"
"697": "French (Algeria)"
"673": "French (American Samoa)"
"667": "French (Andorra)"
"671": "French (Angola)"
"669": "French (Anguilla)"
"672": "French (Antarctica)"
"668": "French (Antigua and Barbuda)"
"674": "French (Aruba)"
"675": "French (Azerbaijan)"
"684": "French (Bahamas, The)"
"677": "French (Barbados)"
"175": "French (Belgium)"
"687": "French (Belize)"
"680": "French (Benin)"
"681": "French (Bermuda)"
"685": "French (Bhutan)"
"683": "French (Bolivia)"
"810": "French (Bonaire, Saint Eustatius and Saba)"
"676": "French (Bosnia and Herzegovina)"
"686": "French (Botswana)"
"811": "French (Bouvet Island)"
"812": "French (British Indian Ocean Territory)"
"780": "French (British Virgin Islands)"
"682": "French (Brunei)"
"678": "French (Burkina Faso)"
"679": "French (Burundi)"
"467": "French (Cameroon)"
"29": "French (Canada)"
"692": "French (Cape Verde)"
"833": "French (Caribbean)"
"725": "French (Cayman Islands)"
"689": "French (Central African Republic)"
"769": "French (Chad)"
"693": "French (Christmas Island)"
"688": "French (Cocos (Keeling) Islands)"
"723": "French (Comoros)"
"464": "French (Congo (DRC))"
"690": "French (Congo)"
"691": "French (Cook Islands)"
"465": "French (Côte d'Ivoire)"
"813": "French (Curaçao)"
"694": "French (Djibouti)"
"695": "French (Dominica)"
"696": "French (Dominican Republic)"
"714": "French (Equatorial Guinea)"
"698": "French (Eritrea)"
"700": "French (Falkland Islands (Islas Malvinas))"
"702": "French (Faroe Islands)"
"699": "French (Fiji)"
"19": "French (France)"
"706": "French (French Guiana)"
"751": "French (French Polynesia)"
"703": "French (Gabon)"
"711": "French (Gambia, The)"
"705": "French (Georgia)"
"708": "French (Ghana)"
"709": "French (Gibraltar)"
"710": "French (Greenland)"
"704": "French (Grenada)"
"713": "French (Guadeloupe)"
"715": "French (Guam)"
"707": "French (Guernsey)"
"712": "French (Guinea)"
"716": "French (Guinea-Bissau)"
"717": "French (Guyana)"
"468": "French (Haiti)"
"814": "French (Heard Island and McDonald Islands)"
"718": "French (Honduras)"
"720": "French (Jamaica)"
"719": "French (Jersey)"
"722": "French (Kiribati)"
"721": "French (Kyrgyzstan)"
"726": "French (Laos)"
"729": "French (Lesotho)"
"728": "French (Liberia)"
"730": "French (Libya)"
"179": "French (Luxembourg)"
"736": "French (Macao SAR)"
"732": "French (Madagascar)"
"742": "French (Malawi)"
"741": "French (Maldives)"
"470": "French (Mali)"
"733": "French (Marshall Islands)"
"738": "French (Martinique)"
"740": "French (Mauritius)"
"785": "French (Mayotte)"
"701": "French (Micronesia)"
"731": "French (Moldova)"
"469": "French (Monaco)"
"735": "French (Mongolia)"
"739": "French (Montserrat)"
"438": "French (Morocco)"
"743": "French (Mozambique)"
"734": "French (Myanmar)"
"744": "French (Namibia)"
"749": "French (Nauru)"
"745": "French (New Caledonia)"
"748": "French (Nicaragua)"
"746": "French (Niger)"
"750": "French (Niue)"
"747": "French (Norfolk Island)"
"462": "French (North Africa)"
"834": "French (Northern America)"
"737": "French (Northern Mariana Islands)"
"756": "French (Palau)"
"755": "French (Palestinian Authority)"
"752": "French (Papua New Guinea)"
"754": "French (Pitcairn Islands)"
"473": "French (Réunion)"
"757": "French (Rwanda)"
"816": "French (Saint Barthélemy)"
"760": "French (Saint Helena, Ascension and Tristan da Cunha)"
"724": "French (Saint Kitts and Nevis)"
"727": "French (Saint Lucia)"
"817": "French (Saint Martin (French Part))"
"753": "French (Saint Pierre and Miquelon)"
"779": "French (Saint Vincent and the Grenadines)"
"784": "French (Samoa)"
"762": "French (San Marino)"
"766": "French (São Tomé and Príncipe)"
"763": "French (Senegal)"
"759": "French (Seychelles)"
"761": "French (Sierra Leone)"
"818": "French (Sint Maarten (Dutch Part))"
"758": "French (Solomon Islands)"
"764": "French (Somalia)"
"819": "French (South Georgia and the South Sandwich Islands)"
"765": "French (Suriname)"
"815": "French (Svalbard and Jan Mayen)"
"767": "French (Swaziland)"
"87": "French (Switzerland)"
"771": "French (Tajikistan)"
"776": "French (Tanzania)"
"773": "French (Timor-Leste)"
"770": "French (Togo)"
"772": "French (Tokelau)"
"774": "French (Tonga)"
"439": "French (Tunisia)"
"768": "French (Turks and Caicos Islands)"
"775": "French (Tuvalu)"
"820": "French (U.S. Minor Outlying Islands)"
"781": "French (U.S. Virgin Islands)"
"777": "French (Uganda)"
"782": "French (Vanuatu)"
"778": "French (Vatican City)"
"783": "French (Wallis and Futuna)"
"835": "French (Western Africa)"
"832": "French (Western Europe)"
"786": "French (Zambia)"
"787": "French (Zimbabwe)"
"81": "Galician"
"82": "Galician (Spain)"
"202": "Georgian"
"130": "Georgian (Georgia)"
"9": "German"
"177": "German (Austria)"
"14": "German (Germany)"
"278": "German (Liechtenstein)"
"180": "German (Luxembourg)"
"86": "German (Switzerland)"
"41": "Greek"
"279": "Greek (Cyprus)"
"42": "Greek (Greece)"
"203": "Gujarati"
"48": "Gujarati (India)"
"254": "Hausa"
"204": "Hausa (Latin)"
"124": "Hausa (Latin) (Nigeria)"
"55": "Hebrew"
"56": "Hebrew (Israel)"
"11": "Hindi"
"16": "Hindi (India)"
"104": "Hungarian"
"105": "Hungarian (Hungary)"
"205": "Icelandic"
"128": "Icelandic (Iceland)"
"206": "Igbo"
"484": "Igbo (Latin)"
"127": "Igbo (Nigeria)"
"207": "Indonesian"
"126": "Indonesian (Indonesia)"
"196": "Inuktitut"
"22": "Inuktitut (Canada)"
"30": "Inuktitut (Canadian) Canada"
"255": "Inuktitut (Latin)"
"129": "Inuktitut (Latin) (Canada)"
"208": "Irish"
"122": "Irish (Ireland)"
"209": "isiXhosa"
"165": "isiXhosa (South Africa)"
"210": "isiZulu"
"167": "isiZulu (South Africa)"
"58": "Italian"
"59": "Italian (Italy)"
"443": "Italian (Switzerland)"
"12": "Japanese"
"17": "Japanese (Japan)"
"211": "Kannada"
"52": "Kannada (India)"
"212": "Kazakh"
"131": "Kazakh (Kazakhstan)"
"213": "Khmer"
"132": "Khmer (Cambodia)"
"516": "K'iche'"
"901": "K'iche' (Guatemala)"
"517": "K'iche' (Latin)"
"215": "Kinyarwanda"
"148": "Kinyarwanda (Rwanda)"
"216": "Kiswahili"
"154": "Kiswahili (Kenya)"
"217": "Konkani"
"54": "Konkani (India)"
"60": "Korean"
"61": "Korean (Korea)"
"218": "Kurdish"
"256": "Kurdish (Arabic)"
"133": "Kurdish (Iraq)"
"219": "Kyrgyz"
"485": "Kyrgyz (Cyrillic)"
"134": "Kyrgyz (Kyrgyzstan)"
"903": "Lao"
"902": "Lao (Laos)"
"62": "Latvian"
"63": "Latvian (Latvia)"
"64": "Lithuanian"
"65": "Lithuanian (Lithuania)"
"220": "Luxembourgish"
"135": "Luxembourgish (Luxembourg)"
"221": "Macedonian"
"137": "Macedonian (Macedonia, FYRO)"
"222": "Malay"
"490": "Malay (Brunei)"
"140": "Malay (Malaysia)"
"223": "Malayalam"
"53": "Malayalam (India)"
"224": "Maltese"
"141": "Maltese (Malta)"
"225": "Maori"
"486": "Maori (Latin)"
"136": "Maori (New Zealand)"
"226": "Marathi"
"139": "Marathi (India)"
"227": "Mongolian"
"487": "Mongolian (Cyrillic)"
"138": "Mongolian (Mongolia)"
"488": "Mongolian (Mongolian)"
"489": "Mongolian (Phags-pa)"
"228": "Nepali"
"142": "Nepali (Nepal)"
"491": "Norwegian"
"229": "Norwegian (Bokmål)"
"70": "Norwegian (Bokmål) (Norway)"
"492": "Norwegian (Norway)"
"230": "Norwegian (Nynorsk)"
"71": "Norwegian (Nynorsk) (Norway)"
"231": "Odia"
"49": "Odia (India)"
"232": "Persian/Farsi"
"120": "Persian/Farsi (Iran)"
"72": "Polish"
"73": "Polish (Poland)"
"25": "Portuguese"
"74": "Portuguese (Brazil)"
"75": "Portuguese (Portugal)"
"3": "Pseudo"
"6": "Pseudoloc"
"823": "Pseudoloc (Latin) (Selfhost)"
"900": "Pseudoloc East Asian-language"
"7": "Pseudoloc Mirrored"
"233": "Punjabi"
"257": "Punjabi (Arabic)"
"493": "Punjabi (Devanagari)"
"47": "Punjabi (India)"
"144": "Punjabi (Pakistan)"
"234": "Quechua"
"495": "Quechua (Bolivia)"
"496": "Quechua (Ecuador)"
"147": "Quechua (Peru)"
"76": "Romanian"
"77": "Romanian (Romania)"
"788": "Russian (Armenia)"
"789": "Russian (Belarus)"
"790": "Russian (Kazakhstan)"
"791": "Russian (Ukraine)"
"235": "Scottish Gaelic"
"474": "Scottish Gaelic (Latin)"
"123": "Scottish Gaelic (United Kingdom)"
"236": "Serbian"
"259": "Serbian (Cyrillic)"
"152": "Serbian (Cyrillic) (Bosnia and Herzegovina)"
"498": "Serbian (Cyrillic) (Montenegro)"
"440": "Serbian (Cyrillic) (Serbia)"
"153": "Serbian (Cyrillic) (Serbia and Montenegro, Former)"
"100": "Serbian (Latin)"
"499": "Serbian (Latin) (Bosnia and Herzegovina)"
"441": "Serbian (Latin) (Montenegro)"
"101": "Serbian (Latin) (Serbia and Montenegro (Former))"
"500": "Serbian (Latin) (Serbia)"
"237": "Sesotho sa Leboa"
"143": "Sesotho sa Leboa (South Africa)"
"238": "Setswana"
"507": "Setswana (Botswana)"
"158": "Setswana (South Africa)"
"239": "Sindhi"
"258": "Sindhi (Arabic)"
"497": "Sindhi (Devanagari)"
"149": "Sindhi (Pakistan)"
"240": "Sinhala"
"150": "Sinhala (Sri Lanka)"
"78": "Slovak"
"79": "Slovak (Slovakia)"
"102": "Slovenian"
"792": "Slovenian (Sierra Leone)"
"103": "Slovenian (Slovenia)"
"10": "Spanish"
"797": "Spanish (Åland Islands)"
"552": "Spanish (Albania)"
"555": "Spanish (American Samoa)"
"830": "Spanish (Americas)"
"423": "Spanish (Andorra)"
"553": "Spanish (Angola)"
"551": "Spanish (Anguilla)"
"554": "Spanish (Antarctica)"
"550": "Spanish (Antigua and Barbuda)"
"23": "Spanish (Argentina)"
"556": "Spanish (Aruba)"
"557": "Spanish (Azerbaijan)"
"565": "Spanish (Bahamas, The)"
"559": "Spanish (Barbados)"
"425": "Spanish (Belize)"
"562": "Spanish (Benin)"
"563": "Spanish (Bermuda)"
"566": "Spanish (Bhutan)"
"424": "Spanish (Bolivia)"
"798": "Spanish (Bonaire, Saint Eustatius and Saba)"
"558": "Spanish (Bosnia and Herzegovina)"
"567": "Spanish (Botswana)"
"799": "Spanish (Bouvet Island)"
"800": "Spanish (British Indian Ocean Territory)"
"659": "Spanish (British Virgin Islands)"
"564": "Spanish (Brunei)"
"560": "Spanish (Burkina Faso)"
"561": "Spanish (Burundi)"
"574": "Spanish (Cameroon)"
"80": "Spanish (Canada)"
"428": "Spanish (Cape Verde)"
"602": "Spanish (Cayman Islands)"
"570": "Spanish (Central African Republic)"
"648": "Spanish (Chad)"
"184": "Spanish (Chile)"
"575": "Spanish (Christmas Island)"
"568": "Spanish (Cocos (Keeling) Islands)"
"185": "Spanish (Colombia)"
"600": "Spanish (Comoros)"
"569": "Spanish (Congo (DRC))"
"571": "Spanish (Congo)"
"573": "Spanish (Cook Islands)"
"426": "Spanish (Costa Rica)"
"572": "Spanish (Côte d'Ivoire)"
"427": "Spanish (Cuba)"
"801": "Spanish (Curaçao)"
"576": "Spanish (Djibouti)"
"429": "Spanish (Dominica)"
"430": "Spanish (Dominican Republic)"
"169": "Spanish (Ecuador)"
"172": "Spanish (El Salvador)"
"591": "Spanish (Equatorial Guinea)"
"577": "Spanish (Eritrea)"
"83": "Spanish (EU)"
"579": "Spanish (Falkland Islands (Islas Malvinas))"
"581": "Spanish (Faroe Islands)"
"578": "Spanish (Fiji)"
"584": "Spanish (French Guiana)"
"629": "Spanish (French Polynesia)"
"582": "Spanish (Gabon)"
"589": "Spanish (Gambia, The)"
"583": "Spanish (Georgia)"
"586": "Spanish (Ghana)"
"587": "Spanish (Gibraltar)"
"588": "Spanish (Greenland)"
"431": "Spanish (Grenada)"
"432": "Spanish (Guadeloupe)"
"592": "Spanish (Guam)"
"171": "Spanish (Guatemala)"
"585": "Spanish (Guernsey)"
"590": "Spanish (Guinea)"
"593": "Spanish (Guinea-Bissau)"
"594": "Spanish (Guyana)"
"595": "Spanish (Haiti)"
"802": "Spanish (Heard Island and McDonald Islands)"
"170": "Spanish (Honduras)"
"597": "Spanish (Jamaica)"
"596": "Spanish (Jersey)"
"599": "Spanish (Kiribati)"
"598": "Spanish (Kyrgyzstan)"
"603": "Spanish (Laos)"
"831": "Spanish (Latin America and the Caribbean)"
"606": "Spanish (Lesotho)"
"605": "Spanish (Liberia)"
"607": "Spanish (Libya)"
"615": "Spanish (Macao SAR)"
"610": "Spanish (Madagascar)"
"621": "Spanish (Malawi)"
"620": "Spanish (Maldives)"
"612": "Spanish (Mali)"
"611": "Spanish (Marshall Islands)"
"617": "Spanish (Martinique)"
"619": "Spanish (Mauritius)"
"664": "Spanish (Mayotte)"
"66": "Spanish (Mexico)"
"580": "Spanish (Micronesia)"
"609": "Spanish (Moldova)"
"608": "Spanish (Monaco)"
"614": "Spanish (Mongolia)"
"618": "Spanish (Montserrat)"
"622": "Spanish (Mozambique)"
"613": "Spanish (Myanmar)"
"623": "Spanish (Namibia)"
"627": "Spanish (Nauru)"
"624": "Spanish (New Caledonia)"
"433": "Spanish (Nicaragua)"
"625": "Spanish (Niger)"
"628": "Spanish (Niue)"
"626": "Spanish (Norfolk Island)"
"616": "Spanish (Northern Mariana Islands)"
"634": "Spanish (Palau)"
"633": "Spanish (Palestinian Authority)"
"168": "Spanish (Panama)"
"630": "Spanish (Papua New Guinea)"
"435": "Spanish (Paraguay)"
"434": "Spanish (Peru)"
"632": "Spanish (Pitcairn Islands)"
"483": "Spanish (Puerto Rico)"
"635": "Spanish (Réunion)"
"636": "Spanish (Rwanda)"
"804": "Spanish (Saint Barthélemy)"
"639": "Spanish (Saint Helena, Ascension and Tristan da Cunha)"
"601": "Spanish (Saint Kitts and Nevis)"
"604": "Spanish (Saint Lucia)"
"805": "Spanish (Saint Martin (French Part))"
"631": "Spanish (Saint Pierre and Miquelon)"
"658": "Spanish (Saint Vincent and the Grenadines)"
"663": "Spanish (Samoa)"
"641": "Spanish (San Marino)"
"645": "Spanish (São Tomé and Príncipe)"
"642": "Spanish (Senegal)"
"638": "Spanish (Seychelles)"
"640": "Spanish (Sierra Leone)"
"806": "Spanish (Sint Maarten (Dutch Part))"
"637": "Spanish (Solomon Islands)"
"643": "Spanish (Somalia)"
"807": "Spanish (South Georgia and the South Sandwich Islands)"
"15": "Spanish (Spain)"
"644": "Spanish (Suriname)"
"803": "Spanish (Svalbard and Jan Mayen)"
"646": "Spanish (Swaziland)"
"650": "Spanish (Tajikistan)"
"655": "Spanish (Tanzania)"
"652": "Spanish (Timor-Leste)"
"649": "Spanish (Togo)"
"651": "Spanish (Tokelau)"
"653": "Spanish (Tonga)"
"647": "Spanish (Turks and Caicos Islands)"
"654": "Spanish (Tuvalu)"
"808": "Spanish (U.S. Minor Outlying Islands)"
"660": "Spanish (U.S. Virgin Islands)"
"656": "Spanish (Uganda)"
"98": "Spanish (United States)"
"436": "Spanish (Uruguay)"
"661": "Spanish (Vanuatu)"
"657": "Spanish (Vatican City)"
"437": "Spanish (Venezuela)"
"662": "Spanish (Wallis and Futuna)"
"665": "Spanish (Zambia)"
"666": "Spanish (Zimbabwe)"
"84": "Swedish"
"501": "Swedish (Finland)"
"85": "Swedish (Sweden)"
"241": "Tajik"
"502": "Tajik (Arabic)"
"260": "Tajik (Cyrillic)"
"503": "Tajik (Latin)"
"155": "Tajik (Tajikistan)"
"242": "Tamil"
"50": "Tamil (India)"
"243": "Tatar"
"508": "Tatar (Arabic)"
"509": "Tatar (Cyrillic)"
"510": "Tatar (Latin)"
"159": "Tatar (Russia)"
"244": "Telugu"
"51": "Telugu (India)"
"89": "Thai"
"90": "Thai (Thailand)"
"245": "Tigrinya"
"156": "Tigrinya (Ethiopia)"
"91": "Turkish"
"92": "Turkish (Turkey)"
"246": "Turkmen"
"504": "Turkmen (Cyrillic)"
"821": "Turkmen (Cyrillic) (Turkey)"
"505": "Turkmen (Latin)"
"822": "Turkmen (Latin) (Turkey)"
"506": "Turkmen (Turkey)"
"157": "Turkmen (Turkmenistan)"
"93": "Ukrainian"
"94": "Ukrainian (Ukraine)"
"247": "Urdu"
"161": "Urdu (Pakistan)"
"248": "Uyghur"
"511": "Uyghur (Arabic)"
"160": "Uyghur (China)"
"512": "Uyghur (Cyrillic)"
"513": "Uyghur (Latin)"
"249": "Uzbek"
"514": "Uzbek (Cyrillic)"
"261": "Uzbek (Latin)"
"162": "Uzbek (Latin) (Uzbekistan)"
"793": "Uzbek (Uzbekistan)"
"117": "Valencian"
"250": "Vietnamese"
"163": "Vietnamese (Vietnam)"
"96": "Welsh"
"97": "Welsh (United Kingdom)"
"251": "Wolof"
"164": "Wolof (Senegal)"
"252": "Yoruba"
"515": "Yoruba (Latin)"
"166": "Yoruba (Nigeria)" | true | # export
module.exports =
"186": "Afrikaans"
"110": "Afrikaans (South Africa)"
"187": "Albanian"
"151": "Albanian (Albania)"
"188": "Amharic"
"111": "Amharic (Ethiopia)"
"8": "Arabic"
"266": "Arabic (Algeria)"
"265": "Arabic (Bahrain)"
"36": "Arabic (Egypt)"
"268": "Arabic (Iraq)"
"57": "Arabic (Israel)"
"269": "Arabic (Jordan)"
"270": "Arabic (Kuwait)"
"271": "Arabic (Lebanon)"
"444": "Arabic (Libya)"
"272": "Arabic (Mauritania)"
"445": "Arabic (Morocco)"
"273": "Arabic (Oman)"
"274": "Arabic (Palestinian Authority)"
"275": "Arabic (Qatar)"
"13": "Arabic (Saudi Arabia)"
"276": "Arabic (Syria)"
"446": "Arabic (Tunisia)"
"264": "Arabic (United Arab Emirates)"
"267": "Arabic (Western Sahara (Disputed))"
"277": "Arabic (Yemen)"
"189": "Armenian"
"125": "Armenian (Armenia)"
"190": "Assamese"
"45": "Assamese (India)"
"191": "Azerbaijani"
"447": "Azerbaijani (Arabic)"
"794": "Azerbaijani (Arabic) (Azerbaijan)"
"448": "Azerbaijani (Cyrillic)"
"795": "Azerbaijani (Cyrillic) (Azerbaijan)"
"482": "Azerbaijani (Latin)"
"112": "Azerbaijani (Latin) (Azerbaijan)"
"194": "Bangla"
"114": "Bangla (Bangladesh)"
"46": "Bangla (India)"
"518": "Bangla (Pakistan)"
"192": "Basque"
"119": "Basque (Basque)"
"193": "Belarusian"
"113": "Belarusian (Belarus)"
"195": "Bosnian"
"449": "Bosnian (Cyrillic)"
"450": "Bosnian (Cyrillic) (Bosnia and Herzegovina)"
"451": "Bosnian (Latin)"
"115": "Bosnian (Latin) (Bosnia and Herzegovina)"
"26": "Bulgarian"
"27": "Bulgarian (Bulgaria)"
"463": "Cajun French"
"466": "Cajun French (Latin)"
"197": "Catalan"
"116": "Catalan (Spain)"
"200": "Cherokee"
"253": "Cherokee (Cherokee)"
"118": "Cherokee (Cherokee) (United States)"
"452": "Cherokee (Latin)"
"99": "Cherokee (United States)"
"2": "Chinese"
"5": "Chinese (China)"
"43": "Chinese (Hong Kong SAR)"
"442": "Chinese (Macao SAR)"
"262": "Chinese (Simplified)"
"479": "Chinese (Simplified) (China)"
"477": "Chinese (Simplified) (Singapore)"
"176": "Chinese (Singapore)"
"88": "Chinese (Taiwan)"
"263": "Chinese (Traditional)"
"481": "Chinese (Traditional) (Hong Kong)"
"478": "Chinese (Traditional) (Macao SAR)"
"480": "Chinese (Traditional) (Taiwan)"
"31": "Croatian"
"475": "Croatian (Bosnia and Herzegovina)"
"32": "Croatian (Croatia)"
"198": "Czech"
"33": "Czech (Czech Republic)"
"34": "Danish"
"35": "Danish (Denmark)"
"199": "Dari"
"145": "Dari (Afghanistan)"
"476": "Dari (Arabic)"
"67": "Dutch"
"178": "Dutch (Belgium)"
"68": "Dutch (Netherlands)"
"1": "English"
"280": "English (Afghanistan)"
"290": "English (Åland Islands)"
"283": "English (Albania)"
"526": "English (Algeria)"
"288": "English (American Samoa)"
"519": "English (Andorra)"
"286": "English (Angola)"
"282": "English (Anguilla)"
"287": "English (Antarctica)"
"281": "English (Antigua and Barbuda)"
"284": "English (Armenia)"
"289": "English (Aruba)"
"824": "English (Australia and New Zealand)"
"24": "English (Australia)"
"291": "English (Azerbaijan)"
"301": "English (Bahamas, The)"
"520": "English (Bangladesh)"
"293": "English (Barbados)"
"305": "English (Belarus)"
"453": "English (Belize)"
"296": "English (Benin)"
"298": "English (Bermuda)"
"302": "English (Bhutan)"
"522": "English (Bolivia)"
"300": "English (Bonaire, Saint Eustatius and Saba)"
"292": "English (Bosnia and Herzegovina)"
"304": "English (Botswana)"
"303": "English (Bouvet Island)"
"339": "English (British Indian Ocean Territory)"
"415": "English (British Virgin Islands)"
"299": "English (Brunei)"
"521": "English (Bulgaria)"
"294": "English (Burkina Faso)"
"295": "English (Burundi)"
"344": "English (Cambodia)"
"312": "English (Cameroon)"
"28": "English (Canada)"
"523": "English (Cape Verde)"
"826": "English (Caribbean)"
"348": "English (Cayman Islands)"
"308": "English (Central African Republic)"
"400": "English (Chad)"
"314": "English (Christmas Island)"
"306": "English (Cocos (Keeling) Islands)"
"346": "English (Comoros)"
"307": "English (Congo (DRC))"
"309": "English (Congo)"
"311": "English (Cook Islands)"
"310": "English (Côte d'Ivoire)"
"531": "English (Croatia)"
"313": "English (Curaçao)"
"315": "English (Djibouti)"
"524": "English (Dominica)"
"525": "English (Dominican Republic)"
"829": "English (Eastern Africa)"
"331": "English (Equatorial Guinea)"
"316": "English (Eritrea)"
"527": "English (Estonia)"
"317": "English (Ethiopia)"
"319": "English (Falkland Islands (Islas Malvinas))"
"321": "English (Faroe Islands)"
"318": "English (Fiji)"
"324": "English (French Guiana)"
"379": "English (French Polynesia)"
"401": "English (French Southern and Antarctic Lands)"
"322": "English (Gabon)"
"329": "English (Gambia, The)"
"323": "English (Georgia)"
"528": "English (Germany)"
"326": "English (Ghana)"
"327": "English (Gibraltar)"
"328": "English (Greenland)"
"529": "English (Guadeloupe)"
"333": "English (Guam)"
"325": "English (Guernsey)"
"330": "English (Guinea)"
"334": "English (Guinea-Bissau)"
"335": "English (Guyana)"
"337": "English (Haiti)"
"336": "English (Heard Island and McDonald Islands)"
"530": "English (Honduras)"
"183": "English (Hong Kong SAR)"
"534": "English (Iceland)"
"44": "English (India)"
"454": "English (Indonesia)"
"533": "English (Iraq)"
"173": "English (Ireland)"
"338": "English (Isle of Man)"
"532": "English (Israel)"
"341": "English (Jamaica)"
"340": "English (Jersey)"
"535": "English (Jordan)"
"455": "English (Kazakhstan)"
"342": "English (Kenya)"
"345": "English (Kiribati)"
"343": "English (Kyrgyzstan)"
"349": "English (Laos)"
"537": "English (Latvia)"
"353": "English (Lesotho)"
"352": "English (Liberia)"
"354": "English (Libya)"
"536": "English (Lithuania)"
"538": "English (Macao SAR)"
"360": "English (Macedonia, FYRO)"
"358": "English (Madagascar)"
"369": "English (Malawi)"
"457": "English (Malaysia)"
"368": "English (Maldives)"
"361": "English (Mali)"
"456": "English (Malta)"
"359": "English (Marshall Islands)"
"365": "English (Martinique)"
"367": "English (Mauritius)"
"420": "English (Mayotte)"
"320": "English (Micronesia)"
"356": "English (Moldova)"
"355": "English (Monaco)"
"363": "English (Mongolia)"
"366": "English (Montserrat)"
"370": "English (Mozambique)"
"362": "English (Myanmar)"
"371": "English (Namibia)"
"377": "English (Nauru)"
"376": "English (Nepal)"
"285": "English (Netherlands Antilles (Former))"
"69": "English (Netherlands)"
"372": "English (New Caledonia)"
"174": "English (New Zealand)"
"540": "English (Nicaragua)"
"373": "English (Niger)"
"375": "English (Nigeria)"
"378": "English (Niue)"
"374": "English (Norfolk Island)"
"825": "English (Northern America)"
"364": "English (Northern Mariana Islands)"
"541": "English (Oman)"
"459": "English (Pakistan)"
"383": "English (Palau)"
"543": "English (Palestinian Authority)"
"380": "English (Papua New Guinea)"
"458": "English (Philippines)"
"382": "English (Pitcairn Islands)"
"542": "English (Puerto Rico)"
"544": "English (Qatar)"
"384": "English (Réunion)"
"545": "English (Romania)"
"385": "English (Rwanda)"
"297": "English (Saint Barthélemy)"
"389": "English (Saint Helena, Ascension and Tristan da Cunha)"
"347": "English (Saint Kitts and Nevis)"
"350": "English (Saint Lucia)"
"357": "English (Saint Martin (French Part))"
"381": "English (Saint Pierre and Miquelon)"
"414": "English (Saint Vincent and the Grenadines)"
"419": "English (Samoa)"
"392": "English (San Marino)"
"396": "English (São Tomé and Príncipe)"
"393": "English (Senegal)"
"387": "English (Seychelles)"
"391": "English (Sierra Leone)"
"181": "English (Singapore)"
"397": "English (Sint Maarten (Dutch Part))"
"547": "English (Slovakia)"
"546": "English (Slovenia)"
"386": "English (Solomon Islands)"
"394": "English (Somalia)"
"182": "English (South Africa)"
"332": "English (South Georgia and the South Sandwich Islands)"
"828": "English (Southern Africa)"
"351": "English (Sri Lanka)"
"388": "English (Sudan)"
"395": "English (Suriname)"
"390": "English (Svalbard and Jan Mayen)"
"398": "English (Swaziland)"
"403": "English (Tajikistan)"
"410": "English (Tanzania)"
"548": "English (Thailand)"
"405": "English (Timor-Leste)"
"402": "English (Togo)"
"404": "English (Tokelau)"
"407": "English (Tonga)"
"408": "English (Trinidad and Tobago)"
"549": "English (Turkey)"
"406": "English (Turkmenistan)"
"399": "English (Turks and Caicos Islands)"
"409": "English (Tuvalu)"
"796": "English (U.S. Minor Outlying Islands)"
"416": "English (U.S. Virgin Islands)"
"411": "English (Uganda)"
"95": "English (United Kingdom)"
"4": "English (United States)"
"412": "English (Uzbekistan)"
"417": "English (Vanuatu)"
"413": "English (Vatican City)"
"460": "English (Vietnam)"
"418": "English (Wallis and Futuna)"
"827": "English (Western Africa)"
"421": "English (Zambia)"
"422": "English (Zimbabwe)"
"37": "Estonian"
"38": "Estonian (Estonia)"
"201": "Filipino"
"461": "Filipino (Latin)"
"121": "Filipino (Philippines)"
"39": "Finnish"
"40": "Finnish (Finland)"
"471": "Franco-Provençal"
"472": "Franco-Provençal (Latin)"
"18": "French"
"809": "French (Åland Islands)"
"670": "French (Albania)"
"697": "French (Algeria)"
"673": "French (American Samoa)"
"667": "French (Andorra)"
"671": "French (Angola)"
"669": "French (Anguilla)"
"672": "French (Antarctica)"
"668": "French (Antigua and Barbuda)"
"674": "French (Aruba)"
"675": "French (Azerbaijan)"
"684": "French (Bahamas, The)"
"677": "French (Barbados)"
"175": "French (Belgium)"
"687": "French (Belize)"
"680": "French (Benin)"
"681": "French (Bermuda)"
"685": "French (Bhutan)"
"683": "French (Bolivia)"
"810": "French (Bonaire, Saint Eustatius and Saba)"
"676": "French (Bosnia and Herzegovina)"
"686": "French (Botswana)"
"811": "French (Bouvet Island)"
"812": "French (British Indian Ocean Territory)"
"780": "French (British Virgin Islands)"
"682": "French (Brunei)"
"678": "French (Burkina Faso)"
"679": "French (Burundi)"
"467": "French (Cameroon)"
"29": "French (Canada)"
"692": "French (Cape Verde)"
"833": "French (Caribbean)"
"725": "French (Cayman Islands)"
"689": "French (Central African Republic)"
"769": "French (Chad)"
"693": "French (Christmas Island)"
"688": "French (Cocos (Keeling) Islands)"
"723": "French (Comoros)"
"464": "French (Congo (DRC))"
"690": "French (Congo)"
"691": "French (Cook Islands)"
"465": "French (Côte d'Ivoire)"
"813": "French (Curaçao)"
"694": "French (Djibouti)"
"695": "French (Dominica)"
"696": "French (Dominican Republic)"
"714": "French (Equatorial Guinea)"
"698": "French (Eritrea)"
"700": "French (Falkland Islands (Islas Malvinas))"
"702": "French (Faroe Islands)"
"699": "French (Fiji)"
"19": "French (France)"
"706": "French (French Guiana)"
"751": "French (French Polynesia)"
"703": "French (Gabon)"
"711": "French (Gambia, The)"
"705": "French (Georgia)"
"708": "French (Ghana)"
"709": "French (Gibraltar)"
"710": "French (Greenland)"
"704": "French (Grenada)"
"713": "French (Guadeloupe)"
"715": "French (Guam)"
"707": "French (Guernsey)"
"712": "French (Guinea)"
"716": "French (Guinea-Bissau)"
"717": "French (Guyana)"
"468": "French (Haiti)"
"814": "French (Heard Island and McDonald Islands)"
"718": "French (Honduras)"
"720": "French (Jamaica)"
"719": "French (Jersey)"
"722": "French (Kiribati)"
"721": "French (Kyrgyzstan)"
"726": "French (Laos)"
"729": "French (Lesotho)"
"728": "French (Liberia)"
"730": "French (Libya)"
"179": "French (Luxembourg)"
"736": "French (Macao SAR)"
"732": "French (Madagascar)"
"742": "French (Malawi)"
"741": "French (Maldives)"
"470": "French (Mali)"
"733": "French (Marshall Islands)"
"738": "French (Martinique)"
"740": "French (Mauritius)"
"785": "French (Mayotte)"
"701": "French (Micronesia)"
"731": "French (Moldova)"
"469": "French (Monaco)"
"735": "French (Mongolia)"
"739": "French (Montserrat)"
"438": "French (Morocco)"
"743": "French (Mozambique)"
"734": "French (Myanmar)"
"744": "French (Namibia)"
"749": "French (Nauru)"
"745": "French (New Caledonia)"
"748": "French (Nicaragua)"
"746": "French (Niger)"
"750": "French (Niue)"
"747": "French (Norfolk Island)"
"462": "French (North Africa)"
"834": "French (Northern America)"
"737": "French (Northern Mariana Islands)"
"756": "French (Palau)"
"755": "French (Palestinian Authority)"
"752": "French (Papua New Guinea)"
"754": "French (Pitcairn Islands)"
"473": "French (Réunion)"
"757": "French (Rwanda)"
"816": "French (Saint Barthélemy)"
"760": "French (Saint Helena, Ascension and Tristan da Cunha)"
"724": "French (Saint Kitts and Nevis)"
"727": "French (Saint Lucia)"
"817": "French (Saint Martin (French Part))"
"753": "French (Saint Pierre and Miquelon)"
"779": "French (Saint Vincent and the Grenadines)"
"784": "French (Samoa)"
"762": "French (San Marino)"
"766": "French (São Tomé and Príncipe)"
"763": "French (Senegal)"
"759": "French (Seychelles)"
"761": "French (Sierra Leone)"
"818": "French (Sint Maarten (Dutch Part))"
"758": "French (Solomon Islands)"
"764": "French (Somalia)"
"819": "French (South Georgia and the South Sandwich Islands)"
"765": "French (Suriname)"
"815": "French (SvalPI:NAME:<NAME>END_PIard and PI:NAME:<NAME>END_PI)"
"767": "French (Swaziland)"
"87": "French (Switzerland)"
"771": "French (Tajikistan)"
"776": "French (Tanzania)"
"773": "French (Timor-Leste)"
"770": "French (Togo)"
"772": "French (Tokelau)"
"774": "French (Tonga)"
"439": "French (Tunisia)"
"768": "French (Turks and Caicos Islands)"
"775": "French (Tuvalu)"
"820": "French (U.S. Minor Outlying Islands)"
"781": "French (U.S. Virgin Islands)"
"777": "French (Uganda)"
"782": "French (Vanuatu)"
"778": "French (Vatican City)"
"783": "French (Wallis and Futuna)"
"835": "French (Western Africa)"
"832": "French (Western Europe)"
"786": "French (Zambia)"
"787": "French (Zimbabwe)"
"81": "Galician"
"82": "Galician (Spain)"
"202": "Georgian"
"130": "Georgian (Georgia)"
"9": "German"
"177": "German (Austria)"
"14": "German (Germany)"
"278": "German (Liechtenstein)"
"180": "German (Luxembourg)"
"86": "German (Switzerland)"
"41": "Greek"
"279": "Greek (Cyprus)"
"42": "Greek (Greece)"
"203": "Gujarati"
"48": "Gujarati (India)"
"254": "Hausa"
"204": "Hausa (Latin)"
"124": "Hausa (Latin) (Nigeria)"
"55": "Hebrew"
"56": "Hebrew (Israel)"
"11": "Hindi"
"16": "Hindi (India)"
"104": "Hungarian"
"105": "Hungarian (Hungary)"
"205": "Icelandic"
"128": "Icelandic (Iceland)"
"206": "Igbo"
"484": "Igbo (Latin)"
"127": "Igbo (Nigeria)"
"207": "Indonesian"
"126": "Indonesian (Indonesia)"
"196": "Inuktitut"
"22": "Inuktitut (Canada)"
"30": "Inuktitut (Canadian) Canada"
"255": "Inuktitut (Latin)"
"129": "Inuktitut (Latin) (Canada)"
"208": "Irish"
"122": "Irish (Ireland)"
"209": "isiXhosa"
"165": "isiXhosa (South Africa)"
"210": "isiZulu"
"167": "isiZulu (South Africa)"
"58": "Italian"
"59": "Italian (Italy)"
"443": "Italian (Switzerland)"
"12": "Japanese"
"17": "Japanese (Japan)"
"211": "Kannada"
"52": "Kannada (India)"
"212": "Kazakh"
"131": "Kazakh (Kazakhstan)"
"213": "Khmer"
"132": "Khmer (Cambodia)"
"516": "K'iche'"
"901": "K'iche' (Guatemala)"
"517": "K'iche' (Latin)"
"215": "Kinyarwanda"
"148": "Kinyarwanda (Rwanda)"
"216": "Kiswahili"
"154": "Kiswahili (Kenya)"
"217": "Konkani"
"54": "Konkani (India)"
"60": "Korean"
"61": "Korean (Korea)"
"218": "Kurdish"
"256": "Kurdish (Arabic)"
"133": "Kurdish (Iraq)"
"219": "Kyrgyz"
"485": "Kyrgyz (Cyrillic)"
"134": "Kyrgyz (Kyrgyzstan)"
"903": "Lao"
"902": "Lao (Laos)"
"62": "Latvian"
"63": "Latvian (Latvia)"
"64": "Lithuanian"
"65": "Lithuanian (Lithuania)"
"220": "Luxembourgish"
"135": "Luxembourgish (Luxembourg)"
"221": "Macedonian"
"137": "Macedonian (Macedonia, FYRO)"
"222": "Malay"
"490": "Malay (Brunei)"
"140": "Malay (Malaysia)"
"223": "Malayalam"
"53": "Malayalam (India)"
"224": "Maltese"
"141": "Maltese (Malta)"
"225": "Maori"
"486": "Maori (Latin)"
"136": "Maori (New Zealand)"
"226": "Marathi"
"139": "Marathi (India)"
"227": "Mongolian"
"487": "Mongolian (Cyrillic)"
"138": "Mongolian (Mongolia)"
"488": "Mongolian (Mongolian)"
"489": "Mongolian (Phags-pa)"
"228": "Nepali"
"142": "Nepali (Nepal)"
"491": "Norwegian"
"229": "Norwegian (Bokmål)"
"70": "Norwegian (Bokmål) (Norway)"
"492": "Norwegian (Norway)"
"230": "Norwegian (Nynorsk)"
"71": "Norwegian (Nynorsk) (Norway)"
"231": "Odia"
"49": "Odia (India)"
"232": "Persian/Farsi"
"120": "Persian/Farsi (Iran)"
"72": "Polish"
"73": "Polish (Poland)"
"25": "Portuguese"
"74": "Portuguese (Brazil)"
"75": "Portuguese (Portugal)"
"3": "Pseudo"
"6": "Pseudoloc"
"823": "Pseudoloc (Latin) (Selfhost)"
"900": "Pseudoloc East Asian-language"
"7": "Pseudoloc Mirrored"
"233": "Punjabi"
"257": "Punjabi (Arabic)"
"493": "Punjabi (Devanagari)"
"47": "Punjabi (India)"
"144": "Punjabi (Pakistan)"
"234": "Quechua"
"495": "Quechua (Bolivia)"
"496": "Quechua (Ecuador)"
"147": "Quechua (Peru)"
"76": "Romanian"
"77": "Romanian (Romania)"
"788": "Russian (Armenia)"
"789": "Russian (Belarus)"
"790": "Russian (Kazakhstan)"
"791": "Russian (Ukraine)"
"235": "Scottish Gaelic"
"474": "Scottish Gaelic (Latin)"
"123": "Scottish Gaelic (United Kingdom)"
"236": "Serbian"
"259": "Serbian (Cyrillic)"
"152": "Serbian (Cyrillic) (Bosnia and Herzegovina)"
"498": "Serbian (Cyrillic) (Montenegro)"
"440": "Serbian (Cyrillic) (Serbia )"
"153": "Serbian (Cyrillic) (Serbia and Montenegro, Former)"
"100": "Serbian (Latin)"
"499": "Serbian (Latin) (Bosnia and Herzegovina)"
"441": "Serbian (Latin) (Montenegro)"
"101": "Serbian (Latin) (Serbia and Montenegro (Former))"
"500": "Serbian (Latin) (Serbia)"
"237": "Sesotho sa Leboa"
"143": "Sesotho sa Leboa (South Africa)"
"238": "Setswana"
"507": "Setswana (Botswana)"
"158": "Setswana (South Africa)"
"239": "Sindhi"
"258": "Sindhi (Arabic)"
"497": "Sindhi (Devanagari)"
"149": "Sindhi (Pakistan)"
"240": "Sinhala"
"150": "Sinhala (Sri Lanka)"
"78": "Slovak"
"79": "Slovak (Slovakia)"
"102": "Slovenian"
"792": "Slovenian (Sierra Leone)"
"103": "Slovenian (Slovenia)"
"10": "Spanish"
"797": "Spanish (Åland Islands)"
"552": "Spanish (Albania)"
"555": "Spanish (American Samoa)"
"830": "Spanish (Americas)"
"423": "Spanish (Andorra)"
"553": "Spanish (Angola)"
"551": "Spanish (Anguilla)"
"554": "Spanish (Antarctica)"
"550": "Spanish (Antigua and Barbuda)"
"23": "Spanish (Argentina)"
"556": "Spanish (Aruba)"
"557": "Spanish (Azerbaijan)"
"565": "Spanish (Bahamas, The)"
"559": "Spanish (Barbados)"
"425": "Spanish (Belize)"
"562": "Spanish (Benin)"
"563": "Spanish (Bermuda)"
"566": "Spanish (Bhutan)"
"424": "Spanish (Bolivia)"
"798": "Spanish (Bonaire, Saint Eustatius and Saba)"
"558": "Spanish (Bosnia and Herzegovina)"
"567": "Spanish (Botswana)"
"799": "Spanish (Bouvet Island)"
"800": "Spanish (British Indian Ocean Territory)"
"659": "Spanish (British Virgin Islands)"
"564": "Spanish (Brunei)"
"560": "Spanish (Burkina Faso)"
"561": "Spanish (Burundi)"
"574": "Spanish (Cameroon)"
"80": "Spanish (Canada)"
"428": "Spanish (Cape Verde)"
"602": "Spanish (Cayman Islands)"
"570": "Spanish (Central African Republic)"
"648": "Spanish (Chad)"
"184": "Spanish (Chile)"
"575": "Spanish (Christmas Island)"
"568": "Spanish (Cocos (Keeling) Islands)"
"185": "Spanish (Colombia)"
"600": "Spanish (Comoros)"
"569": "Spanish (Congo (DRC))"
"571": "Spanish (Congo)"
"573": "Spanish (Cook Islands)"
"426": "Spanish (Costa Rica)"
"572": "Spanish (Côte d'Ivoire)"
"427": "Spanish (Cuba)"
"801": "Spanish (Curaçao)"
"576": "Spanish (Djibouti)"
"429": "Spanish (Dominica)"
"430": "Spanish (Dominican Republic)"
"169": "Spanish (Ecuador)"
"172": "Spanish (El Salvador)"
"591": "Spanish (Equatorial Guinea)"
"577": "Spanish (Eritrea)"
"83": "Spanish (EU)"
"579": "Spanish (Falkland Islands (Islas Malvinas))"
"581": "Spanish (Faroe Islands)"
"578": "Spanish (Fiji)"
"584": "Spanish (French Guiana)"
"629": "Spanish (French Polynesia)"
"582": "Spanish (Gabon)"
"589": "Spanish (Gambia, The)"
"583": "Spanish (Georgia)"
"586": "Spanish (Ghana)"
"587": "Spanish (Gibraltar)"
"588": "Spanish (Greenland)"
"431": "Spanish (Grenada)"
"432": "Spanish (Guadeloupe)"
"592": "Spanish (Guam)"
"171": "Spanish (Guatemala)"
"585": "Spanish (Guernsey)"
"590": "Spanish (Guinea)"
"593": "Spanish (Guinea-Bissau)"
"594": "Spanish (Guyana)"
"595": "Spanish (Haiti)"
"802": "Spanish (Heard Island and McDonald Islands)"
"170": "Spanish (Honduras)"
"597": "Spanish (Jamaica)"
"596": "Spanish (Jersey)"
"599": "Spanish (Kiribati)"
"598": "Spanish (Kyrgyzstan)"
"603": "Spanish (Laos)"
"831": "Spanish (Latin America and the Caribbean)"
"606": "Spanish (Lesotho)"
"605": "Spanish (Liberia)"
"607": "Spanish (Libya)"
"615": "Spanish (Macao SAR)"
"610": "Spanish (Madagascar)"
"621": "Spanish (Malawi)"
"620": "Spanish (Maldives)"
"612": "Spanish (Mali)"
"611": "Spanish (Marshall Islands)"
"617": "Spanish (Martinique)"
"619": "Spanish (Mauritius)"
"664": "Spanish (Mayotte)"
"66": "Spanish (Mexico)"
"580": "Spanish (Micronesia)"
"609": "Spanish (Moldova)"
"608": "Spanish (Monaco)"
"614": "Spanish (Mongolia)"
"618": "Spanish (Montserrat)"
"622": "Spanish (Mozambique)"
"613": "Spanish (Myanmar)"
"623": "Spanish (Namibia)"
"627": "Spanish (Nauru)"
"624": "Spanish (New Caledonia)"
"433": "Spanish (Nicaragua)"
"625": "Spanish (Niger)"
"628": "Spanish (Niue)"
"626": "Spanish (Norfolk Island)"
"616": "Spanish (Northern Mariana Islands)"
"634": "Spanish (Palau)"
"633": "Spanish (Palestinian Authority)"
"168": "Spanish (Panama)"
"630": "Spanish (Papua New Guinea)"
"435": "Spanish (Paraguay)"
"434": "Spanish (Peru)"
"632": "Spanish (Pitcairn Islands)"
"483": "Spanish (Puerto Rico)"
"635": "Spanish (Réunion)"
"636": "Spanish (Rwanda)"
"804": "Spanish (Saint Barthélemy)"
"639": "Spanish (Saint Helena, Ascension and Tristan da Cunha)"
"601": "Spanish (Saint Kitts and Nevis)"
"604": "Spanish (Saint Lucia)"
"805": "Spanish (Saint Martin (French Part))"
"631": "Spanish (Saint Pierre and Miquelon)"
"658": "Spanish (Saint Vincent and the Grenadines)"
"663": "Spanish (Samoa)"
"641": "Spanish (San Marino)"
"645": "Spanish (São Tomé and Príncipe)"
"642": "Spanish (Senegal)"
"638": "Spanish (Seychelles)"
"640": "Spanish (Sierra Leone)"
"806": "Spanish (Sint Maarten (Dutch Part))"
"637": "Spanish (Solomon Islands)"
"643": "Spanish (Somalia)"
"807": "Spanish (South Georgia and the South Sandwich Islands)"
"15": "Spanish (Spain)"
"644": "Spanish (Suriname)"
"803": "Spanish (Svalbard and Jan Mayen)"
"646": "Spanish (Swaziland)"
"650": "Spanish (Tajikistan)"
"655": "Spanish (Tanzania)"
"652": "Spanish (Timor-Leste)"
"649": "Spanish (Togo)"
"651": "Spanish (Tokelau)"
"653": "Spanish (Tonga)"
"647": "Spanish (Turks and Caicos Islands)"
"654": "Spanish (Tuvalu)"
"808": "Spanish (U.S. Minor Outlying Islands)"
"660": "Spanish (U.S. Virgin Islands)"
"656": "Spanish (Uganda)"
"98": "Spanish (United States)"
"436": "Spanish (Uruguay)"
"661": "Spanish (Vanuatu)"
"657": "Spanish (Vatican City)"
"437": "Spanish (Venezuela)"
"662": "Spanish (Wallis and Futuna)"
"665": "Spanish (Zambia)"
"666": "Spanish (Zimbabwe)"
"84": "Swedish"
"501": "Swedish (Finland)"
"85": "Swedish (Sweden)"
"241": "Tajik"
"502": "Tajik (Arabic)"
"260": "Tajik (Cyrillic)"
"503": "Tajik (Latin)"
"155": "Tajik (Tajikistan)"
"242": "Tamil"
"50": "Tamil (India)"
"243": "Tatar"
"508": "Tatar (Arabic)"
"509": "Tatar (Cyrillic)"
"510": "Tatar (Latin)"
"159": "Tatar (Russia)"
"244": "Telugu"
"51": "Telugu (India)"
"89": "Thai"
"90": "Thai (Thailand)"
"245": "Tigrinya"
"156": "Tigrinya (Ethiopia)"
"91": "Turkish"
"92": "Turkish (Turkey)"
"246": "Turkmen"
"504": "Turkmen (Cyrillic)"
"821": "Turkmen (Cyrillic) (Turkey)"
"505": "Turkmen (Latin)"
"822": "Turkmen (Latin) (Turkey)"
"506": "Turkmen (Turkey)"
"157": "Turkmen (Turkmenistan)"
"93": "Ukrainian"
"94": "Ukrainian (Ukraine)"
"247": "Urdu"
"161": "Urdu (Pakistan)"
"248": "Uyghur"
"511": "Uyghur (Arabic)"
"160": "Uyghur (China)"
"512": "Uyghur (Cyrillic)"
"513": "Uyghur (Latin)"
"249": "Uzbek"
"514": "Uzbek (Cyrillic)"
"261": "Uzbek (Latin)"
"162": "Uzbek (Latin) (Uzbekistan)"
"793": "Uzbek (Uzbekistan)"
"117": "Valencian"
"250": "Vietnamese"
"163": "Vietnamese (Vietnam)"
"96": "Welsh"
"97": "Welsh (United Kingdom)"
"251": "Wolof"
"164": "Wolof (Senegal)"
"252": "Yoruba"
"515": "Yoruba (Latin)"
"166": "Yoruba (Nigeria)" |
[
{
"context": "ata}}</name>')\n expect(template.fromXML('<name>Wilfred</name>')).toEqual({ name: 'Wilfred' })\n expect",
"end": 245,
"score": 0.9996512532234192,
"start": 238,
"tag": "NAME",
"value": "Wilfred"
},
{
"context": "fromXML('<name>Wilfred</name>')).toEqual({ name: 'Wi... | test/model/cdata-test.coffee | admariner/cruftless | 19 | cruftless = require('../../src/cruftless')
{ element, attr, text, parse } = cruftless()
describe 'cdata', ->
it 'should parse cdata like text', ->
template = parse('<name>{{name|cdata}}</name>')
expect(template.fromXML('<name>Wilfred</name>')).toEqual({ name: 'Wilfred' })
expect(template.fromXML('<name><![CDATA[Joe]]></name>')).toEqual({ name: 'Joe' })
expect(template.toXML({ name: 'Bob' })).toEqual('<name><![CDATA[Bob]]></name>')
| 100602 | cruftless = require('../../src/cruftless')
{ element, attr, text, parse } = cruftless()
describe 'cdata', ->
it 'should parse cdata like text', ->
template = parse('<name>{{name|cdata}}</name>')
expect(template.fromXML('<name><NAME></name>')).toEqual({ name: '<NAME>' })
expect(template.fromXML('<name><![CDATA[<NAME>]]></name>')).toEqual({ name: '<NAME>' })
expect(template.toXML({ name: '<NAME>' })).toEqual('<name><![CDATA[<NAME>]]></name>')
| true | cruftless = require('../../src/cruftless')
{ element, attr, text, parse } = cruftless()
describe 'cdata', ->
it 'should parse cdata like text', ->
template = parse('<name>{{name|cdata}}</name>')
expect(template.fromXML('<name>PI:NAME:<NAME>END_PI</name>')).toEqual({ name: 'PI:NAME:<NAME>END_PI' })
expect(template.fromXML('<name><![CDATA[PI:NAME:<NAME>END_PI]]></name>')).toEqual({ name: 'PI:NAME:<NAME>END_PI' })
expect(template.toXML({ name: 'PI:NAME:<NAME>END_PI' })).toEqual('<name><![CDATA[PI:NAME:<NAME>END_PI]]></name>')
|
[
{
"context": " prompting : ->\n\n prefabTypesWithSpriteKey = ['Sprite', 'TileSprite', 'Emitter', 'Button']\n\n done = @a",
"end": 376,
"score": 0.7774574756622314,
"start": 368,
"tag": "KEY",
"value": "Sprite',"
},
{
"context": " : ->\n\n prefabTypesWithSpriteKey = ['Sprite'... | generators/prefab/index.coffee | ozankasikci/generator-phaser-coffeescript-gulp | 28 | _s = require 'underscore.string'
chalk = require 'chalk'
yosay = require 'yosay'
generators = require 'yeoman-generator'
module.exports = generators.NamedBase.extend
initializing : ->
@log yosay chalk.green "Creating a new prefab, hang tight!"
@name = @name.replace '.coffee', ''
prompting : ->
prefabTypesWithSpriteKey = ['Sprite', 'TileSprite', 'Emitter', 'Button']
done = @async()
@prompt [
{
type : 'list'
name : 'prefabType'
message : 'What type of prefab would you like to create?'
choices : ['Text', 'Group', 'Sprite', 'Button', 'Emitter', 'TileSprite', 'BitmapData']
default : 0
}
{
when : (props) -> props.prefabType in prefabTypesWithSpriteKey
type : 'input'
name : 'prefabSpriteKey'
message : "What is your prefab's sprite key?"
validate : (input) ->
return 'You must enter a sprite key' unless input
return true
}
], (props) =>
@props = props
@props.prefabClassName = _s.classify @name
@props.prefabSpriteKey = _s.slugify props.prefabSpriteKey
done()
writing :
app : ->
templates =
'Text' : '_prefabText.coffee'
'Group' : '_prefabGroup.coffee'
'Button' : '_prefabButton.coffee'
'Sprite' : '_prefabSprite.coffee'
'Emitter' : '_prefabEmitter.coffee'
'TileSprite' : '_prefabTileSprite.coffee'
'BitmapData' : '_prefabBitmapData.coffee'
@fs.copyTpl(
@templatePath(templates[@props.prefabType]), @destinationPath("app/scripts/prefabs/#{@name}.coffee"),
{ prefabSpriteKey : @props.prefabSpriteKey, prefabClassName : @props.prefabClassName }
)
end : ->
@log chalk.yellow("File name is: #{@name}.coffee")
@log chalk.yellow("Class name is: #{@props.prefabClassName}")
@log chalk.green("Created a new #{@props.prefabType} prefab successfully!")
| 175789 | _s = require 'underscore.string'
chalk = require 'chalk'
yosay = require 'yosay'
generators = require 'yeoman-generator'
module.exports = generators.NamedBase.extend
initializing : ->
@log yosay chalk.green "Creating a new prefab, hang tight!"
@name = @name.replace '.coffee', ''
prompting : ->
prefabTypesWithSpriteKey = ['<KEY> '<KEY> 'Emitter<KEY>', 'Button']
done = @async()
@prompt [
{
type : 'list'
name : 'prefabType'
message : 'What type of prefab would you like to create?'
choices : ['Text', 'Group', 'Sprite', 'Button', 'Emitter', 'TileSprite', 'BitmapData']
default : 0
}
{
when : (props) -> props.prefabType in prefabTypesWithSpriteKey
type : 'input'
name : 'prefabSpriteKey'
message : "What is your prefab's sprite key?"
validate : (input) ->
return 'You must enter a sprite key' unless input
return true
}
], (props) =>
@props = props
@props.prefabClassName = _s.classify @name
@props.prefabSpriteKey = _s.slugify props.prefabSpriteKey
done()
writing :
app : ->
templates =
'Text' : '_prefabText.coffee'
'Group' : '_prefabGroup.coffee'
'Button' : '_prefabButton.coffee'
'Sprite' : '_prefabSprite.coffee'
'Emitter' : '_prefabEmitter.coffee'
'TileSprite' : '_prefabTileSprite.coffee'
'BitmapData' : '_prefabBitmapData.coffee'
@fs.copyTpl(
@templatePath(templates[@props.prefabType]), @destinationPath("app/scripts/prefabs/#{@name}.coffee"),
{ prefabSpriteKey : @props.prefabSpriteKey, prefabClassName : @props.prefabClassName }
)
end : ->
@log chalk.yellow("File name is: #{@name}.coffee")
@log chalk.yellow("Class name is: #{@props.prefabClassName}")
@log chalk.green("Created a new #{@props.prefabType} prefab successfully!")
| true | _s = require 'underscore.string'
chalk = require 'chalk'
yosay = require 'yosay'
generators = require 'yeoman-generator'
module.exports = generators.NamedBase.extend
initializing : ->
@log yosay chalk.green "Creating a new prefab, hang tight!"
@name = @name.replace '.coffee', ''
prompting : ->
prefabTypesWithSpriteKey = ['PI:KEY:<KEY>END_PI 'PI:KEY:<KEY>END_PI 'EmitterPI:KEY:<KEY>END_PI', 'Button']
done = @async()
@prompt [
{
type : 'list'
name : 'prefabType'
message : 'What type of prefab would you like to create?'
choices : ['Text', 'Group', 'Sprite', 'Button', 'Emitter', 'TileSprite', 'BitmapData']
default : 0
}
{
when : (props) -> props.prefabType in prefabTypesWithSpriteKey
type : 'input'
name : 'prefabSpriteKey'
message : "What is your prefab's sprite key?"
validate : (input) ->
return 'You must enter a sprite key' unless input
return true
}
], (props) =>
@props = props
@props.prefabClassName = _s.classify @name
@props.prefabSpriteKey = _s.slugify props.prefabSpriteKey
done()
writing :
app : ->
templates =
'Text' : '_prefabText.coffee'
'Group' : '_prefabGroup.coffee'
'Button' : '_prefabButton.coffee'
'Sprite' : '_prefabSprite.coffee'
'Emitter' : '_prefabEmitter.coffee'
'TileSprite' : '_prefabTileSprite.coffee'
'BitmapData' : '_prefabBitmapData.coffee'
@fs.copyTpl(
@templatePath(templates[@props.prefabType]), @destinationPath("app/scripts/prefabs/#{@name}.coffee"),
{ prefabSpriteKey : @props.prefabSpriteKey, prefabClassName : @props.prefabClassName }
)
end : ->
@log chalk.yellow("File name is: #{@name}.coffee")
@log chalk.yellow("Class name is: #{@props.prefabClassName}")
@log chalk.green("Created a new #{@props.prefabType} prefab successfully!")
|
[
{
"context": " @result = @sut.onEnvelope config: {left: {name: 'Aaron', height: '5 feet tall'}, right: { name: 'Aaron',",
"end": 2493,
"score": 0.9989286661148071,
"start": 2488,
"tag": "NAME",
"value": "Aaron"
},
{
"context": " 'Aaron', height: '5 feet tall'}, right: { name: 'Aaron'... | test/not-equal-spec.coffee | sqrtofsaturn/nanocyte-component-not-equal | 0 | ReturnValue = require 'nanocyte-component-return-value'
NotEqual = require '../src/not-equal'
describe 'NotEqual', ->
beforeEach ->
@sut = new NotEqual
it 'should exist', ->
expect(@sut).to.be.an.instanceOf ReturnValue
describe '->onEnvelope', ->
describe 'when called with boolean true', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: true, right: true}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when called with boolean true and false', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: true, right: false}, message: 'anything'
it 'should not return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when called with an equal left and right', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 2, right: 2}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when called with an inequal left and right', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 1, right: 2}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are gregorically equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 'Jan 1, 2012', right: '2012-01-01'}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are gregorically unequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 'Jan 2, 2012', right: '2012-01-01'}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are numerically equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: '1', right: '1.0'}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are numerically nonequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: '1', right: '1.5'}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are objectorially equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: {name: 'Aaron', height: '5 feet tall'}, right: { name: 'Aaron', height: '5 feet tall'}}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are objectorially nonequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: {name: 'Aaron', height: '11 feet tall'}, right: { name: 'Aaron', height: '5 feet tall'}}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
| 108596 | ReturnValue = require 'nanocyte-component-return-value'
NotEqual = require '../src/not-equal'
describe 'NotEqual', ->
beforeEach ->
@sut = new NotEqual
it 'should exist', ->
expect(@sut).to.be.an.instanceOf ReturnValue
describe '->onEnvelope', ->
describe 'when called with boolean true', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: true, right: true}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when called with boolean true and false', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: true, right: false}, message: 'anything'
it 'should not return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when called with an equal left and right', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 2, right: 2}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when called with an inequal left and right', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 1, right: 2}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are gregorically equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 'Jan 1, 2012', right: '2012-01-01'}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are gregorically unequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 'Jan 2, 2012', right: '2012-01-01'}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are numerically equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: '1', right: '1.0'}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are numerically nonequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: '1', right: '1.5'}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are objectorially equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: {name: '<NAME>', height: '5 feet tall'}, right: { name: '<NAME>', height: '5 feet tall'}}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are objectorially nonequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: {name: '<NAME>', height: '11 feet tall'}, right: { name: '<NAME>', height: '5 feet tall'}}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
| true | ReturnValue = require 'nanocyte-component-return-value'
NotEqual = require '../src/not-equal'
describe 'NotEqual', ->
beforeEach ->
@sut = new NotEqual
it 'should exist', ->
expect(@sut).to.be.an.instanceOf ReturnValue
describe '->onEnvelope', ->
describe 'when called with boolean true', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: true, right: true}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when called with boolean true and false', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: true, right: false}, message: 'anything'
it 'should not return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when called with an equal left and right', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 2, right: 2}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when called with an inequal left and right', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 1, right: 2}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are gregorically equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 'Jan 1, 2012', right: '2012-01-01'}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are gregorically unequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: 'Jan 2, 2012', right: '2012-01-01'}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are numerically equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: '1', right: '1.0'}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are numerically nonequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: '1', right: '1.5'}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
describe 'when left and right are objectorially equal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: {name: 'PI:NAME:<NAME>END_PI', height: '5 feet tall'}, right: { name: 'PI:NAME:<NAME>END_PI', height: '5 feet tall'}}, message: 'anything'
it 'should not return the message', ->
expect(@result).not.to.exist
describe 'when left and right are objectorially nonequal', ->
beforeEach ->
@result = @sut.onEnvelope config: {left: {name: 'PI:NAME:<NAME>END_PI', height: '11 feet tall'}, right: { name: 'PI:NAME:<NAME>END_PI', height: '5 feet tall'}}, message: 'anything'
it 'should return the message', ->
expect(@result).to.deep.equal 'anything'
|
[
{
"context": "##*\n# server-checker : lib/checkers/http\n# Author: MrKMG (https://github.com/mrkmg)\n# Contributor: Sven Sl",
"end": 57,
"score": 0.9996359348297119,
"start": 52,
"tag": "USERNAME",
"value": "MrKMG"
},
{
"context": "checkers/http\n# Author: MrKMG (https://github.com/m... | src/lib/plugins/http.coffee | mrkmg/service-checker | 12 | ###*
# server-checker : lib/checkers/http
# Author: MrKMG (https://github.com/mrkmg)
# Contributor: Sven Slootweg (joepie91) (http://cryto.net/~joepie91/)
#
# MIT License
###
Promise = require 'bluebird'
http_node = require 'http'
https_node = require 'https'
_ = require 'underscore'
run = (options, ssl) ->
Promise
.try ->
makeRequest options, ssl
.then (request) ->
runRequest(options, request)
.then(checkResponse)
.catch _.identity
makeRequest = (options, ssl) ->
_.defaults options,
host: 'localhost'
port: if ssl then 443 else 80
method: 'GET'
path: '/'
strictSSL: false
rejectUnauthorized: true
ca: null
timeout: 5000
agent: false
handler = if ssl then https_node else http_node
handler.request _.pick options,
'host'
'port'
'method'
'path'
'strictSSL'
'rejectUnauthorized'
'ca'
runRequest = (options, request) ->
new Promise (resolve, reject) ->
cancelEvent = (event_name) ->
request.removeAllListeners event_name
request.on event_name, _.noop
cleanupRequest = ->
clearTimeout request_timeout
cancelEvent 'response'
cancelEvent 'error'
request.destroy()
doTimeout = ->
cleanupRequest()
err = new Error 'Request exceeded timeout of ' + options.timeout + 'ms'
err.code = 'TIMEOUT'
reject err
request.on 'response', (response) ->
cleanupRequest()
resolve response
request.on 'error', (err) ->
cleanupRequest()
reject err
request_timeout = setTimeout doTimeout, options.timeout
request.end()
checkResponse = (response) ->
if response.statusCode >= 400
error = new Error('HTTP Status code in error range: ' + response.statusCode)
error.code = 'HTTPSTATUSCODE'
throw error
null
module.exports =
http: (options) ->
run options, false
https: (options) ->
run options, true
| 194945 | ###*
# server-checker : lib/checkers/http
# Author: MrKMG (https://github.com/mrkmg)
# Contributor: <NAME> (joepie91) (http://cryto.net/~joepie91/)
#
# MIT License
###
Promise = require 'bluebird'
http_node = require 'http'
https_node = require 'https'
_ = require 'underscore'
run = (options, ssl) ->
Promise
.try ->
makeRequest options, ssl
.then (request) ->
runRequest(options, request)
.then(checkResponse)
.catch _.identity
makeRequest = (options, ssl) ->
_.defaults options,
host: 'localhost'
port: if ssl then 443 else 80
method: 'GET'
path: '/'
strictSSL: false
rejectUnauthorized: true
ca: null
timeout: 5000
agent: false
handler = if ssl then https_node else http_node
handler.request _.pick options,
'host'
'port'
'method'
'path'
'strictSSL'
'rejectUnauthorized'
'ca'
runRequest = (options, request) ->
new Promise (resolve, reject) ->
cancelEvent = (event_name) ->
request.removeAllListeners event_name
request.on event_name, _.noop
cleanupRequest = ->
clearTimeout request_timeout
cancelEvent 'response'
cancelEvent 'error'
request.destroy()
doTimeout = ->
cleanupRequest()
err = new Error 'Request exceeded timeout of ' + options.timeout + 'ms'
err.code = 'TIMEOUT'
reject err
request.on 'response', (response) ->
cleanupRequest()
resolve response
request.on 'error', (err) ->
cleanupRequest()
reject err
request_timeout = setTimeout doTimeout, options.timeout
request.end()
checkResponse = (response) ->
if response.statusCode >= 400
error = new Error('HTTP Status code in error range: ' + response.statusCode)
error.code = 'HTTPSTATUSCODE'
throw error
null
module.exports =
http: (options) ->
run options, false
https: (options) ->
run options, true
| true | ###*
# server-checker : lib/checkers/http
# Author: MrKMG (https://github.com/mrkmg)
# Contributor: PI:NAME:<NAME>END_PI (joepie91) (http://cryto.net/~joepie91/)
#
# MIT License
###
Promise = require 'bluebird'
http_node = require 'http'
https_node = require 'https'
_ = require 'underscore'
run = (options, ssl) ->
Promise
.try ->
makeRequest options, ssl
.then (request) ->
runRequest(options, request)
.then(checkResponse)
.catch _.identity
makeRequest = (options, ssl) ->
_.defaults options,
host: 'localhost'
port: if ssl then 443 else 80
method: 'GET'
path: '/'
strictSSL: false
rejectUnauthorized: true
ca: null
timeout: 5000
agent: false
handler = if ssl then https_node else http_node
handler.request _.pick options,
'host'
'port'
'method'
'path'
'strictSSL'
'rejectUnauthorized'
'ca'
runRequest = (options, request) ->
new Promise (resolve, reject) ->
cancelEvent = (event_name) ->
request.removeAllListeners event_name
request.on event_name, _.noop
cleanupRequest = ->
clearTimeout request_timeout
cancelEvent 'response'
cancelEvent 'error'
request.destroy()
doTimeout = ->
cleanupRequest()
err = new Error 'Request exceeded timeout of ' + options.timeout + 'ms'
err.code = 'TIMEOUT'
reject err
request.on 'response', (response) ->
cleanupRequest()
resolve response
request.on 'error', (err) ->
cleanupRequest()
reject err
request_timeout = setTimeout doTimeout, options.timeout
request.end()
checkResponse = (response) ->
if response.statusCode >= 400
error = new Error('HTTP Status code in error range: ' + response.statusCode)
error.code = 'HTTPSTATUSCODE'
throw error
null
module.exports =
http: (options) ->
run options, false
https: (options) ->
run options, true
|
[
{
"context": "ileoverview Tests for no-ex-assign rule.\n# @author Stephen Murray <spmurrayzzz>\n###\n\n'use strict'\n\n#---------------",
"end": 74,
"score": 0.9998013377189636,
"start": 60,
"tag": "NAME",
"value": "Stephen Murray"
},
{
"context": " for no-ex-assign rule.\n# @author ... | src/tests/rules/no-ex-assign.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Tests for no-ex-assign rule.
# @author Stephen Murray <spmurrayzzz>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-ex-assign'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-ex-assign', rule,
valid: [
'''
try
catch e
three = 2 + 1
'''
'''
try
catch {e}
@something = 2
'''
'''
->
try
catch e
return no
'''
]
invalid: [
code: '''
try
catch e
e = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
ex = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
[ex] = []
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
{x: ex = 0} = {}
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch {message}
message = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
]
| 132689 | ###*
# @fileoverview Tests for no-ex-assign rule.
# @author <NAME> <spmurrayzzz>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-ex-assign'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-ex-assign', rule,
valid: [
'''
try
catch e
three = 2 + 1
'''
'''
try
catch {e}
@something = 2
'''
'''
->
try
catch e
return no
'''
]
invalid: [
code: '''
try
catch e
e = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
ex = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
[ex] = []
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
{x: ex = 0} = {}
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch {message}
message = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
]
| true | ###*
# @fileoverview Tests for no-ex-assign rule.
# @author PI:NAME:<NAME>END_PI <spmurrayzzz>
###
'use strict'
#------------------------------------------------------------------------------
# Requirements
#------------------------------------------------------------------------------
rule = require 'eslint/lib/rules/no-ex-assign'
{RuleTester} = require 'eslint'
path = require 'path'
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'no-ex-assign', rule,
valid: [
'''
try
catch e
three = 2 + 1
'''
'''
try
catch {e}
@something = 2
'''
'''
->
try
catch e
return no
'''
]
invalid: [
code: '''
try
catch e
e = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
ex = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
[ex] = []
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch ex
{x: ex = 0} = {}
'''
errors: [messageId: 'unexpected', type: 'Identifier']
,
code: '''
try
catch {message}
message = 10
'''
errors: [messageId: 'unexpected', type: 'Identifier']
]
|
[
{
"context": "#\n# Mines main file\n#\n# Copyright (C) 2012 Nikolay Nemshilov\n#\n\n# hook up dependencies\ncore = require('core",
"end": 60,
"score": 0.9998840093612671,
"start": 43,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | E006/main.coffee | lovely-io/lovely.io-show | 1 | #
# Mines main file
#
# Copyright (C) 2012 Nikolay Nemshilov
#
# hook up dependencies
core = require('core')
$ = require('dom')
Timer = require('timer')
# local variables assignments
ext = core.ext
Class = core.Class
Element = $.Element
# glue in your files
include 'src/game'
include 'src/smile'
include 'src/stats'
include 'src/field'
include 'src/cell'
exports = ext Game,
version: '%{version}'
| 15097 | #
# Mines main file
#
# Copyright (C) 2012 <NAME>
#
# hook up dependencies
core = require('core')
$ = require('dom')
Timer = require('timer')
# local variables assignments
ext = core.ext
Class = core.Class
Element = $.Element
# glue in your files
include 'src/game'
include 'src/smile'
include 'src/stats'
include 'src/field'
include 'src/cell'
exports = ext Game,
version: '%{version}'
| true | #
# Mines main file
#
# Copyright (C) 2012 PI:NAME:<NAME>END_PI
#
# hook up dependencies
core = require('core')
$ = require('dom')
Timer = require('timer')
# local variables assignments
ext = core.ext
Class = core.Class
Element = $.Element
# glue in your files
include 'src/game'
include 'src/smile'
include 'src/stats'
include 'src/field'
include 'src/cell'
exports = ext Game,
version: '%{version}'
|
[
{
"context": " expect(names.normalize('HURD MARK V')).toBe 'Mark V. Hurd'\n",
"end": 176,
"score": 0.9997982382774353,
"start": 164,
"tag": "NAME",
"value": "Mark V. Hurd"
}
] | spec/names-spec.coffee | kevinsawicki/filings | 13 | names = require '../lib/names'
describe 'names', ->
describe '.normalize()', ->
it 'normalizes names', ->
expect(names.normalize('HURD MARK V')).toBe 'Mark V. Hurd'
| 173149 | names = require '../lib/names'
describe 'names', ->
describe '.normalize()', ->
it 'normalizes names', ->
expect(names.normalize('HURD MARK V')).toBe '<NAME>'
| true | names = require '../lib/names'
describe 'names', ->
describe '.normalize()', ->
it 'normalizes names', ->
expect(names.normalize('HURD MARK V')).toBe 'PI:NAME:<NAME>END_PI'
|
[
{
"context": "key: 'block-callout'\n\npatterns: [\n\n # Matches a callout reference ",
"end": 19,
"score": 0.9659743309020996,
"start": 6,
"tag": "KEY",
"value": "block-callout"
}
] | grammars/repositories/partials/block-callout-grammar.cson | andrewcarver/atom-language-asciidoc | 45 | key: 'block-callout'
patterns: [
# Matches a callout reference inside literal text.
#
# Examples
# <1> (optionally prefixed by //, #, -- or ;; line comment chars)
# <1> <2> (multiple callouts on one line)
# <!--1--> (for XML-based languages)
#
name: 'callout.source.code.asciidoc'
match: '(?:(?:\\/\\/|#|--|;;) ?)?( )?(?<!\\\\)(<)!?(--|)(\\d+)\\3(>)(?=(?: ?<!?\\3\\d+\\3>)*$)'
captures:
2: name: 'constant.other.symbol.asciidoc'
4: name: 'constant.numeric.asciidoc'
5: name: 'constant.other.symbol.asciidoc'
]
| 143920 | key: '<KEY>'
patterns: [
# Matches a callout reference inside literal text.
#
# Examples
# <1> (optionally prefixed by //, #, -- or ;; line comment chars)
# <1> <2> (multiple callouts on one line)
# <!--1--> (for XML-based languages)
#
name: 'callout.source.code.asciidoc'
match: '(?:(?:\\/\\/|#|--|;;) ?)?( )?(?<!\\\\)(<)!?(--|)(\\d+)\\3(>)(?=(?: ?<!?\\3\\d+\\3>)*$)'
captures:
2: name: 'constant.other.symbol.asciidoc'
4: name: 'constant.numeric.asciidoc'
5: name: 'constant.other.symbol.asciidoc'
]
| true | key: 'PI:KEY:<KEY>END_PI'
patterns: [
# Matches a callout reference inside literal text.
#
# Examples
# <1> (optionally prefixed by //, #, -- or ;; line comment chars)
# <1> <2> (multiple callouts on one line)
# <!--1--> (for XML-based languages)
#
name: 'callout.source.code.asciidoc'
match: '(?:(?:\\/\\/|#|--|;;) ?)?( )?(?<!\\\\)(<)!?(--|)(\\d+)\\3(>)(?=(?: ?<!?\\3\\d+\\3>)*$)'
captures:
2: name: 'constant.other.symbol.asciidoc'
4: name: 'constant.numeric.asciidoc'
5: name: 'constant.other.symbol.asciidoc'
]
|
[
{
"context": "#\n# Name : wow\n# Author : Matthieu Aussaguel, http://mynameismatthieu.com/, @mattaussaguel\n# V",
"end": 48,
"score": 0.9998914003372192,
"start": 30,
"tag": "NAME",
"value": "Matthieu Aussaguel"
},
{
"context": " Matthieu Aussaguel, http://mynameismatthieu.com/, ... | static/stil/app/bower_components/WOW/src/wow.coffee | Infinityloopsistemas/SIVA | 1 | #
# Name : wow
# Author : Matthieu Aussaguel, http://mynameismatthieu.com/, @mattaussaguel
# Version : 0.1.3
# Repo : https://github.com/matthieua/WOW
# Website : http://mynameismatthieu.com/wow
#
extend = (object, args...) ->
result = object or {}
for replacement in args
for key, value of replacement or {}
if typeof result[key] is "object"
result[key] = extend(result[key], value)
else
result[key] ||= value
result
class @WOW
defaults:
boxClass: 'wow'
animateClass: 'animated'
offset: 0
constructor: (options = {}) ->
@config = extend(options, @defaults)
@scrolled = true
# set initial config
init: ->
if document.readyState is "complete"
@start()
else
document.addEventListener 'DOMContentLoaded', @start
start: =>
@element = window.document.documentElement
@boxes = @element.getElementsByClassName(@config.boxClass)
if @boxes.length
@applyStyle(box, true) for box in @boxes
window.addEventListener('scroll', @scrollHandler, false)
window.addEventListener('resize', @scrollHandler, false)
@interval = setInterval @scrollCallback, 50
# unbind the scroll event
stop: ->
window.removeEventListener('scroll', @scrollHandler, false)
window.removeEventListener('resize', @scrollHandler, false)
clearInterval @interval if @interval?
# show box element
show: (box) ->
@applyStyle(box)
box.className = "#{box.className} #{@config.animateClass}"
applyStyle: (box, hidden) ->
duration = box.getAttribute('data-wow-duration')
delay = box.getAttribute('data-wow-delay')
iteration = box.getAttribute('data-wow-iteration')
box.setAttribute 'style', @customStyle(hidden, duration, delay, iteration)
customStyle: (hidden, duration, delay, iteration) ->
style = if hidden then "
visibility: hidden;
-webkit-animation-name: none;
-moz-animation-name: none;
animation-name: none;
" else "
visibility: visible;
"
style += "
-webkit-animation-duration: #{duration};
-moz-animation-duration: #{duration};
animation-duration: #{duration};
" if duration
style += "
-webkit-animation-delay: #{delay};
-moz-animation-delay: #{delay};
animation-delay: #{delay};
" if delay
style += "
-webkit-animation-iteration-count: #{iteration};
-moz-animation-iteration-count: #{iteration};
animation-iteration-count: #{iteration};
" if iteration
style
# fast window.scroll callback
scrollHandler: =>
@scrolled = true
scrollCallback: =>
if @scrolled
@scrolled = false
@boxes = for box in @boxes when box
if @isVisible(box)
@show(box)
continue
box
@stop() unless @boxes.length
# Calculate element offset top
offsetTop: (element) ->
top = element.offsetTop
top += element.offsetTop while element = element.offsetParent
top
# check if box is visible
isVisible: (box) ->
offset = box.getAttribute('data-wow-offset') or @config.offset
viewTop = window.pageYOffset
viewBottom = viewTop + @element.clientHeight - offset
top = @offsetTop(box)
bottom = top + box.clientHeight
top <= viewBottom and bottom >= viewTop
| 21757 | #
# Name : wow
# Author : <NAME>, http://mynameismatthieu.com/, @mattaussaguel
# Version : 0.1.3
# Repo : https://github.com/matthieua/WOW
# Website : http://mynameismatthieu.com/wow
#
extend = (object, args...) ->
result = object or {}
for replacement in args
for key, value of replacement or {}
if typeof result[key] is "object"
result[key] = extend(result[key], value)
else
result[key] ||= value
result
class @WOW
defaults:
boxClass: 'wow'
animateClass: 'animated'
offset: 0
constructor: (options = {}) ->
@config = extend(options, @defaults)
@scrolled = true
# set initial config
init: ->
if document.readyState is "complete"
@start()
else
document.addEventListener 'DOMContentLoaded', @start
start: =>
@element = window.document.documentElement
@boxes = @element.getElementsByClassName(@config.boxClass)
if @boxes.length
@applyStyle(box, true) for box in @boxes
window.addEventListener('scroll', @scrollHandler, false)
window.addEventListener('resize', @scrollHandler, false)
@interval = setInterval @scrollCallback, 50
# unbind the scroll event
stop: ->
window.removeEventListener('scroll', @scrollHandler, false)
window.removeEventListener('resize', @scrollHandler, false)
clearInterval @interval if @interval?
# show box element
show: (box) ->
@applyStyle(box)
box.className = "#{box.className} #{@config.animateClass}"
applyStyle: (box, hidden) ->
duration = box.getAttribute('data-wow-duration')
delay = box.getAttribute('data-wow-delay')
iteration = box.getAttribute('data-wow-iteration')
box.setAttribute 'style', @customStyle(hidden, duration, delay, iteration)
customStyle: (hidden, duration, delay, iteration) ->
style = if hidden then "
visibility: hidden;
-webkit-animation-name: none;
-moz-animation-name: none;
animation-name: none;
" else "
visibility: visible;
"
style += "
-webkit-animation-duration: #{duration};
-moz-animation-duration: #{duration};
animation-duration: #{duration};
" if duration
style += "
-webkit-animation-delay: #{delay};
-moz-animation-delay: #{delay};
animation-delay: #{delay};
" if delay
style += "
-webkit-animation-iteration-count: #{iteration};
-moz-animation-iteration-count: #{iteration};
animation-iteration-count: #{iteration};
" if iteration
style
# fast window.scroll callback
scrollHandler: =>
@scrolled = true
scrollCallback: =>
if @scrolled
@scrolled = false
@boxes = for box in @boxes when box
if @isVisible(box)
@show(box)
continue
box
@stop() unless @boxes.length
# Calculate element offset top
offsetTop: (element) ->
top = element.offsetTop
top += element.offsetTop while element = element.offsetParent
top
# check if box is visible
isVisible: (box) ->
offset = box.getAttribute('data-wow-offset') or @config.offset
viewTop = window.pageYOffset
viewBottom = viewTop + @element.clientHeight - offset
top = @offsetTop(box)
bottom = top + box.clientHeight
top <= viewBottom and bottom >= viewTop
| true | #
# Name : wow
# Author : PI:NAME:<NAME>END_PI, http://mynameismatthieu.com/, @mattaussaguel
# Version : 0.1.3
# Repo : https://github.com/matthieua/WOW
# Website : http://mynameismatthieu.com/wow
#
extend = (object, args...) ->
result = object or {}
for replacement in args
for key, value of replacement or {}
if typeof result[key] is "object"
result[key] = extend(result[key], value)
else
result[key] ||= value
result
class @WOW
defaults:
boxClass: 'wow'
animateClass: 'animated'
offset: 0
constructor: (options = {}) ->
@config = extend(options, @defaults)
@scrolled = true
# set initial config
init: ->
if document.readyState is "complete"
@start()
else
document.addEventListener 'DOMContentLoaded', @start
start: =>
@element = window.document.documentElement
@boxes = @element.getElementsByClassName(@config.boxClass)
if @boxes.length
@applyStyle(box, true) for box in @boxes
window.addEventListener('scroll', @scrollHandler, false)
window.addEventListener('resize', @scrollHandler, false)
@interval = setInterval @scrollCallback, 50
# unbind the scroll event
stop: ->
window.removeEventListener('scroll', @scrollHandler, false)
window.removeEventListener('resize', @scrollHandler, false)
clearInterval @interval if @interval?
# show box element
show: (box) ->
@applyStyle(box)
box.className = "#{box.className} #{@config.animateClass}"
applyStyle: (box, hidden) ->
duration = box.getAttribute('data-wow-duration')
delay = box.getAttribute('data-wow-delay')
iteration = box.getAttribute('data-wow-iteration')
box.setAttribute 'style', @customStyle(hidden, duration, delay, iteration)
customStyle: (hidden, duration, delay, iteration) ->
style = if hidden then "
visibility: hidden;
-webkit-animation-name: none;
-moz-animation-name: none;
animation-name: none;
" else "
visibility: visible;
"
style += "
-webkit-animation-duration: #{duration};
-moz-animation-duration: #{duration};
animation-duration: #{duration};
" if duration
style += "
-webkit-animation-delay: #{delay};
-moz-animation-delay: #{delay};
animation-delay: #{delay};
" if delay
style += "
-webkit-animation-iteration-count: #{iteration};
-moz-animation-iteration-count: #{iteration};
animation-iteration-count: #{iteration};
" if iteration
style
# fast window.scroll callback
scrollHandler: =>
@scrolled = true
scrollCallback: =>
if @scrolled
@scrolled = false
@boxes = for box in @boxes when box
if @isVisible(box)
@show(box)
continue
box
@stop() unless @boxes.length
# Calculate element offset top
offsetTop: (element) ->
top = element.offsetTop
top += element.offsetTop while element = element.offsetParent
top
# check if box is visible
isVisible: (box) ->
offset = box.getAttribute('data-wow-offset') or @config.offset
viewTop = window.pageYOffset
viewBottom = viewTop + @element.clientHeight - offset
top = @offsetTop(box)
bottom = top + box.clientHeight
top <= viewBottom and bottom >= viewTop
|
[
{
"context": "a Blitzcrank For sexual harassment\",\n \"I reported Rammus for refusing to communicate with the team\",\n ",
"end": 501,
"score": 0.6707136034965515,
"start": 498,
"tag": "NAME",
"value": "Ram"
},
{
"context": "ted Trundle for excessive trolling\",\n \"I reported Or... | scripts/today-lol.coffee | RiotGamesMinions/lefay | 7 | # LoL puns
#
# today in lol - get a LoL pun
#
today = [
"I reported Maokai for having a high amount of leaves",
"Caitlyn wouldn't let me farm in lane so I reported her for harassment",
"I reported a Cho'Gath For intentional feeding",
"I reported a Brand for excessive flaming",
"I reported Leona for being disruptive",
"I reported an Irelia for refusing to surrender",
"I reported a Tryndamere For Negative Attitude",
"I reported a Blitzcrank For sexual harassment",
"I reported Rammus for refusing to communicate with the team",
"I reported Trundle for excessive trolling",
"I reported Orianna because she kept dropping The Ball",
"Annie reported me. ᶘ ᵒᴥᵒᶅ",
"I reported our Sona for failure to communicate",
"I reported Lee Sin, for having no map awareness",
"I reported Lux for using colorful language",
"I reported Anivia's Wall for assisting enemy team",
"I reported Lee Sin because he didn't read the Summoner's Code",
"Nasus was a total ****. Spent all game barking orders",
"I reported a Gragas for drunk gaming",
"I reported a Shaco for decieving me",
"I reported the AFK Teemo for not being on duty",
"I reported their Soraka for Assisting the Enemy Team",
"I reported Skarner for bugging me",
"I reported Wukong - wouldn't stop monkeying around",
"I reported Cho'Gath. He kept screaming at me",
"I reported Twisted Fate. He stacked the deck in their favor.",
"Jarvan really held our team together.",
"Veigar tried, but he always came up a little short."
]
module.exports = (robot) ->
robot.respond /today in lol/i, (msg) ->
msg.send "Today #{msg.random today}."
| 216622 | # LoL puns
#
# today in lol - get a LoL pun
#
today = [
"I reported Maokai for having a high amount of leaves",
"Caitlyn wouldn't let me farm in lane so I reported her for harassment",
"I reported a Cho'Gath For intentional feeding",
"I reported a Brand for excessive flaming",
"I reported Leona for being disruptive",
"I reported an Irelia for refusing to surrender",
"I reported a Tryndamere For Negative Attitude",
"I reported a Blitzcrank For sexual harassment",
"I reported <NAME>mus for refusing to communicate with the team",
"I reported Trundle for excessive trolling",
"I reported <NAME> because she kept dropping The Ball",
"<NAME> reported me. ᶘ ᵒᴥᵒᶅ",
"I reported our Sona for failure to communicate",
"I reported <NAME>, for having no map awareness",
"I reported Lux for using colorful language",
"I reported Anivia's Wall for assisting enemy team",
"I reported <NAME> because he didn't read the Summoner's Code",
"Nasus was a total ****. Spent all game barking orders",
"I reported a Gragas for drunk gaming",
"I reported a Shaco for decieving me",
"I reported the AFK Teemo for not being on duty",
"I reported their Soraka for Assisting the Enemy Team",
"I reported Skarner for bugging me",
"I reported Wukong - wouldn't stop monkeying around",
"I reported Cho'Gath. He kept screaming at me",
"I reported Twisted Fate. He stacked the deck in their favor.",
"<NAME> really held our team together.",
"Veigar tried, but he always came up a little short."
]
module.exports = (robot) ->
robot.respond /today in lol/i, (msg) ->
msg.send "Today #{msg.random today}."
| true | # LoL puns
#
# today in lol - get a LoL pun
#
today = [
"I reported Maokai for having a high amount of leaves",
"Caitlyn wouldn't let me farm in lane so I reported her for harassment",
"I reported a Cho'Gath For intentional feeding",
"I reported a Brand for excessive flaming",
"I reported Leona for being disruptive",
"I reported an Irelia for refusing to surrender",
"I reported a Tryndamere For Negative Attitude",
"I reported a Blitzcrank For sexual harassment",
"I reported PI:NAME:<NAME>END_PImus for refusing to communicate with the team",
"I reported Trundle for excessive trolling",
"I reported PI:NAME:<NAME>END_PI because she kept dropping The Ball",
"PI:NAME:<NAME>END_PI reported me. ᶘ ᵒᴥᵒᶅ",
"I reported our Sona for failure to communicate",
"I reported PI:NAME:<NAME>END_PI, for having no map awareness",
"I reported Lux for using colorful language",
"I reported Anivia's Wall for assisting enemy team",
"I reported PI:NAME:<NAME>END_PI because he didn't read the Summoner's Code",
"Nasus was a total ****. Spent all game barking orders",
"I reported a Gragas for drunk gaming",
"I reported a Shaco for decieving me",
"I reported the AFK Teemo for not being on duty",
"I reported their Soraka for Assisting the Enemy Team",
"I reported Skarner for bugging me",
"I reported Wukong - wouldn't stop monkeying around",
"I reported Cho'Gath. He kept screaming at me",
"I reported Twisted Fate. He stacked the deck in their favor.",
"PI:NAME:<NAME>END_PI really held our team together.",
"Veigar tried, but he always came up a little short."
]
module.exports = (robot) ->
robot.respond /today in lol/i, (msg) ->
msg.send "Today #{msg.random today}."
|
[
{
"context": "############\n##\n## Copyright 2018 M. Hoppe & N. Justus\n##\n## Licensed under the Apache Licen",
"end": 42,
"score": 0.9998449087142944,
"start": 34,
"tag": "NAME",
"value": "M. Hoppe"
},
{
"context": "############\n##\n## Copyright 2018 M. Hoppe & N. Justus\n##\n## L... | app/assets/javascripts/application.coffee | LiScI-Lab/Guardian-of-Times | 3 | ############
##
## Copyright 2018 M. Hoppe & N. Justus
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
############
#= require rails-ujs
#= require turbolinks
#= require jquery
#= require materialize
#= require cocoon
#
#= require i18n
#= require i18n/translations
#= require moment
#= require Chart.bundle
#= require chartkick
#
#= require_tree .
window.timetracker || (window.timetracker = {})
timetracker.app = {}
timetracker.app.init = () ->
console.log('Application init')
timetracker.app.materialize()
elem = document.querySelector('.collapsible.expandable')
instance = M.Collapsible.init(elem, {
accordion: false
})
return
timetracker.app.cocoonize = (elem) ->
$(elem).on 'cocoon:after-remove', (e, elem) ->
if $(elem.context).siblings "[id$=discarded_at]"
$(elem.context).siblings("[id$=_destroy]").val(false)
$(elem.context).siblings("[id$=discarded_at]").val(new Date(Date.now()))
return
$(elem).on 'cocoon:after-insert', (ev, elem) ->
console.log(elem)
timetracker.app.materialize(elem)
return
timetracker.app.materialize = (elem) ->
elem || (elem = $('body'))
M.AutoInit(elem[0])
$(".dropdown-trigger.no-autoinit", elem).dropdown
constrainWidth: false
autoTrigger: true
coverTrigger: false
sidenavs = $('.sidenav.no-autoinit')
sidenavs.sidenav()
sidenavs.each (_, nav) ->
id = $('.sidenav.no-autoinit').attr('id')
$(".sidenav-trigger[data-target=\"#{id}\"").click () ->
$(nav).sidenav('open')
return
return
$('input[type="text"].character-count', elem).not('.date,.time,.datetime,.select-dropdown').characterCounter()
$('textarea', elem).characterCounter()
$('.modal', elem).modal()
$('.datepicker.no-autoinit', elem).not('.manual').each (_, e) ->
e = $(e)
e.datepicker({
setDefaultDate: true
autoClose: true
format: I18n.t('js.date.picker.format')
defaultDate: new Date(moment(e.val(), I18n.t('js.date.formats.default')).format())
setDefaultDate: true
yearRange: 100
showClearBtn: e.hasClass('optional')
i18n: {
cancel: I18n.t('js.picker.action.cancel')
clear: I18n.t('js.picker.action.clear')
done: I18n.t('js.picker.action.done')
months: I18n.t('date.month_names')[1..12]
monthsShort: I18n.t('date.abbr_month_names')[1..12]
weekdays: I18n.t('date.day_names')
weekdaysShort: I18n.t('date.abbr_day_names')
weekdaysAbbrev: I18n.t('date.abbr_day_names')
}
})
return
$('.timepicker', elem).not('.manual').each (_, e) ->
e = $(e)
e.timepicker({
autoClose: true
twelveHour: I18n.t('js.time.twelve_hour')
showClearBtn: e.hasClass('optional')
i18n: {
cancel: I18n.t('js.picker.action.cancel')
clear: I18n.t('js.picker.action.clear')
done: I18n.t('js.picker.action.done')
}
})
$('input.boolean.tooltipped,input.date.tooltipped', elem).each (_, e) ->
e = $(e)
parent = e.parents('.boolean,.date')
data = e.data()
data['html'] = data['tooltip']
M.Tooltip.init(parent,data)
i = M.Tooltip.getInstance(e)
if i isnt undefined
M.Tooltip.getInstance(e).destroy()
M.updateTextFields()
return
timetracker.app.init_chips = (elem, tags, autocomplete_tags) ->
data = []
if tags.length > 0
data = tags.map((v) -> {tag: v})
if elem[0].hasAttribute("for")
target = elem.siblings("##{elem.attr('for')}")
updateTarget = () ->
chips = M.Chips.getInstance(elem).chipsData
str = ''
chips.forEach (chip) ->
if str != ''
str = "#{str}, "
str = "#{str}#{chip.tag}"
target.val(str)
return
autocomplete_data = {}
autocomplete_tags.forEach (tag) ->
autocomplete_data[tag] = null
return
elem.chips
data: data
autocompleteOptions:
data: autocomplete_data
limit: Infinity
minLength: 1
onChipAdd: updateTarget
onChipDelete: updateTarget
return
$(document).on 'turbolinks:load', timetracker.app.init
jQuery.fn.changeTag = (newTag) ->
q = this
@each (i, el) ->
h = '<' + el.outerHTML.replace(/(^<\w+|\w+>$)/g, newTag) + '>'
try
el.outerHTML = h
catch e
#elem not in dom
q[i] = jQuery(h)[0]
return
this
| 69860 | ############
##
## Copyright 2018 <NAME> & <NAME>
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
############
#= require rails-ujs
#= require turbolinks
#= require jquery
#= require materialize
#= require cocoon
#
#= require i18n
#= require i18n/translations
#= require moment
#= require Chart.bundle
#= require chartkick
#
#= require_tree .
window.timetracker || (window.timetracker = {})
timetracker.app = {}
timetracker.app.init = () ->
console.log('Application init')
timetracker.app.materialize()
elem = document.querySelector('.collapsible.expandable')
instance = M.Collapsible.init(elem, {
accordion: false
})
return
timetracker.app.cocoonize = (elem) ->
$(elem).on 'cocoon:after-remove', (e, elem) ->
if $(elem.context).siblings "[id$=discarded_at]"
$(elem.context).siblings("[id$=_destroy]").val(false)
$(elem.context).siblings("[id$=discarded_at]").val(new Date(Date.now()))
return
$(elem).on 'cocoon:after-insert', (ev, elem) ->
console.log(elem)
timetracker.app.materialize(elem)
return
timetracker.app.materialize = (elem) ->
elem || (elem = $('body'))
M.AutoInit(elem[0])
$(".dropdown-trigger.no-autoinit", elem).dropdown
constrainWidth: false
autoTrigger: true
coverTrigger: false
sidenavs = $('.sidenav.no-autoinit')
sidenavs.sidenav()
sidenavs.each (_, nav) ->
id = $('.sidenav.no-autoinit').attr('id')
$(".sidenav-trigger[data-target=\"#{id}\"").click () ->
$(nav).sidenav('open')
return
return
$('input[type="text"].character-count', elem).not('.date,.time,.datetime,.select-dropdown').characterCounter()
$('textarea', elem).characterCounter()
$('.modal', elem).modal()
$('.datepicker.no-autoinit', elem).not('.manual').each (_, e) ->
e = $(e)
e.datepicker({
setDefaultDate: true
autoClose: true
format: I18n.t('js.date.picker.format')
defaultDate: new Date(moment(e.val(), I18n.t('js.date.formats.default')).format())
setDefaultDate: true
yearRange: 100
showClearBtn: e.hasClass('optional')
i18n: {
cancel: I18n.t('js.picker.action.cancel')
clear: I18n.t('js.picker.action.clear')
done: I18n.t('js.picker.action.done')
months: I18n.t('date.month_names')[1..12]
monthsShort: I18n.t('date.abbr_month_names')[1..12]
weekdays: I18n.t('date.day_names')
weekdaysShort: I18n.t('date.abbr_day_names')
weekdaysAbbrev: I18n.t('date.abbr_day_names')
}
})
return
$('.timepicker', elem).not('.manual').each (_, e) ->
e = $(e)
e.timepicker({
autoClose: true
twelveHour: I18n.t('js.time.twelve_hour')
showClearBtn: e.hasClass('optional')
i18n: {
cancel: I18n.t('js.picker.action.cancel')
clear: I18n.t('js.picker.action.clear')
done: I18n.t('js.picker.action.done')
}
})
$('input.boolean.tooltipped,input.date.tooltipped', elem).each (_, e) ->
e = $(e)
parent = e.parents('.boolean,.date')
data = e.data()
data['html'] = data['tooltip']
M.Tooltip.init(parent,data)
i = M.Tooltip.getInstance(e)
if i isnt undefined
M.Tooltip.getInstance(e).destroy()
M.updateTextFields()
return
timetracker.app.init_chips = (elem, tags, autocomplete_tags) ->
data = []
if tags.length > 0
data = tags.map((v) -> {tag: v})
if elem[0].hasAttribute("for")
target = elem.siblings("##{elem.attr('for')}")
updateTarget = () ->
chips = M.Chips.getInstance(elem).chipsData
str = ''
chips.forEach (chip) ->
if str != ''
str = "#{str}, "
str = "#{str}#{chip.tag}"
target.val(str)
return
autocomplete_data = {}
autocomplete_tags.forEach (tag) ->
autocomplete_data[tag] = null
return
elem.chips
data: data
autocompleteOptions:
data: autocomplete_data
limit: Infinity
minLength: 1
onChipAdd: updateTarget
onChipDelete: updateTarget
return
$(document).on 'turbolinks:load', timetracker.app.init
jQuery.fn.changeTag = (newTag) ->
q = this
@each (i, el) ->
h = '<' + el.outerHTML.replace(/(^<\w+|\w+>$)/g, newTag) + '>'
try
el.outerHTML = h
catch e
#elem not in dom
q[i] = jQuery(h)[0]
return
this
| true | ############
##
## Copyright 2018 PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
############
#= require rails-ujs
#= require turbolinks
#= require jquery
#= require materialize
#= require cocoon
#
#= require i18n
#= require i18n/translations
#= require moment
#= require Chart.bundle
#= require chartkick
#
#= require_tree .
window.timetracker || (window.timetracker = {})
timetracker.app = {}
timetracker.app.init = () ->
console.log('Application init')
timetracker.app.materialize()
elem = document.querySelector('.collapsible.expandable')
instance = M.Collapsible.init(elem, {
accordion: false
})
return
timetracker.app.cocoonize = (elem) ->
$(elem).on 'cocoon:after-remove', (e, elem) ->
if $(elem.context).siblings "[id$=discarded_at]"
$(elem.context).siblings("[id$=_destroy]").val(false)
$(elem.context).siblings("[id$=discarded_at]").val(new Date(Date.now()))
return
$(elem).on 'cocoon:after-insert', (ev, elem) ->
console.log(elem)
timetracker.app.materialize(elem)
return
timetracker.app.materialize = (elem) ->
elem || (elem = $('body'))
M.AutoInit(elem[0])
$(".dropdown-trigger.no-autoinit", elem).dropdown
constrainWidth: false
autoTrigger: true
coverTrigger: false
sidenavs = $('.sidenav.no-autoinit')
sidenavs.sidenav()
sidenavs.each (_, nav) ->
id = $('.sidenav.no-autoinit').attr('id')
$(".sidenav-trigger[data-target=\"#{id}\"").click () ->
$(nav).sidenav('open')
return
return
$('input[type="text"].character-count', elem).not('.date,.time,.datetime,.select-dropdown').characterCounter()
$('textarea', elem).characterCounter()
$('.modal', elem).modal()
$('.datepicker.no-autoinit', elem).not('.manual').each (_, e) ->
e = $(e)
e.datepicker({
setDefaultDate: true
autoClose: true
format: I18n.t('js.date.picker.format')
defaultDate: new Date(moment(e.val(), I18n.t('js.date.formats.default')).format())
setDefaultDate: true
yearRange: 100
showClearBtn: e.hasClass('optional')
i18n: {
cancel: I18n.t('js.picker.action.cancel')
clear: I18n.t('js.picker.action.clear')
done: I18n.t('js.picker.action.done')
months: I18n.t('date.month_names')[1..12]
monthsShort: I18n.t('date.abbr_month_names')[1..12]
weekdays: I18n.t('date.day_names')
weekdaysShort: I18n.t('date.abbr_day_names')
weekdaysAbbrev: I18n.t('date.abbr_day_names')
}
})
return
$('.timepicker', elem).not('.manual').each (_, e) ->
e = $(e)
e.timepicker({
autoClose: true
twelveHour: I18n.t('js.time.twelve_hour')
showClearBtn: e.hasClass('optional')
i18n: {
cancel: I18n.t('js.picker.action.cancel')
clear: I18n.t('js.picker.action.clear')
done: I18n.t('js.picker.action.done')
}
})
$('input.boolean.tooltipped,input.date.tooltipped', elem).each (_, e) ->
e = $(e)
parent = e.parents('.boolean,.date')
data = e.data()
data['html'] = data['tooltip']
M.Tooltip.init(parent,data)
i = M.Tooltip.getInstance(e)
if i isnt undefined
M.Tooltip.getInstance(e).destroy()
M.updateTextFields()
return
timetracker.app.init_chips = (elem, tags, autocomplete_tags) ->
data = []
if tags.length > 0
data = tags.map((v) -> {tag: v})
if elem[0].hasAttribute("for")
target = elem.siblings("##{elem.attr('for')}")
updateTarget = () ->
chips = M.Chips.getInstance(elem).chipsData
str = ''
chips.forEach (chip) ->
if str != ''
str = "#{str}, "
str = "#{str}#{chip.tag}"
target.val(str)
return
autocomplete_data = {}
autocomplete_tags.forEach (tag) ->
autocomplete_data[tag] = null
return
elem.chips
data: data
autocompleteOptions:
data: autocomplete_data
limit: Infinity
minLength: 1
onChipAdd: updateTarget
onChipDelete: updateTarget
return
$(document).on 'turbolinks:load', timetracker.app.init
jQuery.fn.changeTag = (newTag) ->
q = this
@each (i, el) ->
h = '<' + el.outerHTML.replace(/(^<\w+|\w+>$)/g, newTag) + '>'
try
el.outerHTML = h
catch e
#elem not in dom
q[i] = jQuery(h)[0]
return
this
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9981219172477722,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/disabled/test-sendfd.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Test sending and receiving a file descriptor.
#
# This test is pretty complex. It ends up spawning test/fixtures/recvfd.js
# as a child to test desired behavior. What happens is
#
# 1. Create an in-memory pipe via pipe(2). These two file descriptors
# are not visible to any other process, and so make a good test-case
# for sharing.
# 2. Create a a UNIX socket at SOCK_PATH. When a client connects to this
# path, they are sent the write end of the pipe from above.
# 3. The client is sent n JSON representations of the DATA variable, each
# with a different ordinal. We send these delimited by '\n' strings
# so that the receiving end can avoid any coalescing that happens
# due to the stream nature of the socket (e.g. '{}{}' is not a valid
# JSON string).
# 4. The child process receives file descriptors and JSON blobs and,
# whenever it has at least one of each, writes a modified JSON blob
# to the FD. The blob is modified to include the child's process ID.
# 5. Once the child process has sent n responses, it closes the write end
# of the pipe, which signals to the parent that there is no more data
# coming.
# 6. The parent listens to the read end of the pipe, accumulating JSON
# blobs (again, delimited by '\n') and verifying that a) the 'pid'
# attribute belongs to the child and b) the 'ord' field has not been
# seen in a response yet. This is intended to ensure that all blobs
# sent out have been relayed back to us.
common = require("../common")
assert = require("assert")
buffer = require("buffer")
child_process = require("child_process")
fs = require("fs")
net = require("net")
netBinding = process.binding("net")
path = require("path")
DATA =
ppid: process.pid
ord: 0
SOCK_PATH = path.join(__dirname, "..", path.basename(__filename, ".js") + ".sock")
logChild = (d) ->
d = d.toString() if typeof d is "object"
d.split("\n").forEach (l) ->
common.debug "CHILD: " + l if l.length > 0
return
return
# Create a pipe
#
# We establish a listener on the read end of the pipe so that we can
# validate any data sent back by the child. We send the write end of the
# pipe to the child and close it off in our process.
pipeFDs = netBinding.pipe()
assert.equal pipeFDs.length, 2
seenOrdinals = []
pipeReadStream = new net.Stream()
pipeReadStream.on "data", (data) ->
data.toString("utf8").trim().split("\n").forEach (d) ->
rd = JSON.parse(d)
assert.equal rd.pid, cpp
assert.equal seenOrdinals.indexOf(rd.ord), -1
seenOrdinals.unshift rd.ord
return
return
pipeReadStream.open pipeFDs[0]
pipeReadStream.resume()
# Create a UNIX socket at SOCK_PATH and send DATA and the write end
# of the pipe to whoever connects.
#
# We send two messages here, both with the same pipe FD: one string, and
# one buffer. We want to make sure that both datatypes are handled
# correctly.
srv = net.createServer((s) ->
str = JSON.stringify(DATA) + "\n"
DATA.ord = DATA.ord + 1
buf = new buffer.Buffer(str.length)
buf.write JSON.stringify(DATA) + "\n", "utf8"
s.write str, "utf8", pipeFDs[1]
if s.write(buf, pipeFDs[1])
netBinding.close pipeFDs[1]
else
s.on "drain", ->
netBinding.close pipeFDs[1]
return
return
)
srv.listen SOCK_PATH
# Spawn a child running test/fixtures/recvfd.js
cp = child_process.spawn(process.argv[0], [
path.join(common.fixturesDir, "recvfd.js")
SOCK_PATH
])
cp.stdout.on "data", logChild
cp.stderr.on "data", logChild
# When the child exits, clean up and validate its exit status
cpp = cp.pid
cp.on "exit", (code, signal) ->
srv.close()
# fs.unlinkSync(SOCK_PATH);
assert.equal code, 0
assert.equal seenOrdinals.length, 2
return
# vim:ts=2 sw=2 et
| 64703 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Test sending and receiving a file descriptor.
#
# This test is pretty complex. It ends up spawning test/fixtures/recvfd.js
# as a child to test desired behavior. What happens is
#
# 1. Create an in-memory pipe via pipe(2). These two file descriptors
# are not visible to any other process, and so make a good test-case
# for sharing.
# 2. Create a a UNIX socket at SOCK_PATH. When a client connects to this
# path, they are sent the write end of the pipe from above.
# 3. The client is sent n JSON representations of the DATA variable, each
# with a different ordinal. We send these delimited by '\n' strings
# so that the receiving end can avoid any coalescing that happens
# due to the stream nature of the socket (e.g. '{}{}' is not a valid
# JSON string).
# 4. The child process receives file descriptors and JSON blobs and,
# whenever it has at least one of each, writes a modified JSON blob
# to the FD. The blob is modified to include the child's process ID.
# 5. Once the child process has sent n responses, it closes the write end
# of the pipe, which signals to the parent that there is no more data
# coming.
# 6. The parent listens to the read end of the pipe, accumulating JSON
# blobs (again, delimited by '\n') and verifying that a) the 'pid'
# attribute belongs to the child and b) the 'ord' field has not been
# seen in a response yet. This is intended to ensure that all blobs
# sent out have been relayed back to us.
common = require("../common")
assert = require("assert")
buffer = require("buffer")
child_process = require("child_process")
fs = require("fs")
net = require("net")
netBinding = process.binding("net")
path = require("path")
DATA =
ppid: process.pid
ord: 0
SOCK_PATH = path.join(__dirname, "..", path.basename(__filename, ".js") + ".sock")
logChild = (d) ->
d = d.toString() if typeof d is "object"
d.split("\n").forEach (l) ->
common.debug "CHILD: " + l if l.length > 0
return
return
# Create a pipe
#
# We establish a listener on the read end of the pipe so that we can
# validate any data sent back by the child. We send the write end of the
# pipe to the child and close it off in our process.
pipeFDs = netBinding.pipe()
assert.equal pipeFDs.length, 2
seenOrdinals = []
pipeReadStream = new net.Stream()
pipeReadStream.on "data", (data) ->
data.toString("utf8").trim().split("\n").forEach (d) ->
rd = JSON.parse(d)
assert.equal rd.pid, cpp
assert.equal seenOrdinals.indexOf(rd.ord), -1
seenOrdinals.unshift rd.ord
return
return
pipeReadStream.open pipeFDs[0]
pipeReadStream.resume()
# Create a UNIX socket at SOCK_PATH and send DATA and the write end
# of the pipe to whoever connects.
#
# We send two messages here, both with the same pipe FD: one string, and
# one buffer. We want to make sure that both datatypes are handled
# correctly.
srv = net.createServer((s) ->
str = JSON.stringify(DATA) + "\n"
DATA.ord = DATA.ord + 1
buf = new buffer.Buffer(str.length)
buf.write JSON.stringify(DATA) + "\n", "utf8"
s.write str, "utf8", pipeFDs[1]
if s.write(buf, pipeFDs[1])
netBinding.close pipeFDs[1]
else
s.on "drain", ->
netBinding.close pipeFDs[1]
return
return
)
srv.listen SOCK_PATH
# Spawn a child running test/fixtures/recvfd.js
cp = child_process.spawn(process.argv[0], [
path.join(common.fixturesDir, "recvfd.js")
SOCK_PATH
])
cp.stdout.on "data", logChild
cp.stderr.on "data", logChild
# When the child exits, clean up and validate its exit status
cpp = cp.pid
cp.on "exit", (code, signal) ->
srv.close()
# fs.unlinkSync(SOCK_PATH);
assert.equal code, 0
assert.equal seenOrdinals.length, 2
return
# vim:ts=2 sw=2 et
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# Test sending and receiving a file descriptor.
#
# This test is pretty complex. It ends up spawning test/fixtures/recvfd.js
# as a child to test desired behavior. What happens is
#
# 1. Create an in-memory pipe via pipe(2). These two file descriptors
# are not visible to any other process, and so make a good test-case
# for sharing.
# 2. Create a a UNIX socket at SOCK_PATH. When a client connects to this
# path, they are sent the write end of the pipe from above.
# 3. The client is sent n JSON representations of the DATA variable, each
# with a different ordinal. We send these delimited by '\n' strings
# so that the receiving end can avoid any coalescing that happens
# due to the stream nature of the socket (e.g. '{}{}' is not a valid
# JSON string).
# 4. The child process receives file descriptors and JSON blobs and,
# whenever it has at least one of each, writes a modified JSON blob
# to the FD. The blob is modified to include the child's process ID.
# 5. Once the child process has sent n responses, it closes the write end
# of the pipe, which signals to the parent that there is no more data
# coming.
# 6. The parent listens to the read end of the pipe, accumulating JSON
# blobs (again, delimited by '\n') and verifying that a) the 'pid'
# attribute belongs to the child and b) the 'ord' field has not been
# seen in a response yet. This is intended to ensure that all blobs
# sent out have been relayed back to us.
common = require("../common")
assert = require("assert")
buffer = require("buffer")
child_process = require("child_process")
fs = require("fs")
net = require("net")
netBinding = process.binding("net")
path = require("path")
DATA =
ppid: process.pid
ord: 0
SOCK_PATH = path.join(__dirname, "..", path.basename(__filename, ".js") + ".sock")
logChild = (d) ->
d = d.toString() if typeof d is "object"
d.split("\n").forEach (l) ->
common.debug "CHILD: " + l if l.length > 0
return
return
# Create a pipe
#
# We establish a listener on the read end of the pipe so that we can
# validate any data sent back by the child. We send the write end of the
# pipe to the child and close it off in our process.
pipeFDs = netBinding.pipe()
assert.equal pipeFDs.length, 2
seenOrdinals = []
pipeReadStream = new net.Stream()
pipeReadStream.on "data", (data) ->
data.toString("utf8").trim().split("\n").forEach (d) ->
rd = JSON.parse(d)
assert.equal rd.pid, cpp
assert.equal seenOrdinals.indexOf(rd.ord), -1
seenOrdinals.unshift rd.ord
return
return
pipeReadStream.open pipeFDs[0]
pipeReadStream.resume()
# Create a UNIX socket at SOCK_PATH and send DATA and the write end
# of the pipe to whoever connects.
#
# We send two messages here, both with the same pipe FD: one string, and
# one buffer. We want to make sure that both datatypes are handled
# correctly.
srv = net.createServer((s) ->
str = JSON.stringify(DATA) + "\n"
DATA.ord = DATA.ord + 1
buf = new buffer.Buffer(str.length)
buf.write JSON.stringify(DATA) + "\n", "utf8"
s.write str, "utf8", pipeFDs[1]
if s.write(buf, pipeFDs[1])
netBinding.close pipeFDs[1]
else
s.on "drain", ->
netBinding.close pipeFDs[1]
return
return
)
srv.listen SOCK_PATH
# Spawn a child running test/fixtures/recvfd.js
cp = child_process.spawn(process.argv[0], [
path.join(common.fixturesDir, "recvfd.js")
SOCK_PATH
])
cp.stdout.on "data", logChild
cp.stderr.on "data", logChild
# When the child exits, clean up and validate its exit status
cpp = cp.pid
cp.on "exit", (code, signal) ->
srv.close()
# fs.unlinkSync(SOCK_PATH);
assert.equal code, 0
assert.equal seenOrdinals.length, 2
return
# vim:ts=2 sw=2 et
|
[
{
"context": " ->method('POST')\\n ->rules(['password' => 'required|min:3|max:225', 'email' => 'required|email']) \\n!",
"end": 380,
"score": 0.6970294117927551,
"start": 372,
"tag": "PASSWORD",
"value": "required"
},
{
"context": "\\n ->rules(['password' => 'required|min:3|... | snippets/laravel_former.cson | kikoseijo/atom-bootstrap-snippets | 0 | '.text.html.php.blade, .text.html.hack':
'Former login from spanish version':
prefix: 'former_login_form_es'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#f4645f">Former</span> Login Form (spanish)'
body: "{!! Former::horizontal_open()\n ->route('login')\n ->method('POST')\n ->rules(['password' => 'required|min:3|max:225', 'email' => 'required|email']) \n!!}\n{!! Former::email('email')->label('Direccion de correo') !!}\n{!! Former::password('password')->label('Contraseña') !!}\n{!! Former::checkbox('remember')->check()->text(' Recordarme en este equipo')->label(' ') !!}\n{!! Former::actions()\n ->large_dark_submit('Acceder ahora'); \n!!}\n{!! Former::close() !!}"
'Former Contact from spanish version':
prefix: 'former_contact_form'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#f4645f">Former</span> Contact Form'
body: ""
| 162624 | '.text.html.php.blade, .text.html.hack':
'Former login from spanish version':
prefix: 'former_login_form_es'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#f4645f">Former</span> Login Form (spanish)'
body: "{!! Former::horizontal_open()\n ->route('login')\n ->method('POST')\n ->rules(['password' => '<PASSWORD>|min:3|max:2<PASSWORD>', 'email' => 'required|email']) \n!!}\n{!! Former::email('email')->label('Direccion de correo') !!}\n{!! Former::password('<PASSWORD>')->label('Contraseña') !!}\n{!! Former::checkbox('remember')->check()->text(' Recordarme en este equipo')->label(' ') !!}\n{!! Former::actions()\n ->large_dark_submit('Acceder ahora'); \n!!}\n{!! Former::close() !!}"
'Former Contact from spanish version':
prefix: 'former_contact_form'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#f4645f">Former</span> Contact Form'
body: ""
| true | '.text.html.php.blade, .text.html.hack':
'Former login from spanish version':
prefix: 'former_login_form_es'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#f4645f">Former</span> Login Form (spanish)'
body: "{!! Former::horizontal_open()\n ->route('login')\n ->method('POST')\n ->rules(['password' => 'PI:PASSWORD:<PASSWORD>END_PI|min:3|max:2PI:PASSWORD:<PASSWORD>END_PI', 'email' => 'required|email']) \n!!}\n{!! Former::email('email')->label('Direccion de correo') !!}\n{!! Former::password('PI:PASSWORD:<PASSWORD>END_PI')->label('Contraseña') !!}\n{!! Former::checkbox('remember')->check()->text(' Recordarme en este equipo')->label(' ') !!}\n{!! Former::actions()\n ->large_dark_submit('Acceder ahora'); \n!!}\n{!! Former::close() !!}"
'Former Contact from spanish version':
prefix: 'former_contact_form'
leftLabelHTML: '<span style="color:#1B81B6">Ⓢ</span>'
rightLabelHTML: '<span style="color:#f4645f">Former</span> Contact Form'
body: ""
|
[
{
"context": "ME_LAYER_ID]\n\t\t\tStackId: STACK_ID\n\t\t\tSshKeyName: \"counterplay\"\n\t\t\tOs: \"Custom\"\n\t\t\tAmiId: \"ami-93e0faf2\"\n\t\t\tInst",
"end": 6929,
"score": 0.663785994052887,
"start": 6918,
"tag": "KEY",
"value": "counterplay"
}
] | scripts/aws-utility/list-steal-time-servers.coffee | willroberts/duelyst | 5 | AWS = require 'aws-sdk'
Promise = require 'bluebird'
prettyjson = require 'prettyjson'
_ = require 'underscore'
moment = require 'moment'
ProgressBar = require 'progress'
moniker = require 'moniker'
inquirer = require 'inquirer'
request = require 'request'
requestAsync = Promise.promisify(request)
ec2 = new AWS.EC2({region:'us-west-2'})
opsworks = new AWS.OpsWorks({region:'us-east-1'})
cloudwatch = new AWS.CloudWatch({region:'us-east-1'})
Promise.promisifyAll(ec2)
Promise.promisifyAll(opsworks)
Promise.promisifyAll(cloudwatch)
environment = "staging"
STACK_ID = "25f5d045-5e8f-4fb4-a7b4-4bdbd90935c1"
GAME_LAYER_ID = "e67f9dfa-b0f5-44f7-ab82-900ab0f1734f"
AI_LAYER_ID = "678a9191-d9e0-4ba3-b2f2-ac788e38abfa"
if process.env.NODE_ENV == "production"
console.log "PRODUCTION MODE"
environment = "production"
STACK_ID = "67804928-7fd2-449f-aec7-15acfba70874"
GAME_LAYER_ID = "5de77de8-f748-4df4-a85a-e40dccc1a05f"
AI_LAYER_ID = "cece3db3-e013-4acc-9ca8-ef59113f41e3"
###*
# console.log data as a table
# @public
# @param {String} data data to print out.
###
logAsTable = (dataRows)->
keys = _.keys(dataRows[0])
Table = require('cli-table')
t = new Table({
head: keys
})
_.each dataRows, (r)->
values = _.values(r)
values = _.map values, (v)->
if v instanceof Date
v = moment(v).format("YYYY-MM-DD")
return v || ""
t.push values
strTable = t.toString()
console.log(strTable)
return strTable
###*
# Custom error used by the confirmation prompt promise
# @class
###
class DidNotConfirmError extends Error
constructor: (@message = "You did not confirm.") ->
@name = "DidNotConfirmError"
@status = 404
@description = "You did not confirm."
Error.captureStackTrace(this, DidNotConfirmError)
super(@message)
###*
# Show a general purpose confirmation prompt
# @public
# @param {String} msg Custom confirmation message.
# @return {Promise} Promise that will resolve if the user confirms with a 'Y' or reject with DidNotConfirmError otherwise.
###
confirmAsync = (msg="...")->
return new Promise (resolve,reject)->
inquirer.prompt [{
name:'confirm'
message:"<#{environment}> #{msg} continue? Y/N?"
}],(answers)->
if answers.confirm.toLowerCase() == "y"
resolve()
else
reject(new DidNotConfirmError())
console.log "grabbing instance data for opsworks..."
console.time "done loading instance data"
Promise.all([
opsworks.describeInstancesAsync({
LayerId:AI_LAYER_ID
}),
opsworks.describeInstancesAsync({
LayerId:GAME_LAYER_ID
})
])
.bind {}
.spread (aiInstances,gameInstances)-> # after getting instances, load metric data from CloudWatch for CPU STEAL TIME
console.timeEnd "done loading instance data"
# console.log gameInstances
aiInstances = _.map(aiInstances.Instances, (instance)->
return _.pick(instance,[
"InstanceId",
"Hostname",
"PrivateIp",
"PublicIp",
"Status"
])
)
gameInstances = _.map(gameInstances.Instances, (instance)->
return _.pick(instance,[
"InstanceId",
"Hostname",
"PrivateIp",
"PublicIp",
"Status"
])
)
@.aiInstances = _.filter aiInstances, (i)-> return i["Status"] == "online"
@.gameInstances = _.filter gameInstances, (i)-> return i["Status"] == "online"
bar = new ProgressBar('getting metric data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
return cloudwatch.getMetricStatisticsAsync({
Namespace: "AWS/OpsWorks"
MetricName: "cpu_steal"
Period: 60 * 60 # 1 hour
Statistics: ["Maximum"]
StartTime: moment().utc().subtract(1,'hour').toDate()
EndTime: moment().utc().toDate()
Dimensions: [
{
Name: "InstanceId"
Value: instance.InstanceId
}
]
}).then (result)->
instance.MaxStealTime = result["Datapoints"][0]["Maximum"]
bar.tick()
{concurrency:25})
.then ()-> # get HEALTH (player count) data for each server
# @.gameInstances = _.filter(@.gameInstances, (instance)-> return instance["MaxStealTime"] > 1.0 )
bar = new ProgressBar('getting health data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
return requestAsync({url: "http://#{instance.PublicIp}/health"})
.spread (res,body)-> return JSON.parse(body)
.then (response)->
instance.Players = response.players
instance.Games = response.games
bar.tick()
{concurrency:25})
.then ()-> # load CONSUL maintenance info data for each instance
bar = new ProgressBar('getting consul data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
url = "https://consul.duelyst.com/v1/health/node/#{environment}-#{instance.Hostname}"
return requestAsync({url: url})
.spread (res,body)->
return JSON.parse(body)
.then (response)->
maintenance = _.find(response,(item)-> return item["CheckID"] == "_node_maintenance")
if maintenance
instance.MaintMode = true
else
instance.MaintMode = false
bar.tick()
{concurrency:25})
.then (results)-> # when all data is loaded, retire any instances that have HIGH steal time
instances = @.gameInstances
# instances = _.filter(instances, (instance)-> return instance["MaxStealTime"] > 1.0 )
instances = _.sortBy(instances,"MaxStealTime")
logAsTable(instances)
@.retiredInstances = instances
# @.retiredInstances = _.filter @.retiredInstances, (i)-> i.Hostname == "api-game1s-wakeful-substance"
@.retiredInstances = _.filter @.retiredInstances, (instance)-> return instance["MaxStealTime"] > 5.0 and instance["MaintMode"] != true
retiredInstanceIds = _.map(@.retiredInstances,(i)-> return i.InstanceId)
if @.retiredInstances.length > 0
console.log "retiring instances: ", _.map(@.retiredInstances,(i)-> return i.Hostname)
return confirmAsync("Retiring instances.")
.then ()-> #... after confirmation retire
if @.retiredInstances.length > 0
retiredInstanceIds = _.map(@.retiredInstances,(i)-> return i.InstanceId)
params = {
Command:
Name:"execute_recipes"
Args:
recipes: ["sarlac::consul_maint_on"]
StackId: STACK_ID
InstanceIds: retiredInstanceIds
Comment: "Batch retiring instances"
}
console.log params
return opsworks.createDeploymentAsync(params)
.then ()-> # for each retired instance, create a substitute
allPromises = []
for instance in @.retiredInstances
match = instance.Hostname.match(/^(([a-z]+\-)+)+([a-z]+[0-9]+[a-z]*)(\-[a-z]+)*$/)
instanceName = match[1]
instanceNumber = match[3]
newName = "#{instanceName}#{instanceNumber}-#{moniker.choose()}"
console.log "creating new instance #{newName}"
instanceParams =
Hostname: newName
LayerIds: [GAME_LAYER_ID]
StackId: STACK_ID
SshKeyName: "counterplay"
Os: "Custom"
AmiId: "ami-93e0faf2"
InstallUpdatesOnBoot: false
InstanceType: "m4.large"
# console.log instanceParams
allPromises.push opsworks.createInstanceAsync(instanceParams)
return Promise.all(allPromises)
.then (results)-> # start each new substitute instance
if results?.length > 0
bar = new ProgressBar('starting instances [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(results.length)
})
return Promise.map results, (instance) ->
return opsworks.startInstanceAsync({
InstanceId: instance.InstanceId
}).then ()->
bar.tick()
.then ()-> # done...
console.log "ALL DONE"
.catch DidNotConfirmError, (e)->
console.log "ABORTED"
| 47060 | AWS = require 'aws-sdk'
Promise = require 'bluebird'
prettyjson = require 'prettyjson'
_ = require 'underscore'
moment = require 'moment'
ProgressBar = require 'progress'
moniker = require 'moniker'
inquirer = require 'inquirer'
request = require 'request'
requestAsync = Promise.promisify(request)
ec2 = new AWS.EC2({region:'us-west-2'})
opsworks = new AWS.OpsWorks({region:'us-east-1'})
cloudwatch = new AWS.CloudWatch({region:'us-east-1'})
Promise.promisifyAll(ec2)
Promise.promisifyAll(opsworks)
Promise.promisifyAll(cloudwatch)
environment = "staging"
STACK_ID = "25f5d045-5e8f-4fb4-a7b4-4bdbd90935c1"
GAME_LAYER_ID = "e67f9dfa-b0f5-44f7-ab82-900ab0f1734f"
AI_LAYER_ID = "678a9191-d9e0-4ba3-b2f2-ac788e38abfa"
if process.env.NODE_ENV == "production"
console.log "PRODUCTION MODE"
environment = "production"
STACK_ID = "67804928-7fd2-449f-aec7-15acfba70874"
GAME_LAYER_ID = "5de77de8-f748-4df4-a85a-e40dccc1a05f"
AI_LAYER_ID = "cece3db3-e013-4acc-9ca8-ef59113f41e3"
###*
# console.log data as a table
# @public
# @param {String} data data to print out.
###
logAsTable = (dataRows)->
keys = _.keys(dataRows[0])
Table = require('cli-table')
t = new Table({
head: keys
})
_.each dataRows, (r)->
values = _.values(r)
values = _.map values, (v)->
if v instanceof Date
v = moment(v).format("YYYY-MM-DD")
return v || ""
t.push values
strTable = t.toString()
console.log(strTable)
return strTable
###*
# Custom error used by the confirmation prompt promise
# @class
###
class DidNotConfirmError extends Error
constructor: (@message = "You did not confirm.") ->
@name = "DidNotConfirmError"
@status = 404
@description = "You did not confirm."
Error.captureStackTrace(this, DidNotConfirmError)
super(@message)
###*
# Show a general purpose confirmation prompt
# @public
# @param {String} msg Custom confirmation message.
# @return {Promise} Promise that will resolve if the user confirms with a 'Y' or reject with DidNotConfirmError otherwise.
###
confirmAsync = (msg="...")->
return new Promise (resolve,reject)->
inquirer.prompt [{
name:'confirm'
message:"<#{environment}> #{msg} continue? Y/N?"
}],(answers)->
if answers.confirm.toLowerCase() == "y"
resolve()
else
reject(new DidNotConfirmError())
console.log "grabbing instance data for opsworks..."
console.time "done loading instance data"
Promise.all([
opsworks.describeInstancesAsync({
LayerId:AI_LAYER_ID
}),
opsworks.describeInstancesAsync({
LayerId:GAME_LAYER_ID
})
])
.bind {}
.spread (aiInstances,gameInstances)-> # after getting instances, load metric data from CloudWatch for CPU STEAL TIME
console.timeEnd "done loading instance data"
# console.log gameInstances
aiInstances = _.map(aiInstances.Instances, (instance)->
return _.pick(instance,[
"InstanceId",
"Hostname",
"PrivateIp",
"PublicIp",
"Status"
])
)
gameInstances = _.map(gameInstances.Instances, (instance)->
return _.pick(instance,[
"InstanceId",
"Hostname",
"PrivateIp",
"PublicIp",
"Status"
])
)
@.aiInstances = _.filter aiInstances, (i)-> return i["Status"] == "online"
@.gameInstances = _.filter gameInstances, (i)-> return i["Status"] == "online"
bar = new ProgressBar('getting metric data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
return cloudwatch.getMetricStatisticsAsync({
Namespace: "AWS/OpsWorks"
MetricName: "cpu_steal"
Period: 60 * 60 # 1 hour
Statistics: ["Maximum"]
StartTime: moment().utc().subtract(1,'hour').toDate()
EndTime: moment().utc().toDate()
Dimensions: [
{
Name: "InstanceId"
Value: instance.InstanceId
}
]
}).then (result)->
instance.MaxStealTime = result["Datapoints"][0]["Maximum"]
bar.tick()
{concurrency:25})
.then ()-> # get HEALTH (player count) data for each server
# @.gameInstances = _.filter(@.gameInstances, (instance)-> return instance["MaxStealTime"] > 1.0 )
bar = new ProgressBar('getting health data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
return requestAsync({url: "http://#{instance.PublicIp}/health"})
.spread (res,body)-> return JSON.parse(body)
.then (response)->
instance.Players = response.players
instance.Games = response.games
bar.tick()
{concurrency:25})
.then ()-> # load CONSUL maintenance info data for each instance
bar = new ProgressBar('getting consul data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
url = "https://consul.duelyst.com/v1/health/node/#{environment}-#{instance.Hostname}"
return requestAsync({url: url})
.spread (res,body)->
return JSON.parse(body)
.then (response)->
maintenance = _.find(response,(item)-> return item["CheckID"] == "_node_maintenance")
if maintenance
instance.MaintMode = true
else
instance.MaintMode = false
bar.tick()
{concurrency:25})
.then (results)-> # when all data is loaded, retire any instances that have HIGH steal time
instances = @.gameInstances
# instances = _.filter(instances, (instance)-> return instance["MaxStealTime"] > 1.0 )
instances = _.sortBy(instances,"MaxStealTime")
logAsTable(instances)
@.retiredInstances = instances
# @.retiredInstances = _.filter @.retiredInstances, (i)-> i.Hostname == "api-game1s-wakeful-substance"
@.retiredInstances = _.filter @.retiredInstances, (instance)-> return instance["MaxStealTime"] > 5.0 and instance["MaintMode"] != true
retiredInstanceIds = _.map(@.retiredInstances,(i)-> return i.InstanceId)
if @.retiredInstances.length > 0
console.log "retiring instances: ", _.map(@.retiredInstances,(i)-> return i.Hostname)
return confirmAsync("Retiring instances.")
.then ()-> #... after confirmation retire
if @.retiredInstances.length > 0
retiredInstanceIds = _.map(@.retiredInstances,(i)-> return i.InstanceId)
params = {
Command:
Name:"execute_recipes"
Args:
recipes: ["sarlac::consul_maint_on"]
StackId: STACK_ID
InstanceIds: retiredInstanceIds
Comment: "Batch retiring instances"
}
console.log params
return opsworks.createDeploymentAsync(params)
.then ()-> # for each retired instance, create a substitute
allPromises = []
for instance in @.retiredInstances
match = instance.Hostname.match(/^(([a-z]+\-)+)+([a-z]+[0-9]+[a-z]*)(\-[a-z]+)*$/)
instanceName = match[1]
instanceNumber = match[3]
newName = "#{instanceName}#{instanceNumber}-#{moniker.choose()}"
console.log "creating new instance #{newName}"
instanceParams =
Hostname: newName
LayerIds: [GAME_LAYER_ID]
StackId: STACK_ID
SshKeyName: "<KEY>"
Os: "Custom"
AmiId: "ami-93e0faf2"
InstallUpdatesOnBoot: false
InstanceType: "m4.large"
# console.log instanceParams
allPromises.push opsworks.createInstanceAsync(instanceParams)
return Promise.all(allPromises)
.then (results)-> # start each new substitute instance
if results?.length > 0
bar = new ProgressBar('starting instances [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(results.length)
})
return Promise.map results, (instance) ->
return opsworks.startInstanceAsync({
InstanceId: instance.InstanceId
}).then ()->
bar.tick()
.then ()-> # done...
console.log "ALL DONE"
.catch DidNotConfirmError, (e)->
console.log "ABORTED"
| true | AWS = require 'aws-sdk'
Promise = require 'bluebird'
prettyjson = require 'prettyjson'
_ = require 'underscore'
moment = require 'moment'
ProgressBar = require 'progress'
moniker = require 'moniker'
inquirer = require 'inquirer'
request = require 'request'
requestAsync = Promise.promisify(request)
ec2 = new AWS.EC2({region:'us-west-2'})
opsworks = new AWS.OpsWorks({region:'us-east-1'})
cloudwatch = new AWS.CloudWatch({region:'us-east-1'})
Promise.promisifyAll(ec2)
Promise.promisifyAll(opsworks)
Promise.promisifyAll(cloudwatch)
environment = "staging"
STACK_ID = "25f5d045-5e8f-4fb4-a7b4-4bdbd90935c1"
GAME_LAYER_ID = "e67f9dfa-b0f5-44f7-ab82-900ab0f1734f"
AI_LAYER_ID = "678a9191-d9e0-4ba3-b2f2-ac788e38abfa"
if process.env.NODE_ENV == "production"
console.log "PRODUCTION MODE"
environment = "production"
STACK_ID = "67804928-7fd2-449f-aec7-15acfba70874"
GAME_LAYER_ID = "5de77de8-f748-4df4-a85a-e40dccc1a05f"
AI_LAYER_ID = "cece3db3-e013-4acc-9ca8-ef59113f41e3"
###*
# console.log data as a table
# @public
# @param {String} data data to print out.
###
logAsTable = (dataRows)->
keys = _.keys(dataRows[0])
Table = require('cli-table')
t = new Table({
head: keys
})
_.each dataRows, (r)->
values = _.values(r)
values = _.map values, (v)->
if v instanceof Date
v = moment(v).format("YYYY-MM-DD")
return v || ""
t.push values
strTable = t.toString()
console.log(strTable)
return strTable
###*
# Custom error used by the confirmation prompt promise
# @class
###
class DidNotConfirmError extends Error
constructor: (@message = "You did not confirm.") ->
@name = "DidNotConfirmError"
@status = 404
@description = "You did not confirm."
Error.captureStackTrace(this, DidNotConfirmError)
super(@message)
###*
# Show a general purpose confirmation prompt
# @public
# @param {String} msg Custom confirmation message.
# @return {Promise} Promise that will resolve if the user confirms with a 'Y' or reject with DidNotConfirmError otherwise.
###
confirmAsync = (msg="...")->
return new Promise (resolve,reject)->
inquirer.prompt [{
name:'confirm'
message:"<#{environment}> #{msg} continue? Y/N?"
}],(answers)->
if answers.confirm.toLowerCase() == "y"
resolve()
else
reject(new DidNotConfirmError())
console.log "grabbing instance data for opsworks..."
console.time "done loading instance data"
Promise.all([
opsworks.describeInstancesAsync({
LayerId:AI_LAYER_ID
}),
opsworks.describeInstancesAsync({
LayerId:GAME_LAYER_ID
})
])
.bind {}
.spread (aiInstances,gameInstances)-> # after getting instances, load metric data from CloudWatch for CPU STEAL TIME
console.timeEnd "done loading instance data"
# console.log gameInstances
aiInstances = _.map(aiInstances.Instances, (instance)->
return _.pick(instance,[
"InstanceId",
"Hostname",
"PrivateIp",
"PublicIp",
"Status"
])
)
gameInstances = _.map(gameInstances.Instances, (instance)->
return _.pick(instance,[
"InstanceId",
"Hostname",
"PrivateIp",
"PublicIp",
"Status"
])
)
@.aiInstances = _.filter aiInstances, (i)-> return i["Status"] == "online"
@.gameInstances = _.filter gameInstances, (i)-> return i["Status"] == "online"
bar = new ProgressBar('getting metric data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
return cloudwatch.getMetricStatisticsAsync({
Namespace: "AWS/OpsWorks"
MetricName: "cpu_steal"
Period: 60 * 60 # 1 hour
Statistics: ["Maximum"]
StartTime: moment().utc().subtract(1,'hour').toDate()
EndTime: moment().utc().toDate()
Dimensions: [
{
Name: "InstanceId"
Value: instance.InstanceId
}
]
}).then (result)->
instance.MaxStealTime = result["Datapoints"][0]["Maximum"]
bar.tick()
{concurrency:25})
.then ()-> # get HEALTH (player count) data for each server
# @.gameInstances = _.filter(@.gameInstances, (instance)-> return instance["MaxStealTime"] > 1.0 )
bar = new ProgressBar('getting health data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
return requestAsync({url: "http://#{instance.PublicIp}/health"})
.spread (res,body)-> return JSON.parse(body)
.then (response)->
instance.Players = response.players
instance.Games = response.games
bar.tick()
{concurrency:25})
.then ()-> # load CONSUL maintenance info data for each instance
bar = new ProgressBar('getting consul data [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(@.gameInstances.length)
})
return Promise.map(@.gameInstances,(instance,index)=>
url = "https://consul.duelyst.com/v1/health/node/#{environment}-#{instance.Hostname}"
return requestAsync({url: url})
.spread (res,body)->
return JSON.parse(body)
.then (response)->
maintenance = _.find(response,(item)-> return item["CheckID"] == "_node_maintenance")
if maintenance
instance.MaintMode = true
else
instance.MaintMode = false
bar.tick()
{concurrency:25})
.then (results)-> # when all data is loaded, retire any instances that have HIGH steal time
instances = @.gameInstances
# instances = _.filter(instances, (instance)-> return instance["MaxStealTime"] > 1.0 )
instances = _.sortBy(instances,"MaxStealTime")
logAsTable(instances)
@.retiredInstances = instances
# @.retiredInstances = _.filter @.retiredInstances, (i)-> i.Hostname == "api-game1s-wakeful-substance"
@.retiredInstances = _.filter @.retiredInstances, (instance)-> return instance["MaxStealTime"] > 5.0 and instance["MaintMode"] != true
retiredInstanceIds = _.map(@.retiredInstances,(i)-> return i.InstanceId)
if @.retiredInstances.length > 0
console.log "retiring instances: ", _.map(@.retiredInstances,(i)-> return i.Hostname)
return confirmAsync("Retiring instances.")
.then ()-> #... after confirmation retire
if @.retiredInstances.length > 0
retiredInstanceIds = _.map(@.retiredInstances,(i)-> return i.InstanceId)
params = {
Command:
Name:"execute_recipes"
Args:
recipes: ["sarlac::consul_maint_on"]
StackId: STACK_ID
InstanceIds: retiredInstanceIds
Comment: "Batch retiring instances"
}
console.log params
return opsworks.createDeploymentAsync(params)
.then ()-> # for each retired instance, create a substitute
allPromises = []
for instance in @.retiredInstances
match = instance.Hostname.match(/^(([a-z]+\-)+)+([a-z]+[0-9]+[a-z]*)(\-[a-z]+)*$/)
instanceName = match[1]
instanceNumber = match[3]
newName = "#{instanceName}#{instanceNumber}-#{moniker.choose()}"
console.log "creating new instance #{newName}"
instanceParams =
Hostname: newName
LayerIds: [GAME_LAYER_ID]
StackId: STACK_ID
SshKeyName: "PI:KEY:<KEY>END_PI"
Os: "Custom"
AmiId: "ami-93e0faf2"
InstallUpdatesOnBoot: false
InstanceType: "m4.large"
# console.log instanceParams
allPromises.push opsworks.createInstanceAsync(instanceParams)
return Promise.all(allPromises)
.then (results)-> # start each new substitute instance
if results?.length > 0
bar = new ProgressBar('starting instances [:bar] :percent :etas', {
complete: '=',
incomplete: ' ',
width: 20,
total: parseInt(results.length)
})
return Promise.map results, (instance) ->
return opsworks.startInstanceAsync({
InstanceId: instance.InstanceId
}).then ()->
bar.tick()
.then ()-> # done...
console.log "ALL DONE"
.catch DidNotConfirmError, (e)->
console.log "ABORTED"
|
[
{
"context": "name%22%3A%22jsonapi.methods%22%2C%22key%22%3A%22059d32c621cee00c7990857d17cbd5f9730c77735dbf905654f90124d2cbaa9f%22%2C%22username%22%3A%22admin%22%2",
"end": 262,
"score": 0.8315593004226685,
"start": 213,
"tag": "KEY",
"value": "59d32c621cee00c7990857d17cbd5f9730c77735dbf9056... | site/generate.coffee | NationsGlory/ngjsonapi | 109 | #!/usr/bin/env coffee
fs = require 'fs'
wintersmith = require 'wintersmith'
request = require 'request'
url = "http://localhost:25565/api/2/call?json=%5B%7B%22name%22%3A%22jsonapi.methods%22%2C%22key%22%3A%22059d32c621cee00c7990857d17cbd5f9730c77735dbf905654f90124d2cbaa9f%22%2C%22username%22%3A%22admin%22%2C%22arguments%22%3A%5B%5D%2C%22tag%22%3A%221%22%7D%5D"
cmd = process.argv.pop()
if ['build', 'preview', 'build_locals'].indexOf(cmd) is -1
cmd = 'build'
env = null
if cmd isnt "build_locals"
env = wintersmith __dirname + '/config.json'
# perform basic setup (pull in new readme, etc)
if cmd is "build"
env.build (err) ->
throw err if err
console.log 'Done!'
else if cmd is "preview"
env.preview (err, server) ->
throw err if err
console.log 'Server Running!'
else if cmd is "build_locals"
console.log "build_locals"
request url, (err, response, body) ->
fs.writeFileSync 'locals.json', JSON.stringify(methods: JSON.parse(body)[0].success) | 189046 | #!/usr/bin/env coffee
fs = require 'fs'
wintersmith = require 'wintersmith'
request = require 'request'
url = "http://localhost:25565/api/2/call?json=%5B%7B%22name%22%3A%22jsonapi.methods%22%2C%22key%22%3A%220<KEY>f<KEY>cbaa<KEY>f%22%2C%22username%22%3A%22admin%22%2C%22arguments%22%3A%5B%5D%2C%22tag%22%3A%221%22%7D%5D"
cmd = process.argv.pop()
if ['build', 'preview', 'build_locals'].indexOf(cmd) is -1
cmd = 'build'
env = null
if cmd isnt "build_locals"
env = wintersmith __dirname + '/config.json'
# perform basic setup (pull in new readme, etc)
if cmd is "build"
env.build (err) ->
throw err if err
console.log 'Done!'
else if cmd is "preview"
env.preview (err, server) ->
throw err if err
console.log 'Server Running!'
else if cmd is "build_locals"
console.log "build_locals"
request url, (err, response, body) ->
fs.writeFileSync 'locals.json', JSON.stringify(methods: JSON.parse(body)[0].success) | true | #!/usr/bin/env coffee
fs = require 'fs'
wintersmith = require 'wintersmith'
request = require 'request'
url = "http://localhost:25565/api/2/call?json=%5B%7B%22name%22%3A%22jsonapi.methods%22%2C%22key%22%3A%220PI:KEY:<KEY>END_PIfPI:KEY:<KEY>END_PIcbaaPI:KEY:<KEY>END_PIf%22%2C%22username%22%3A%22admin%22%2C%22arguments%22%3A%5B%5D%2C%22tag%22%3A%221%22%7D%5D"
cmd = process.argv.pop()
if ['build', 'preview', 'build_locals'].indexOf(cmd) is -1
cmd = 'build'
env = null
if cmd isnt "build_locals"
env = wintersmith __dirname + '/config.json'
# perform basic setup (pull in new readme, etc)
if cmd is "build"
env.build (err) ->
throw err if err
console.log 'Done!'
else if cmd is "preview"
env.preview (err, server) ->
throw err if err
console.log 'Server Running!'
else if cmd is "build_locals"
console.log "build_locals"
request url, (err, response, body) ->
fs.writeFileSync 'locals.json', JSON.stringify(methods: JSON.parse(body)[0].success) |
[
{
"context": "ch (done) ->\n user = new User\n name: 'Full name'\n email: 'test@test.com'\n username:",
"end": 331,
"score": 0.9994164705276489,
"start": 322,
"tag": "NAME",
"value": "Full name"
},
{
"context": "ew User\n name: 'Full name'\n em... | test/mocha/article/model.coffee | zerodi/mean-CJS | 1 | ###
Module dependencies
###
should = require 'should'
mongoose = require 'mongoose'
User = mongoose.model 'User'
Article = mongoose.model 'Article'
user = undefined
article = undefined
#The tests
describe '<Unit Test>', ->
describe 'Model Article:', ->
beforeEach (done) ->
user = new User
name: 'Full name'
email: 'test@test.com'
username: 'user'
password: 'password'
user.save ->
article = new Article
title: 'Article Title'
content: 'Article Content'
user: user
done()
return
return
describe 'Method Save', ->
it 'should be able to save without problems', (done) ->
article.save (err) ->
should.not.exist err
done()
return
it 'should be able to show an error when try to save without title', (done) ->
article.title = ''
article.save (err) ->
should.exist err
done()
return
return
aftreEach (done) ->
Article.remove {}
User.remove {}
done()
return
after (done) ->
Article.remove().exec()
User.remove().exec()
done()
return
return
return | 83268 | ###
Module dependencies
###
should = require 'should'
mongoose = require 'mongoose'
User = mongoose.model 'User'
Article = mongoose.model 'Article'
user = undefined
article = undefined
#The tests
describe '<Unit Test>', ->
describe 'Model Article:', ->
beforeEach (done) ->
user = new User
name: '<NAME>'
email: '<EMAIL>'
username: 'user'
password: '<PASSWORD>'
user.save ->
article = new Article
title: 'Article Title'
content: 'Article Content'
user: user
done()
return
return
describe 'Method Save', ->
it 'should be able to save without problems', (done) ->
article.save (err) ->
should.not.exist err
done()
return
it 'should be able to show an error when try to save without title', (done) ->
article.title = ''
article.save (err) ->
should.exist err
done()
return
return
aftreEach (done) ->
Article.remove {}
User.remove {}
done()
return
after (done) ->
Article.remove().exec()
User.remove().exec()
done()
return
return
return | true | ###
Module dependencies
###
should = require 'should'
mongoose = require 'mongoose'
User = mongoose.model 'User'
Article = mongoose.model 'Article'
user = undefined
article = undefined
#The tests
describe '<Unit Test>', ->
describe 'Model Article:', ->
beforeEach (done) ->
user = new User
name: 'PI:NAME:<NAME>END_PI'
email: 'PI:EMAIL:<EMAIL>END_PI'
username: 'user'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
user.save ->
article = new Article
title: 'Article Title'
content: 'Article Content'
user: user
done()
return
return
describe 'Method Save', ->
it 'should be able to save without problems', (done) ->
article.save (err) ->
should.not.exist err
done()
return
it 'should be able to show an error when try to save without title', (done) ->
article.title = ''
article.save (err) ->
should.exist err
done()
return
return
aftreEach (done) ->
Article.remove {}
User.remove {}
done()
return
after (done) ->
Article.remove().exec()
User.remove().exec()
done()
return
return
return |
[
{
"context": "tutsplus.com/tutorials/javascript-ajax/inettuts/ (James Padolsey)\n @view backend_dashboard: ->\n html ->\n ",
"end": 605,
"score": 0.9995174407958984,
"start": 591,
"tag": "NAME",
"value": "James Padolsey"
}
] | modules/backend/inline_views/view_dashboard.coffee | nodize/nodizecms | 32 | @include = ->
@client '/admin/backend_dashboard.js': ->
@connect()
#
# Test event, received on new connection
#
@on "testEvent": (event)->
#console.log "connection in dashboard"
#console.log event
#alert "connection"
now = new Date()
hour = now.getHours()
min = now.getMinutes()
sec = now.getSeconds()
j$( "#intrxo .widget-content").text( hour+":"+min+":"+sec+" "+event.data.message)
#
# Displaying dashboard
#
# Javascript & css from http://net.tutsplus.com/tutorials/javascript-ajax/inettuts/ (James Padolsey)
@view backend_dashboard: ->
html ->
head ->
title "dashboard"
link type: 'text/css', rel: 'stylesheet', href: @assetsPath+'/css/dashboard.css'
div "#head", ->
h1 "Dashboard"
div "#columns.dashboard", ->
ul "#dashboard-column1.dashboard-column", ->
li "#intro.widget.color-white", ->
div ".widget-head", ->
h3 "widget title"
div ".widget-content", ->
p "The content..."
p "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
"
###li ".widget.color-blue", ->
div ".widget-head", ->
h3 "widget title"
div ".widget-content", ->
p "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
###
# ul "#dashboard-column3.dashboard-column", ->
# li ".widget.color-green", ->
# div ".widget-head", ->
# h3 "widget title"
# div ".widget-content", ->
# p "The content..."
ul "#dashboard-column-test.dashboard-column", ->
li ".widget.color-yellow", ->
div ".widget-head", ->
h3 "Users & Memory used"
div ".widget-content", ->
input ".knob",
id:"dashboard-knob-users"
value:"35"
"data-width":"80"
"data-fgColor":"#ffec03"
"data-skin":"tron"
"data-thickness":".2"
"data-displayPrevious":true
"data-readOnly" : true
"data-max" : 5
"data-title" : 'users'
input ".knob",
id:"dashboard-knob-memory"
value:"35"
"data-width":"80"
"data-fgColor":"#ffec03"
"data-skin":"tron"
"data-thickness":".2"
"data-displayPrevious":true
"data-readOnly" : true
"data-max" : 100
"data-title" : 'memory'
coffeescript ->
#
# Knob activation
#
$ = jQuery
$ ->
$(".knob").knob draw: ->
# "tron" case
if @$.data("skin") is "tron"
a = @angle(@cv) # Angle
sa = @startAngle # Previous start angle
sat = @startAngle # Start angle
ea = undefined
# Previous end angle
eat = sat + a # End angle
r = true
@g.lineWidth = @lineWidth
@o.cursor and (sat = eat - 0.3) and (eat = eat + 0.3)
if @o.displayPrevious
ea = @startAngle + @angle(@value)
@o.cursor and (sa = ea - 0.3) and (ea = ea + 0.3)
@g.beginPath()
@g.strokeStyle = @previousColor
@g.arc @xy, @xy, @radius - @lineWidth, sa, ea, false
@g.stroke()
@g.beginPath()
@g.strokeStyle = (if r then @o.fgColor else @fgColor)
@g.arc @xy, @xy, @radius - @lineWidth, sat, eat, false
@g.stroke()
@g.lineWidth = 2
@g.beginPath()
@g.strokeStyle = @o.fgColor
@g.arc @xy, @xy, @radius - @lineWidth + 1 + @lineWidth * 2 / 3, 0, 2 * Math.PI, false
@g.stroke()
@g.fillStyle = "#FFF"
@g.textBaseline = "mb"
@g.fillText(@$.data("title"), 30, 90)
false
#
# Using JQuery to load the javascript code that is in charge to manage events (socket.io)
#
window.j$ = jQuery.noConflict()
jQuery.noConflict().ajax
url: "/admin/backend_dashboard.js"
dataType : "script"
iNettuts =
jQuery: jQuery.noConflict()
settings:
columns: ".dashboard-column"
widgetSelector: ".widget"
handleSelector: ".widget-head"
contentSelector: ".widget-content"
widgetDefault:
movable: true
removable: true
collapsible: true
editable: true
colorClasses: [ "color-yellow", "color-red", "color-blue", "color-white", "color-orange", "color-green" ]
widgetIndividual:
intro:
movable: false
removable: false
collapsible: false
editable: false
gallery:
colorClasses: [ "color-yellow", "color-red", "color-white" ]
init: ->
@attachStylesheet "/backend/javascript/dashboard/inettuts.js.css"
@addWidgetControls()
@makeSortable()
getWidgetSettings: (id) ->
$ = @jQuery
settings = @settings
(if (id and settings.widgetIndividual[id]) then $.extend({}, settings.widgetDefault, settings.widgetIndividual[id]) else settings.widgetDefault)
addWidgetControls: ->
iNettuts = this
$ = @jQuery
settings = @settings
#
# Loop through each widget:
#
$(settings.widgetSelector, $(settings.columns)).each ->
#
# Merge individual settings with default widget settings
#
thisWidgetSettings = iNettuts.getWidgetSettings(@id)
#
# (if "removable" option is TRUE):
#
if thisWidgetSettings.removable
#
# Add CLOSE (REMOVE) button & functionality
#
$("<a href=\"#\" class=\"remove\">CLOSE</a>").mousedown((e) ->
e.stopPropagation()
).click(->
#if confirm("This widget will be removed, ok?")
$(this).parents(settings.widgetSelector).animate
opacity: 0
, ->
$(this).wrap("<div/>").parent().slideUp ->
$(this).remove()
false
).appendTo $(settings.handleSelector, this)
#
# (if "editable" option is TRUE):
#
if thisWidgetSettings.editable
#
# Create new anchor element with class of 'edit':
#
$("<a href=\"#\" class=\"edit\">EDIT</a>").mousedown((e) ->
#
# Stop event bubbling
#
e.stopPropagation()
).toggle(->
#
# // Toggle: (1st state):
#
#
# Change background image so the button now reads 'close edit':
#
$(this).css(
backgroundPosition: "-66px 0"
width: "55px"
)
#
# Traverse to widget (list item):
#
.parents(settings.widgetSelector)
#
# Find the edit-box, show it, then focus <input/>:
#
.find(".edit-box")
.show()
.find("input")
.focus()
false
, ->
#
# Toggle: (2nd state):
#
#
# Reset background and width (will default to CSS specified in StyleSheet):
#
$(this).css(
backgroundPosition: ""
width: "")
#
# Traverse to widget (list item):
#
.parents(settings.widgetSelector)
#
# Find the edit-box and hide it:
#
.find(".edit-box").hide()
#
# Return false, prevent default action:
#
false
#
# Append this button to the widget handle:
#
).appendTo $(settings.handleSelector, this)
#
# Add the actual editing section (edit-box):
#
$("<div class=\"edit-box\" style=\"display:none;\"/>").append("<ul><li class=\"item\"><label>Title</label><input size=12 value=\"" + $("h3", this).text() + "\"/></li>").append((->
colorList = "<li class=\"item\"><label>Color:</label><ul class=\"colors\">"
$(thisWidgetSettings.colorClasses).each ->
colorList += "<li class=\"" + this + "\"/>"
colorList + "</ul>"
)()).append("</ul>").insertAfter $(settings.handleSelector, this)
#
# (if "collapsible" option is TRUE):
#
if thisWidgetSettings.collapsible
#
# Add COLLAPSE button and functionality
#
$("<a href=\"#\" class=\"collapse\">COLLAPSE</a>").mousedown((e) ->
e.stopPropagation()
).toggle(->
$(this).css(backgroundPosition: "-38px 0").parents(settings.widgetSelector).find(settings.contentSelector).hide()
false
, ->
$(this).css(backgroundPosition: "").parents(settings.widgetSelector).find(settings.contentSelector).show()
false
).prependTo $(settings.handleSelector, this)
$(".edit-box").each ->
$("input", this).keyup ->
$(this).parents(settings.widgetSelector).find("h3").text (if $(this).val().length > 20 then $(this).val().substr(0, 20) + "..." else $(this).val())
$("ul.colors li", this).click ->
colorStylePattern = /\bcolor-[\w]{1,}\b/
thisWidgetColorClass = $(this).parents(settings.widgetSelector).attr("class").match(colorStylePattern)
$(this).parents(settings.widgetSelector).removeClass(thisWidgetColorClass[0]).addClass $(this).attr("class").match(colorStylePattern)[0] if thisWidgetColorClass
false
attachStylesheet: (href) ->
$ = @jQuery
$("<link href=\"" + href + "\" rel=\"stylesheet\" type=\"text/css\" />").appendTo "head"
makeSortable: ->
iNettuts = this
$ = @jQuery
settings = @settings
$sortableItems = (->
notSortable = ""
$(settings.widgetSelector, $(settings.columns)).each (i) ->
unless iNettuts.getWidgetSettings(@id).movable
@id = "widget-no-id-" + i unless @id
notSortable += "#" + @id + ","
$ "> li:not(" + notSortable + ")", settings.columns
)()
$sortableItems.find(settings.handleSelector).css(cursor: "move")
#
# Mousedown function
#
.mousedown((e) ->
$sortableItems.css width: ""
#
# Traverse to parent (the widget):
#
$(this).parent().css
#
# Explicitely set width as computed width:
#
width: $(this).parent().width() + "px"
#width:100
#"background-color":"#F00"
#z-index":9999
)
#
# Mouseup function
#
.mouseup ->
#
# Check if widget is currently in the process of dragging
#
unless $(this).parent().hasClass("dragging")
$(this).parent().css width: ""
#
# If it IS currently being dragged then we want to
# temporarily disable dragging, while widget is
# reverting to original position.
#
else
$(settings.columns).sortable "disable"
#
# Select the columns and initiate 'sortable':
#
$(settings.columns).sortable
#
# Specify those items which will be sortable:
#
items: $sortableItems
#
# Connect each column with every other column:
#
connectWith: $(settings.columns)
#
# Set the handle to the top bar:
#
handle: settings.handleSelector
#
# Define class of placeholder (styled in inettuts.js.css)
#
placeholder: "widget-placeholder"
#
# Make sure placeholder size is retained:
#
forcePlaceholderSize: true
#
# Animated revent lasts how long?
#
revert: 300
#
# Delay before action:
#
delay: 100
#
# Opacity of 'helper' (the thing that's dragged):
#
opacity: 0.8
#
# Set constraint of dragging to the document's edge:
#
containment: "document"
#
# Function to be called when dragging starts:
#
start: (e, ui) ->
$(ui.helper).addClass "dragging"
#
# You can change the css of the widget being moved
#
#$(ui.helper).css width: 150
$(ui.helper).css "-webkit-transform": "rotate(-5deg)", "-moz-transform": "rotate(-4deg)"
#
# Function to be called when dragging stops:
#
stop: (e, ui) ->
#
# Reset width of units and remove dragging class:
#
$(ui.item).css(width: "").removeClass "dragging"
$(ui.item).css "-webkit-transform": "rotate(0deg)", "-moz-transform": "rotate(0deg)"
#
# Re-enable sorting (we disabled it on mouseup of the handle):
#
$(settings.columns).sortable "enable"
iNettuts.init() | 180103 | @include = ->
@client '/admin/backend_dashboard.js': ->
@connect()
#
# Test event, received on new connection
#
@on "testEvent": (event)->
#console.log "connection in dashboard"
#console.log event
#alert "connection"
now = new Date()
hour = now.getHours()
min = now.getMinutes()
sec = now.getSeconds()
j$( "#intrxo .widget-content").text( hour+":"+min+":"+sec+" "+event.data.message)
#
# Displaying dashboard
#
# Javascript & css from http://net.tutsplus.com/tutorials/javascript-ajax/inettuts/ (<NAME>)
@view backend_dashboard: ->
html ->
head ->
title "dashboard"
link type: 'text/css', rel: 'stylesheet', href: @assetsPath+'/css/dashboard.css'
div "#head", ->
h1 "Dashboard"
div "#columns.dashboard", ->
ul "#dashboard-column1.dashboard-column", ->
li "#intro.widget.color-white", ->
div ".widget-head", ->
h3 "widget title"
div ".widget-content", ->
p "The content..."
p "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
"
###li ".widget.color-blue", ->
div ".widget-head", ->
h3 "widget title"
div ".widget-content", ->
p "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
###
# ul "#dashboard-column3.dashboard-column", ->
# li ".widget.color-green", ->
# div ".widget-head", ->
# h3 "widget title"
# div ".widget-content", ->
# p "The content..."
ul "#dashboard-column-test.dashboard-column", ->
li ".widget.color-yellow", ->
div ".widget-head", ->
h3 "Users & Memory used"
div ".widget-content", ->
input ".knob",
id:"dashboard-knob-users"
value:"35"
"data-width":"80"
"data-fgColor":"#ffec03"
"data-skin":"tron"
"data-thickness":".2"
"data-displayPrevious":true
"data-readOnly" : true
"data-max" : 5
"data-title" : 'users'
input ".knob",
id:"dashboard-knob-memory"
value:"35"
"data-width":"80"
"data-fgColor":"#ffec03"
"data-skin":"tron"
"data-thickness":".2"
"data-displayPrevious":true
"data-readOnly" : true
"data-max" : 100
"data-title" : 'memory'
coffeescript ->
#
# Knob activation
#
$ = jQuery
$ ->
$(".knob").knob draw: ->
# "tron" case
if @$.data("skin") is "tron"
a = @angle(@cv) # Angle
sa = @startAngle # Previous start angle
sat = @startAngle # Start angle
ea = undefined
# Previous end angle
eat = sat + a # End angle
r = true
@g.lineWidth = @lineWidth
@o.cursor and (sat = eat - 0.3) and (eat = eat + 0.3)
if @o.displayPrevious
ea = @startAngle + @angle(@value)
@o.cursor and (sa = ea - 0.3) and (ea = ea + 0.3)
@g.beginPath()
@g.strokeStyle = @previousColor
@g.arc @xy, @xy, @radius - @lineWidth, sa, ea, false
@g.stroke()
@g.beginPath()
@g.strokeStyle = (if r then @o.fgColor else @fgColor)
@g.arc @xy, @xy, @radius - @lineWidth, sat, eat, false
@g.stroke()
@g.lineWidth = 2
@g.beginPath()
@g.strokeStyle = @o.fgColor
@g.arc @xy, @xy, @radius - @lineWidth + 1 + @lineWidth * 2 / 3, 0, 2 * Math.PI, false
@g.stroke()
@g.fillStyle = "#FFF"
@g.textBaseline = "mb"
@g.fillText(@$.data("title"), 30, 90)
false
#
# Using JQuery to load the javascript code that is in charge to manage events (socket.io)
#
window.j$ = jQuery.noConflict()
jQuery.noConflict().ajax
url: "/admin/backend_dashboard.js"
dataType : "script"
iNettuts =
jQuery: jQuery.noConflict()
settings:
columns: ".dashboard-column"
widgetSelector: ".widget"
handleSelector: ".widget-head"
contentSelector: ".widget-content"
widgetDefault:
movable: true
removable: true
collapsible: true
editable: true
colorClasses: [ "color-yellow", "color-red", "color-blue", "color-white", "color-orange", "color-green" ]
widgetIndividual:
intro:
movable: false
removable: false
collapsible: false
editable: false
gallery:
colorClasses: [ "color-yellow", "color-red", "color-white" ]
init: ->
@attachStylesheet "/backend/javascript/dashboard/inettuts.js.css"
@addWidgetControls()
@makeSortable()
getWidgetSettings: (id) ->
$ = @jQuery
settings = @settings
(if (id and settings.widgetIndividual[id]) then $.extend({}, settings.widgetDefault, settings.widgetIndividual[id]) else settings.widgetDefault)
addWidgetControls: ->
iNettuts = this
$ = @jQuery
settings = @settings
#
# Loop through each widget:
#
$(settings.widgetSelector, $(settings.columns)).each ->
#
# Merge individual settings with default widget settings
#
thisWidgetSettings = iNettuts.getWidgetSettings(@id)
#
# (if "removable" option is TRUE):
#
if thisWidgetSettings.removable
#
# Add CLOSE (REMOVE) button & functionality
#
$("<a href=\"#\" class=\"remove\">CLOSE</a>").mousedown((e) ->
e.stopPropagation()
).click(->
#if confirm("This widget will be removed, ok?")
$(this).parents(settings.widgetSelector).animate
opacity: 0
, ->
$(this).wrap("<div/>").parent().slideUp ->
$(this).remove()
false
).appendTo $(settings.handleSelector, this)
#
# (if "editable" option is TRUE):
#
if thisWidgetSettings.editable
#
# Create new anchor element with class of 'edit':
#
$("<a href=\"#\" class=\"edit\">EDIT</a>").mousedown((e) ->
#
# Stop event bubbling
#
e.stopPropagation()
).toggle(->
#
# // Toggle: (1st state):
#
#
# Change background image so the button now reads 'close edit':
#
$(this).css(
backgroundPosition: "-66px 0"
width: "55px"
)
#
# Traverse to widget (list item):
#
.parents(settings.widgetSelector)
#
# Find the edit-box, show it, then focus <input/>:
#
.find(".edit-box")
.show()
.find("input")
.focus()
false
, ->
#
# Toggle: (2nd state):
#
#
# Reset background and width (will default to CSS specified in StyleSheet):
#
$(this).css(
backgroundPosition: ""
width: "")
#
# Traverse to widget (list item):
#
.parents(settings.widgetSelector)
#
# Find the edit-box and hide it:
#
.find(".edit-box").hide()
#
# Return false, prevent default action:
#
false
#
# Append this button to the widget handle:
#
).appendTo $(settings.handleSelector, this)
#
# Add the actual editing section (edit-box):
#
$("<div class=\"edit-box\" style=\"display:none;\"/>").append("<ul><li class=\"item\"><label>Title</label><input size=12 value=\"" + $("h3", this).text() + "\"/></li>").append((->
colorList = "<li class=\"item\"><label>Color:</label><ul class=\"colors\">"
$(thisWidgetSettings.colorClasses).each ->
colorList += "<li class=\"" + this + "\"/>"
colorList + "</ul>"
)()).append("</ul>").insertAfter $(settings.handleSelector, this)
#
# (if "collapsible" option is TRUE):
#
if thisWidgetSettings.collapsible
#
# Add COLLAPSE button and functionality
#
$("<a href=\"#\" class=\"collapse\">COLLAPSE</a>").mousedown((e) ->
e.stopPropagation()
).toggle(->
$(this).css(backgroundPosition: "-38px 0").parents(settings.widgetSelector).find(settings.contentSelector).hide()
false
, ->
$(this).css(backgroundPosition: "").parents(settings.widgetSelector).find(settings.contentSelector).show()
false
).prependTo $(settings.handleSelector, this)
$(".edit-box").each ->
$("input", this).keyup ->
$(this).parents(settings.widgetSelector).find("h3").text (if $(this).val().length > 20 then $(this).val().substr(0, 20) + "..." else $(this).val())
$("ul.colors li", this).click ->
colorStylePattern = /\bcolor-[\w]{1,}\b/
thisWidgetColorClass = $(this).parents(settings.widgetSelector).attr("class").match(colorStylePattern)
$(this).parents(settings.widgetSelector).removeClass(thisWidgetColorClass[0]).addClass $(this).attr("class").match(colorStylePattern)[0] if thisWidgetColorClass
false
attachStylesheet: (href) ->
$ = @jQuery
$("<link href=\"" + href + "\" rel=\"stylesheet\" type=\"text/css\" />").appendTo "head"
makeSortable: ->
iNettuts = this
$ = @jQuery
settings = @settings
$sortableItems = (->
notSortable = ""
$(settings.widgetSelector, $(settings.columns)).each (i) ->
unless iNettuts.getWidgetSettings(@id).movable
@id = "widget-no-id-" + i unless @id
notSortable += "#" + @id + ","
$ "> li:not(" + notSortable + ")", settings.columns
)()
$sortableItems.find(settings.handleSelector).css(cursor: "move")
#
# Mousedown function
#
.mousedown((e) ->
$sortableItems.css width: ""
#
# Traverse to parent (the widget):
#
$(this).parent().css
#
# Explicitely set width as computed width:
#
width: $(this).parent().width() + "px"
#width:100
#"background-color":"#F00"
#z-index":9999
)
#
# Mouseup function
#
.mouseup ->
#
# Check if widget is currently in the process of dragging
#
unless $(this).parent().hasClass("dragging")
$(this).parent().css width: ""
#
# If it IS currently being dragged then we want to
# temporarily disable dragging, while widget is
# reverting to original position.
#
else
$(settings.columns).sortable "disable"
#
# Select the columns and initiate 'sortable':
#
$(settings.columns).sortable
#
# Specify those items which will be sortable:
#
items: $sortableItems
#
# Connect each column with every other column:
#
connectWith: $(settings.columns)
#
# Set the handle to the top bar:
#
handle: settings.handleSelector
#
# Define class of placeholder (styled in inettuts.js.css)
#
placeholder: "widget-placeholder"
#
# Make sure placeholder size is retained:
#
forcePlaceholderSize: true
#
# Animated revent lasts how long?
#
revert: 300
#
# Delay before action:
#
delay: 100
#
# Opacity of 'helper' (the thing that's dragged):
#
opacity: 0.8
#
# Set constraint of dragging to the document's edge:
#
containment: "document"
#
# Function to be called when dragging starts:
#
start: (e, ui) ->
$(ui.helper).addClass "dragging"
#
# You can change the css of the widget being moved
#
#$(ui.helper).css width: 150
$(ui.helper).css "-webkit-transform": "rotate(-5deg)", "-moz-transform": "rotate(-4deg)"
#
# Function to be called when dragging stops:
#
stop: (e, ui) ->
#
# Reset width of units and remove dragging class:
#
$(ui.item).css(width: "").removeClass "dragging"
$(ui.item).css "-webkit-transform": "rotate(0deg)", "-moz-transform": "rotate(0deg)"
#
# Re-enable sorting (we disabled it on mouseup of the handle):
#
$(settings.columns).sortable "enable"
iNettuts.init() | true | @include = ->
@client '/admin/backend_dashboard.js': ->
@connect()
#
# Test event, received on new connection
#
@on "testEvent": (event)->
#console.log "connection in dashboard"
#console.log event
#alert "connection"
now = new Date()
hour = now.getHours()
min = now.getMinutes()
sec = now.getSeconds()
j$( "#intrxo .widget-content").text( hour+":"+min+":"+sec+" "+event.data.message)
#
# Displaying dashboard
#
# Javascript & css from http://net.tutsplus.com/tutorials/javascript-ajax/inettuts/ (PI:NAME:<NAME>END_PI)
@view backend_dashboard: ->
html ->
head ->
title "dashboard"
link type: 'text/css', rel: 'stylesheet', href: @assetsPath+'/css/dashboard.css'
div "#head", ->
h1 "Dashboard"
div "#columns.dashboard", ->
ul "#dashboard-column1.dashboard-column", ->
li "#intro.widget.color-white", ->
div ".widget-head", ->
h3 "widget title"
div ".widget-content", ->
p "The content..."
p "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
"
###li ".widget.color-blue", ->
div ".widget-head", ->
h3 "widget title"
div ".widget-content", ->
p "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
###
# ul "#dashboard-column3.dashboard-column", ->
# li ".widget.color-green", ->
# div ".widget-head", ->
# h3 "widget title"
# div ".widget-content", ->
# p "The content..."
ul "#dashboard-column-test.dashboard-column", ->
li ".widget.color-yellow", ->
div ".widget-head", ->
h3 "Users & Memory used"
div ".widget-content", ->
input ".knob",
id:"dashboard-knob-users"
value:"35"
"data-width":"80"
"data-fgColor":"#ffec03"
"data-skin":"tron"
"data-thickness":".2"
"data-displayPrevious":true
"data-readOnly" : true
"data-max" : 5
"data-title" : 'users'
input ".knob",
id:"dashboard-knob-memory"
value:"35"
"data-width":"80"
"data-fgColor":"#ffec03"
"data-skin":"tron"
"data-thickness":".2"
"data-displayPrevious":true
"data-readOnly" : true
"data-max" : 100
"data-title" : 'memory'
coffeescript ->
#
# Knob activation
#
$ = jQuery
$ ->
$(".knob").knob draw: ->
# "tron" case
if @$.data("skin") is "tron"
a = @angle(@cv) # Angle
sa = @startAngle # Previous start angle
sat = @startAngle # Start angle
ea = undefined
# Previous end angle
eat = sat + a # End angle
r = true
@g.lineWidth = @lineWidth
@o.cursor and (sat = eat - 0.3) and (eat = eat + 0.3)
if @o.displayPrevious
ea = @startAngle + @angle(@value)
@o.cursor and (sa = ea - 0.3) and (ea = ea + 0.3)
@g.beginPath()
@g.strokeStyle = @previousColor
@g.arc @xy, @xy, @radius - @lineWidth, sa, ea, false
@g.stroke()
@g.beginPath()
@g.strokeStyle = (if r then @o.fgColor else @fgColor)
@g.arc @xy, @xy, @radius - @lineWidth, sat, eat, false
@g.stroke()
@g.lineWidth = 2
@g.beginPath()
@g.strokeStyle = @o.fgColor
@g.arc @xy, @xy, @radius - @lineWidth + 1 + @lineWidth * 2 / 3, 0, 2 * Math.PI, false
@g.stroke()
@g.fillStyle = "#FFF"
@g.textBaseline = "mb"
@g.fillText(@$.data("title"), 30, 90)
false
#
# Using JQuery to load the javascript code that is in charge to manage events (socket.io)
#
window.j$ = jQuery.noConflict()
jQuery.noConflict().ajax
url: "/admin/backend_dashboard.js"
dataType : "script"
iNettuts =
jQuery: jQuery.noConflict()
settings:
columns: ".dashboard-column"
widgetSelector: ".widget"
handleSelector: ".widget-head"
contentSelector: ".widget-content"
widgetDefault:
movable: true
removable: true
collapsible: true
editable: true
colorClasses: [ "color-yellow", "color-red", "color-blue", "color-white", "color-orange", "color-green" ]
widgetIndividual:
intro:
movable: false
removable: false
collapsible: false
editable: false
gallery:
colorClasses: [ "color-yellow", "color-red", "color-white" ]
init: ->
@attachStylesheet "/backend/javascript/dashboard/inettuts.js.css"
@addWidgetControls()
@makeSortable()
getWidgetSettings: (id) ->
$ = @jQuery
settings = @settings
(if (id and settings.widgetIndividual[id]) then $.extend({}, settings.widgetDefault, settings.widgetIndividual[id]) else settings.widgetDefault)
addWidgetControls: ->
iNettuts = this
$ = @jQuery
settings = @settings
#
# Loop through each widget:
#
$(settings.widgetSelector, $(settings.columns)).each ->
#
# Merge individual settings with default widget settings
#
thisWidgetSettings = iNettuts.getWidgetSettings(@id)
#
# (if "removable" option is TRUE):
#
if thisWidgetSettings.removable
#
# Add CLOSE (REMOVE) button & functionality
#
$("<a href=\"#\" class=\"remove\">CLOSE</a>").mousedown((e) ->
e.stopPropagation()
).click(->
#if confirm("This widget will be removed, ok?")
$(this).parents(settings.widgetSelector).animate
opacity: 0
, ->
$(this).wrap("<div/>").parent().slideUp ->
$(this).remove()
false
).appendTo $(settings.handleSelector, this)
#
# (if "editable" option is TRUE):
#
if thisWidgetSettings.editable
#
# Create new anchor element with class of 'edit':
#
$("<a href=\"#\" class=\"edit\">EDIT</a>").mousedown((e) ->
#
# Stop event bubbling
#
e.stopPropagation()
).toggle(->
#
# // Toggle: (1st state):
#
#
# Change background image so the button now reads 'close edit':
#
$(this).css(
backgroundPosition: "-66px 0"
width: "55px"
)
#
# Traverse to widget (list item):
#
.parents(settings.widgetSelector)
#
# Find the edit-box, show it, then focus <input/>:
#
.find(".edit-box")
.show()
.find("input")
.focus()
false
, ->
#
# Toggle: (2nd state):
#
#
# Reset background and width (will default to CSS specified in StyleSheet):
#
$(this).css(
backgroundPosition: ""
width: "")
#
# Traverse to widget (list item):
#
.parents(settings.widgetSelector)
#
# Find the edit-box and hide it:
#
.find(".edit-box").hide()
#
# Return false, prevent default action:
#
false
#
# Append this button to the widget handle:
#
).appendTo $(settings.handleSelector, this)
#
# Add the actual editing section (edit-box):
#
$("<div class=\"edit-box\" style=\"display:none;\"/>").append("<ul><li class=\"item\"><label>Title</label><input size=12 value=\"" + $("h3", this).text() + "\"/></li>").append((->
colorList = "<li class=\"item\"><label>Color:</label><ul class=\"colors\">"
$(thisWidgetSettings.colorClasses).each ->
colorList += "<li class=\"" + this + "\"/>"
colorList + "</ul>"
)()).append("</ul>").insertAfter $(settings.handleSelector, this)
#
# (if "collapsible" option is TRUE):
#
if thisWidgetSettings.collapsible
#
# Add COLLAPSE button and functionality
#
$("<a href=\"#\" class=\"collapse\">COLLAPSE</a>").mousedown((e) ->
e.stopPropagation()
).toggle(->
$(this).css(backgroundPosition: "-38px 0").parents(settings.widgetSelector).find(settings.contentSelector).hide()
false
, ->
$(this).css(backgroundPosition: "").parents(settings.widgetSelector).find(settings.contentSelector).show()
false
).prependTo $(settings.handleSelector, this)
$(".edit-box").each ->
$("input", this).keyup ->
$(this).parents(settings.widgetSelector).find("h3").text (if $(this).val().length > 20 then $(this).val().substr(0, 20) + "..." else $(this).val())
$("ul.colors li", this).click ->
colorStylePattern = /\bcolor-[\w]{1,}\b/
thisWidgetColorClass = $(this).parents(settings.widgetSelector).attr("class").match(colorStylePattern)
$(this).parents(settings.widgetSelector).removeClass(thisWidgetColorClass[0]).addClass $(this).attr("class").match(colorStylePattern)[0] if thisWidgetColorClass
false
attachStylesheet: (href) ->
$ = @jQuery
$("<link href=\"" + href + "\" rel=\"stylesheet\" type=\"text/css\" />").appendTo "head"
makeSortable: ->
iNettuts = this
$ = @jQuery
settings = @settings
$sortableItems = (->
notSortable = ""
$(settings.widgetSelector, $(settings.columns)).each (i) ->
unless iNettuts.getWidgetSettings(@id).movable
@id = "widget-no-id-" + i unless @id
notSortable += "#" + @id + ","
$ "> li:not(" + notSortable + ")", settings.columns
)()
$sortableItems.find(settings.handleSelector).css(cursor: "move")
#
# Mousedown function
#
.mousedown((e) ->
$sortableItems.css width: ""
#
# Traverse to parent (the widget):
#
$(this).parent().css
#
# Explicitely set width as computed width:
#
width: $(this).parent().width() + "px"
#width:100
#"background-color":"#F00"
#z-index":9999
)
#
# Mouseup function
#
.mouseup ->
#
# Check if widget is currently in the process of dragging
#
unless $(this).parent().hasClass("dragging")
$(this).parent().css width: ""
#
# If it IS currently being dragged then we want to
# temporarily disable dragging, while widget is
# reverting to original position.
#
else
$(settings.columns).sortable "disable"
#
# Select the columns and initiate 'sortable':
#
$(settings.columns).sortable
#
# Specify those items which will be sortable:
#
items: $sortableItems
#
# Connect each column with every other column:
#
connectWith: $(settings.columns)
#
# Set the handle to the top bar:
#
handle: settings.handleSelector
#
# Define class of placeholder (styled in inettuts.js.css)
#
placeholder: "widget-placeholder"
#
# Make sure placeholder size is retained:
#
forcePlaceholderSize: true
#
# Animated revent lasts how long?
#
revert: 300
#
# Delay before action:
#
delay: 100
#
# Opacity of 'helper' (the thing that's dragged):
#
opacity: 0.8
#
# Set constraint of dragging to the document's edge:
#
containment: "document"
#
# Function to be called when dragging starts:
#
start: (e, ui) ->
$(ui.helper).addClass "dragging"
#
# You can change the css of the widget being moved
#
#$(ui.helper).css width: 150
$(ui.helper).css "-webkit-transform": "rotate(-5deg)", "-moz-transform": "rotate(-4deg)"
#
# Function to be called when dragging stops:
#
stop: (e, ui) ->
#
# Reset width of units and remove dragging class:
#
$(ui.item).css(width: "").removeClass "dragging"
$(ui.item).css "-webkit-transform": "rotate(0deg)", "-moz-transform": "rotate(0deg)"
#
# Re-enable sorting (we disabled it on mouseup of the handle):
#
$(settings.columns).sortable "enable"
iNettuts.init() |
[
{
"context": "nal notes required for the script>\n#\n# Author:\n# knjcode <knjcode@gmail.com>\n\n_ = require 'lodash'\njaco = ",
"end": 453,
"score": 0.999701738357544,
"start": 446,
"tag": "USERNAME",
"value": "knjcode"
},
{
"context": "required for the script>\n#\n# Author:\n# ... | src/slack-taboo.coffee | knjcode/hubot-slack-taboo | 0 | # Description
# A hubot script that does the things
#
# Configuration:
# SLACK_API_TOKEN - Slack API Token (default. undefined )
# HUBOT_SLACK_TABOO_CHANNEL - Target channel
# (default. taboo_exp)
# HUBOT_SLACK_TABOO_DURATION - Duration to reap in seconds (default. 5)
# HUBOT_SLACK_TABOO_MECABDIC - Set dir as a system dicdir
#
# Commands:
# N/A
#
# Notes:
# <optional notes required for the script>
#
# Author:
# knjcode <knjcode@gmail.com>
_ = require 'lodash'
jaco = require 'jaco'
Mecab = require 'mecab-async'
mecab = new Mecab()
apitoken = process.env.SLACK_API_TOKEN
targetroom = process.env.HUBOT_SLACK_TABOO_CHANNEL ? "taboo_exp"
duration = process.env.HUBOT_SLACK_TABOO_DURATION ? 3
mecabdic = process.env.HUBOT_SLACK_TABOO_MECABDIC
tabooChars = []
hiraganaChars = []
for i in [12353..12435]
hiraganaChars.push String.fromCharCode(i)
commands = ['taboo', 'addtaboo', 'maddtaboo', 'reset']
module.exports = (robot) ->
if mecabdic
Mecab.command = "mecab -d " + mecabdic
console.log "Mecab command: " + Mecab.command
robot.brain.on "loaded", ->
# "loaded" event is called every time robot.brain changed
# data loading is needed only once after a reboot
if !loaded
try
tabooChars = JSON.parse robot.brain.get "hubot-slack-taboo-tabooChars"
catch error
robot.logger.info("JSON parse error (reason: #{error})")
loaded = true
if !tabooChars
tabooChars = []
robot.hear /^taboo$/, (res) ->
msgs = ["禁止文字数:" + tabooChars.length.toString()]
diff = _.difference(hiraganaChars,tabooChars)
if tabooChars.length < diff.length
msgs.push "禁止文字:" + tabooChars
else
msgs.push "使用可能文字:" + diff
res.send msgs.join("\n")
addTaboo = (tabooChar) ->
tabooChars.push(tabooChar)
robot.brain.set "hubot-slack-taboo-tabooChars", JSON.stringify tabooChars
robot.hear /^addtaboo$/, (res) ->
diff = _.difference(hiraganaChars,tabooChars)
newtaboo = diff[Math.floor(Math.random() * diff.length)]
addTaboo(newtaboo)
res.send "禁止文字に「" + newtaboo + "」を追加しました"
robot.hear /^maddtaboo (.)$/, (res) ->
newtaboo = res.match[1]
diff = _.difference(hiraganaChars,tabooChars)
if ~diff.indexOf(newtaboo)
addTaboo(newtaboo)
res.send "禁止文字に「" + newtaboo + "」を追加しました"
robot.hear /^reset$/, (res) ->
tabooChars = []
res.send "禁止文字をリセットしました"
robot.hear /.*/, (res) ->
for command in commands
if res.message.text is command
return
isDelete = false
tabooRegex = RegExp("[#{jaco.katakanize(tabooChars)}]")
if tabooRegex.test jaco.katakanize res.message.text
isDelete = true
else
tokens = mecab.parseSync res.message.text
console.log tokens
readings = (token[8] for token in tokens)
matches = []
for token in tokens
if token[8]
if tabooRegex.test jaco.katakanize token[8]
matches.push token
console.log "Reading: " + readings.join('')
console.log 'matches num: ' + matches.length.toString()
console.log 'matches: ' + JSON.stringify matches
if matches.length > 0
isDelete = true
if isDelete
if targetroom
if res.message.room != targetroom
return
if matches
msgs = []
for match in matches
if match[8]
msgs.push match[0] + "(" + match[8] + ")"
res.send "Delete! " + msgs.join()
else
res.send "Delete!"
msgid = res.message.id
channel = res.message.rawMessage.channel
rmjob = ->
echannel = escape(channel)
emsgid = escape(msgid)
eapitoken = escape(apitoken)
robot.http("https://slack.com/api/chat.delete?token=#{eapitoken}&ts=#{emsgid}&channel=#{echannel}")
.get() (err, resp, body) ->
try
json = JSON.parse(body)
if json.ok
robot.logger.info("Removed #{res.message.user.name}'s message \"#{res.message.text}\" in #{res.message.room}")
else
robot.logger.error("Failed to remove message")
catch error
robot.logger.error("Failed to request removing message #{msgid} in #{channel} (reason: #{error})")
setTimeout(rmjob, duration * 1000)
| 1574 | # Description
# A hubot script that does the things
#
# Configuration:
# SLACK_API_TOKEN - Slack API Token (default. undefined )
# HUBOT_SLACK_TABOO_CHANNEL - Target channel
# (default. taboo_exp)
# HUBOT_SLACK_TABOO_DURATION - Duration to reap in seconds (default. 5)
# HUBOT_SLACK_TABOO_MECABDIC - Set dir as a system dicdir
#
# Commands:
# N/A
#
# Notes:
# <optional notes required for the script>
#
# Author:
# knjcode <<EMAIL>>
_ = require 'lodash'
jaco = require 'jaco'
Mecab = require 'mecab-async'
mecab = new Mecab()
apitoken = process.env.SLACK_API_TOKEN
targetroom = process.env.HUBOT_SLACK_TABOO_CHANNEL ? "taboo_exp"
duration = process.env.HUBOT_SLACK_TABOO_DURATION ? 3
mecabdic = process.env.HUBOT_SLACK_TABOO_MECABDIC
tabooChars = []
hiraganaChars = []
for i in [12353..12435]
hiraganaChars.push String.fromCharCode(i)
commands = ['taboo', 'addtaboo', 'maddtaboo', 'reset']
module.exports = (robot) ->
if mecabdic
Mecab.command = "mecab -d " + mecabdic
console.log "Mecab command: " + Mecab.command
robot.brain.on "loaded", ->
# "loaded" event is called every time robot.brain changed
# data loading is needed only once after a reboot
if !loaded
try
tabooChars = JSON.parse robot.brain.get "hubot-slack-taboo-tabooChars"
catch error
robot.logger.info("JSON parse error (reason: #{error})")
loaded = true
if !tabooChars
tabooChars = []
robot.hear /^taboo$/, (res) ->
msgs = ["禁止文字数:" + tabooChars.length.toString()]
diff = _.difference(hiraganaChars,tabooChars)
if tabooChars.length < diff.length
msgs.push "禁止文字:" + tabooChars
else
msgs.push "使用可能文字:" + diff
res.send msgs.join("\n")
addTaboo = (tabooChar) ->
tabooChars.push(tabooChar)
robot.brain.set "hubot-slack-taboo-tabooChars", JSON.stringify tabooChars
robot.hear /^addtaboo$/, (res) ->
diff = _.difference(hiraganaChars,tabooChars)
newtaboo = diff[Math.floor(Math.random() * diff.length)]
addTaboo(newtaboo)
res.send "禁止文字に「" + newtaboo + "」を追加しました"
robot.hear /^maddtaboo (.)$/, (res) ->
newtaboo = res.match[1]
diff = _.difference(hiraganaChars,tabooChars)
if ~diff.indexOf(newtaboo)
addTaboo(newtaboo)
res.send "禁止文字に「" + newtaboo + "」を追加しました"
robot.hear /^reset$/, (res) ->
tabooChars = []
res.send "禁止文字をリセットしました"
robot.hear /.*/, (res) ->
for command in commands
if res.message.text is command
return
isDelete = false
tabooRegex = RegExp("[#{jaco.katakanize(tabooChars)}]")
if tabooRegex.test jaco.katakanize res.message.text
isDelete = true
else
tokens = mecab.parseSync res.message.text
console.log tokens
readings = (token[8] for token in tokens)
matches = []
for token in tokens
if token[8]
if tabooRegex.test jaco.katakanize token[8]
matches.push token
console.log "Reading: " + readings.join('')
console.log 'matches num: ' + matches.length.toString()
console.log 'matches: ' + JSON.stringify matches
if matches.length > 0
isDelete = true
if isDelete
if targetroom
if res.message.room != targetroom
return
if matches
msgs = []
for match in matches
if match[8]
msgs.push match[0] + "(" + match[8] + ")"
res.send "Delete! " + msgs.join()
else
res.send "Delete!"
msgid = res.message.id
channel = res.message.rawMessage.channel
rmjob = ->
echannel = escape(channel)
emsgid = escape(msgid)
eapitoken = escape(apitoken)
robot.http("https://slack.com/api/chat.delete?token=#{eapitoken}&ts=#{emsgid}&channel=#{echannel}")
.get() (err, resp, body) ->
try
json = JSON.parse(body)
if json.ok
robot.logger.info("Removed #{res.message.user.name}'s message \"#{res.message.text}\" in #{res.message.room}")
else
robot.logger.error("Failed to remove message")
catch error
robot.logger.error("Failed to request removing message #{msgid} in #{channel} (reason: #{error})")
setTimeout(rmjob, duration * 1000)
| true | # Description
# A hubot script that does the things
#
# Configuration:
# SLACK_API_TOKEN - Slack API Token (default. undefined )
# HUBOT_SLACK_TABOO_CHANNEL - Target channel
# (default. taboo_exp)
# HUBOT_SLACK_TABOO_DURATION - Duration to reap in seconds (default. 5)
# HUBOT_SLACK_TABOO_MECABDIC - Set dir as a system dicdir
#
# Commands:
# N/A
#
# Notes:
# <optional notes required for the script>
#
# Author:
# knjcode <PI:EMAIL:<EMAIL>END_PI>
_ = require 'lodash'
jaco = require 'jaco'
Mecab = require 'mecab-async'
mecab = new Mecab()
apitoken = process.env.SLACK_API_TOKEN
targetroom = process.env.HUBOT_SLACK_TABOO_CHANNEL ? "taboo_exp"
duration = process.env.HUBOT_SLACK_TABOO_DURATION ? 3
mecabdic = process.env.HUBOT_SLACK_TABOO_MECABDIC
tabooChars = []
hiraganaChars = []
for i in [12353..12435]
hiraganaChars.push String.fromCharCode(i)
commands = ['taboo', 'addtaboo', 'maddtaboo', 'reset']
module.exports = (robot) ->
if mecabdic
Mecab.command = "mecab -d " + mecabdic
console.log "Mecab command: " + Mecab.command
robot.brain.on "loaded", ->
# "loaded" event is called every time robot.brain changed
# data loading is needed only once after a reboot
if !loaded
try
tabooChars = JSON.parse robot.brain.get "hubot-slack-taboo-tabooChars"
catch error
robot.logger.info("JSON parse error (reason: #{error})")
loaded = true
if !tabooChars
tabooChars = []
robot.hear /^taboo$/, (res) ->
msgs = ["禁止文字数:" + tabooChars.length.toString()]
diff = _.difference(hiraganaChars,tabooChars)
if tabooChars.length < diff.length
msgs.push "禁止文字:" + tabooChars
else
msgs.push "使用可能文字:" + diff
res.send msgs.join("\n")
addTaboo = (tabooChar) ->
tabooChars.push(tabooChar)
robot.brain.set "hubot-slack-taboo-tabooChars", JSON.stringify tabooChars
robot.hear /^addtaboo$/, (res) ->
diff = _.difference(hiraganaChars,tabooChars)
newtaboo = diff[Math.floor(Math.random() * diff.length)]
addTaboo(newtaboo)
res.send "禁止文字に「" + newtaboo + "」を追加しました"
robot.hear /^maddtaboo (.)$/, (res) ->
newtaboo = res.match[1]
diff = _.difference(hiraganaChars,tabooChars)
if ~diff.indexOf(newtaboo)
addTaboo(newtaboo)
res.send "禁止文字に「" + newtaboo + "」を追加しました"
robot.hear /^reset$/, (res) ->
tabooChars = []
res.send "禁止文字をリセットしました"
robot.hear /.*/, (res) ->
for command in commands
if res.message.text is command
return
isDelete = false
tabooRegex = RegExp("[#{jaco.katakanize(tabooChars)}]")
if tabooRegex.test jaco.katakanize res.message.text
isDelete = true
else
tokens = mecab.parseSync res.message.text
console.log tokens
readings = (token[8] for token in tokens)
matches = []
for token in tokens
if token[8]
if tabooRegex.test jaco.katakanize token[8]
matches.push token
console.log "Reading: " + readings.join('')
console.log 'matches num: ' + matches.length.toString()
console.log 'matches: ' + JSON.stringify matches
if matches.length > 0
isDelete = true
if isDelete
if targetroom
if res.message.room != targetroom
return
if matches
msgs = []
for match in matches
if match[8]
msgs.push match[0] + "(" + match[8] + ")"
res.send "Delete! " + msgs.join()
else
res.send "Delete!"
msgid = res.message.id
channel = res.message.rawMessage.channel
rmjob = ->
echannel = escape(channel)
emsgid = escape(msgid)
eapitoken = escape(apitoken)
robot.http("https://slack.com/api/chat.delete?token=#{eapitoken}&ts=#{emsgid}&channel=#{echannel}")
.get() (err, resp, body) ->
try
json = JSON.parse(body)
if json.ok
robot.logger.info("Removed #{res.message.user.name}'s message \"#{res.message.text}\" in #{res.message.room}")
else
robot.logger.error("Failed to remove message")
catch error
robot.logger.error("Failed to request removing message #{msgid} in #{channel} (reason: #{error})")
setTimeout(rmjob, duration * 1000)
|
[
{
"context": "# copyright 2015 by mike lodato (zvxryb@gmail.com)\n# this work is subject to the ",
"end": 31,
"score": 0.9998661875724792,
"start": 20,
"tag": "NAME",
"value": "mike lodato"
},
{
"context": "# copyright 2015 by mike lodato (zvxryb@gmail.com)\n# this work is subject to... | src/geometry/vector.coffee | zvxryb/openjscad-solve | 1 | # copyright 2015 by mike lodato (zvxryb@gmail.com)
# this work is subject to the terms of the MIT license
define ['math/expr', 'geometry/primitive', 'geometry/scalar', 'core/utils', 'core/errors'], (Expr, Primitive, Scalar, utils, errors) ->
class VecN extends Primitive
constructor: (@vars...) ->
errors.TypeError.assert(x, Expr) for x in @vars
super()
abs2: -> Expr.add((x.pow(Expr.const(2)) for x in @vars)...)
abs: -> abs2().pow(Expr.const(1, 2))
scale: (other) ->
expr = switch
when other instanceof Expr then other
when other instanceof Scalar then other.var
else throw new errors.TypeError.throw(other, Expr, Scalar)
new VecN((expr.mul(x) for x in @vars)...)
@add: (lhs, rhs) ->
errors.TypeError.assert(lhs, VecN)
errors.TypeError.assert(rhs, VecN)
new VecN(utils.zipWith(Expr.add, lhs.vars, rhs.vars)...)
add: (other) -> @constructor.add(this, other)
@dot: (lhs, rhs) ->
errors.TypeError.assert(lhs, VecN)
errors.TypeError.assert(rhs, VecN)
Expr.add(utils.zipWith(Expr.mul, lhs.vars, rhs.vars)...)
dot: (other) -> @constructor.dot(this, other)
isUnit: -> @abs2().eq(Expr.const(1))
isOrthogonalTo: (other) -> @dot(other)
isEqualTo: (other) -> utils.zipWith(Expr.eq, @vars, other.vars)
class Vec2 extends VecN
constructor: (prefix) ->
[@x, @y] = Expr.vars(prefix, 'x', 'y')
super(@x, @y)
class Vec3 extends VecN
constructor: (prefix) ->
[@x, @y, @z] = Expr.vars(prefix, 'x', 'y', 'z')
super(@x, @y, @z)
(
VecN: VecN
Vec2: Vec2
Vec3: Vec3
)
| 9352 | # copyright 2015 by <NAME> (<EMAIL>)
# this work is subject to the terms of the MIT license
define ['math/expr', 'geometry/primitive', 'geometry/scalar', 'core/utils', 'core/errors'], (Expr, Primitive, Scalar, utils, errors) ->
class VecN extends Primitive
constructor: (@vars...) ->
errors.TypeError.assert(x, Expr) for x in @vars
super()
abs2: -> Expr.add((x.pow(Expr.const(2)) for x in @vars)...)
abs: -> abs2().pow(Expr.const(1, 2))
scale: (other) ->
expr = switch
when other instanceof Expr then other
when other instanceof Scalar then other.var
else throw new errors.TypeError.throw(other, Expr, Scalar)
new VecN((expr.mul(x) for x in @vars)...)
@add: (lhs, rhs) ->
errors.TypeError.assert(lhs, VecN)
errors.TypeError.assert(rhs, VecN)
new VecN(utils.zipWith(Expr.add, lhs.vars, rhs.vars)...)
add: (other) -> @constructor.add(this, other)
@dot: (lhs, rhs) ->
errors.TypeError.assert(lhs, VecN)
errors.TypeError.assert(rhs, VecN)
Expr.add(utils.zipWith(Expr.mul, lhs.vars, rhs.vars)...)
dot: (other) -> @constructor.dot(this, other)
isUnit: -> @abs2().eq(Expr.const(1))
isOrthogonalTo: (other) -> @dot(other)
isEqualTo: (other) -> utils.zipWith(Expr.eq, @vars, other.vars)
class Vec2 extends VecN
constructor: (prefix) ->
[@x, @y] = Expr.vars(prefix, 'x', 'y')
super(@x, @y)
class Vec3 extends VecN
constructor: (prefix) ->
[@x, @y, @z] = Expr.vars(prefix, 'x', 'y', 'z')
super(@x, @y, @z)
(
VecN: VecN
Vec2: Vec2
Vec3: Vec3
)
| true | # copyright 2015 by PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI)
# this work is subject to the terms of the MIT license
define ['math/expr', 'geometry/primitive', 'geometry/scalar', 'core/utils', 'core/errors'], (Expr, Primitive, Scalar, utils, errors) ->
class VecN extends Primitive
constructor: (@vars...) ->
errors.TypeError.assert(x, Expr) for x in @vars
super()
abs2: -> Expr.add((x.pow(Expr.const(2)) for x in @vars)...)
abs: -> abs2().pow(Expr.const(1, 2))
scale: (other) ->
expr = switch
when other instanceof Expr then other
when other instanceof Scalar then other.var
else throw new errors.TypeError.throw(other, Expr, Scalar)
new VecN((expr.mul(x) for x in @vars)...)
@add: (lhs, rhs) ->
errors.TypeError.assert(lhs, VecN)
errors.TypeError.assert(rhs, VecN)
new VecN(utils.zipWith(Expr.add, lhs.vars, rhs.vars)...)
add: (other) -> @constructor.add(this, other)
@dot: (lhs, rhs) ->
errors.TypeError.assert(lhs, VecN)
errors.TypeError.assert(rhs, VecN)
Expr.add(utils.zipWith(Expr.mul, lhs.vars, rhs.vars)...)
dot: (other) -> @constructor.dot(this, other)
isUnit: -> @abs2().eq(Expr.const(1))
isOrthogonalTo: (other) -> @dot(other)
isEqualTo: (other) -> utils.zipWith(Expr.eq, @vars, other.vars)
class Vec2 extends VecN
constructor: (prefix) ->
[@x, @y] = Expr.vars(prefix, 'x', 'y')
super(@x, @y)
class Vec3 extends VecN
constructor: (prefix) ->
[@x, @y, @z] = Expr.vars(prefix, 'x', 'y', 'z')
super(@x, @y, @z)
(
VecN: VecN
Vec2: Vec2
Vec3: Vec3
)
|
[
{
"context": "ou have any questions, please feel free to contact Helaine Blumenthal (helaine@wikiedu.org). We look forward to working",
"end": 1201,
"score": 0.9998903274536133,
"start": 1183,
"tag": "NAME",
"value": "Helaine Blumenthal"
},
{
"context": ", please feel free to contact... | source/javascripts/data/WizardConfig.coffee | WikiEducationFoundation/WikiEduWizard | 1 | ## THIS FILE IS THE DATA CONTENT AND STEP ORDER CONFIGRATION FOR THE WIZARD AS WELL AS ASSIGNMENT PATHWAYS ##
## UNCOMMENTING THE DATA INSIDE THE PATHWAYS SECTION WILL ADD MORE STEPS INTO THOSE ALTERNATIVE PATHWAYS ##
WizardConfig = {
## The intro_steps are the steps before the wizard branches into different assignment types.
intro_steps: [
# Intro
{
id: "intro"
title: '<center>Welcome to the<br />Assignment Design Wizard!</center>'
login_instructions: 'Click Login with Wikipedia to get started'
instructions: ''
inputs: []
sections: [
{
content: [
"<p class='large'>Thank you for visiting the Assignment Design Wizard. Wiki Ed is excited to announce that we will be launching a new set of tools for creating and tracking a Wikipedia assignment for the Fall 2015 term. You may look through the wizard now, but we are advising instructors teaching with Wikipedia in the fall to wait for our new tools to be ready to plan their assignments. Those teaching during the summer, should go through the current wizard to plan their Wikipedia projects. IF you have any questions, please feel free to contact Helaine Blumenthal (helaine@wikiedu.org). We look forward to working with you in the Fall!</p>"
"<p class='large'>This tool will help you to easily create a customized Wikipedia classroom assignment and customized syllabus for your course.</p>"
"<p class='large'>When you’re finished, you'll have a ready-to-use lesson plan, with weekly assignments, published directly onto a sandbox page on Wikipedia where you can customize it even further.</p>"
"<p class='large'>Let’s start by filling in some basics about you and your course:</p>"
]
}
]
}
# Assignment selection
{
id: "assignment_selection"
title: 'Assignment type selection'
infoTitle: 'About assignment selections'
formTitle: 'Available assignments:'
instructions: "You can teach with Wikipedia in several different ways, and it's important to design an assignment that is suitable for Wikipedia <em>and</em> achieves your student learning objectives. Your first step is to choose which assignment(s) you'll be asking your students to complete as part of the course."
inputs: []
sections: [
{
title: ''
content: [
"<p>We've created some guidelines to help you, but you'll need to make some key decisions, such as: which learning objectives are you targeting with this assignment? What skills do your students already have? How much time can you devote to the assignment?</p>"
"<p>Most instructors ask their students to write or expand an article. Students start by learning the basics of Wikipedia, and then focus on the content. They plan, research, and write a previously missing Wikipedia article, or contribute to an incomplete entry on a course-related topic. This assignment typically replaces a term paper or research project, or it forms the literature review section of a larger paper. The student learning outcome is high with this assignment, but it does take a significant amount of time. Your students need to learn both the wiki markup language and key policies and expectations of the Wikipedia-editing community.</p>"
"<p>If writing an article isn't right for your class, other assignment options offer students valuable learning opportunities and help to improve Wikipedia. Select an assignment type on the left to learn more.</p>"
]
}
]
}
]
# Here begin the pathways for different assignment types: researchwrite,
pathways: {
###################################
researchwrite: [
# Wikipedia essentials (researchwrite)
{
id: "learning_essentials"
title: 'Wikipedia essentials'
showInOverview: true
formTitle: 'Choose one:'
infoTitle: 'About Wikipedia essentials'
instructions: "To get started, you'll want to introduce your students to the basic rules of writing Wikipedia articles and working with the Wikipedia community."
inputs: []
sections: [
{
title: ''
content: [
'<p>As their first Wikipedia assignment milestone, you can ask the students to create user accounts and then complete the <em>online training for students</em>. This training introduces the Wikipedia community and how it works, demonstrates the basics of editing and walks students through their first edits, gives advice for selecting articles and drafting revisions, and explains further sources of support as they continue along. It takes about an hour and ends with a certification step, which you can use to verify that students completed the training.</p>'
'<p>Students who complete this training are better prepared to focus on learning outcomes, and spend less time distracted by cleaning up after errors.</p>'
'<p>Will completion of the student training be part of your students\' grades? (Make your choice at the top left.)</p>'
]
}
{
title: 'Assignment milestones'
accordian: true
content: [
"<ul>
<li>Create a user account and enroll on the course page. </li>
<li>Complete the <em>online training for students</em>. During this training, you will make edits in a sandbox and learn the basic rules of Wikipedia.</li>
<li>To practice editing and communicating on Wikipedia, introduce yourself to any Wikipedians helping your class (such as a Wikipedia Ambassador), and leave a message for a classmate on their user talk page.</li>
</ul>"
]
}
]
}
# Getting started with editing (researchwrite)
{
id: "getting_started"
title: 'Getting started with editing'
showInOverview: true
infoTitle: 'About early editing tasks'
instructions: "It is important for students to start editing Wikipedia early on. That way, they become familiar with Wikipedia's markup (\"wikisyntax\", \"wikimarkup\", or \"wikicode\") and the mechanics of editing and communicating on the site. We recommend assigning a few basic Wikipedia tasks early on."
formTitle: 'Which basic assignments would you like to include?'
inputs: []
sections: [
{
title: ''
content: [
'<p>Which introductory assignments would you like to use to acclimate your students to Wikipedia? You can select none, one, or more. Whichever you select will be added to the assignment timeline.</p>'
'<ul>
<li><strong>Critique an article.</strong> Critically evaluate an existing Wikipedia article related to the class, and leave suggestions for improving it on the article’s talk page. </li>
<li><strong>Add to an article.</strong> Using course readings or other relevant secondary sources, add 1–2 sentences of new information to a Wikipedia article related to the class. Be sure to integrate it well into the existing article, and include a citation to the source. </li>
<li><strong>Copyedit an article.</strong> Browse Wikipedia until you find an article that you would like to improve, and make some edits to improve the language or formatting. </li>
<li><strong>Illustrate an article.</strong> Find an opportunity to improve an article by uploading and adding a photo you took.</li>
</ul>'
]
}
{
content: [
'<p>For most courses, the <em>Critique an article</em> and <em>Add to an article</em> exercises provide a nice foundation for the main writing project. These have been selected by default.</p>'
]
}
]
}
# Choosing articles (researchwrite)
{
id: 'choosing_articles'
title: 'Choosing articles'
showInOverview: true
formTitle: 'How will your class select articles?'
infoTitle: 'About choosing articles'
inputs: []
sections: [
{
title: ''
content: [
'<p>Choosing the right (or wrong) articles to work on can make (or break) a Wikipedia writing assignment.</p>'
'<p>Some articles may initially look easy to improve, but quality references to expand them may be difficult to find. Finding topics with the right balance between poor Wikipedia coverage and available literature from which to expand that coverage can be tricky. Here are some guidelines to keep in mind when selecting articles for improvement.</p>'
]
}
{
title: 'Good choice'
accordian: true
content: [
"<ul>
<li>Choose a well-established topic for which a lot of literature is available in its field, but which isn't covered extensively on Wikipedia.</li>
<li>Gravitate toward \"stub\" and \"start\" class articles. These articles often have only 1–2 paragraphs of information and are in need of expansion. Relevant WikiProject pages can provide a list of stubs that need improvement.</li>
<li>Before creating a new article, search related topics on Wikipedia to make sure your topic isn't already covered elsewhere. Often, an article may exist under another name, or the topic may be covered as a subsection of a broader article.</li>
</ul>"
]
}
{
title: 'Not such a good choice'
accordian: true
content: [
'<p>Articles that are "not such a good choice" for newcomers usually involve a lack of appropriate research material, highly controversial topics that may already be well developed, broad subjects, or topics for which it is difficult to demonstrate notability.</p>'
"<ul>
<li>You probably shouldn't try to completely overhaul articles on very broad topics (e.g., Law).</li>
<li>You should probably avoid trying to improve articles on topics that are highly controversial (for example, Global Warming, Abortion, or Scientology). You may be more successful starting a sub-article on the topic instead.</li>
<li>Don't work on an article that is already of high quality on Wikipedia, unless you discuss a specific plan for improving it with other editors beforehand.</li>
<li>Avoid working on something with scarce literature. Wikipedia articles cite secondary literature sources, so it's important to have enough sources for verification and to provide a neutral point of view.</li>
<li>Don't start articles with titles that imply an argument or essay-like approach (e.g., The Effects That The Recent Sub-Prime Mortgage Crisis has had on the US and Global Economics). These type of titles, and most likely the content too, may not be appropriate for an encyclopedia.</li>
</ul>"
]
}
{
title: ''
content: [
'<p>As the instructor, you should apply your own expertise to examining Wikipedia’s coverage of your field. You understand the broader intellectual context where individual topics fit in, you can recognize where Wikipedia falls short, you know—or know how to find—the relevant literature, and you know what topics your students should be able to handle. Your guidance on article choice and sourcing is critical for both your students’ success and the improvement of Wikipedia.</p>'
'<p>There are two recommended options for selecting articles:</p>'
]
}
{
title: 'Instructor prepares a list'
content: [
'<p>You (the instructor) prepare a list of appropriate \'non-existent\', \'stub\' or \'start\' articles ahead of time for the students to choose from. If possible, you may want to work with an experienced Wikipedian to create the list. Each student chooses an article from the list to work on. Although this requires more preparation, it may help students to start researching and writing their articles sooner.</p>'
]
}
{
title: 'Students find articles'
content: [
'<p>Each student explores Wikipedia and lists 3–5 topics on their Wikipedia user page that they are interested in for their main project. You (the instructor) should approve article choices before students proceed to writing. Having students find their own articles provides them with a sense of motivation and ownership over the assignment and exercises their critical thinking skills as they identify content gaps, but it may also lead to choices that are further afield from course material.</p>'
]
}
]
}
# Research and planning (researchwrite)
{
id: 'research_planning'
title: 'Research and planning'
showInOverview: true
formTitle: 'How should students plan their articles?'
infoTitle: 'About research and planning'
sections: [
{
title: ''
content: [
"<p>Students often wait until the last minute to do their research, or choose sources unsuitable for Wikipedia. That's why we recommend asking students to put together a bibliography of materials they want to use in editing the article, which can then be assessed by you and other Wikipedians.</p>"
"<p>Then, students should propose outlines for their articles. This can be a traditional outline, in which students identify which sections their articles will have and which aspects of the topic will be covered in each section. Alternatively, students can develop each outline in the form of a Wikipedia lead section — the untitled section at the beginning of an article that defines the topic and provide a concise summary of its content. Would you like your students to create traditional outlines, or compose outlines in the form of a Wikipedia-style lead section?</p>"
]
}
]
inputs: []
}
{
id: "tricky_topics"
title: 'Tricky topic areas'
showInOverview: true
formTitle: 'Will your students work in these areas?'
infoTitle: 'Medicine and other tricky topics'
instructions: 'Writing about some topics on Wikipedia can be especially tricky — in particular, topics related to medicine, human health, and psychology. Is there any chance some of your students will work in these topic areas?'
sections: [
{
title: ''
content: [
"<p>If you expect any of your students to work on medicine-related articles — including psychology — you\'ll need to familiarize yourself, and those students, with the special sourcing rules for these subject areas. These rules also apply if your students will be adding information on, say, the sociological implications of disease or other ways of looking at medical articles.Even if your course is not directly related to medicine, these rules may be important if your students are choosing their own topics.</p>"
]
}
{
title: 'Special considerations for medical and psychology topics'
accordian: true
content: [
"<p>Though it is not a medical resource, many people nonetheless turn to Wikipedia for medical information. Poor medical information on Wikipedia can have terrible consequences. For this reason, the standards for sourcing on medical topics differ from other topic areas. In particular, the use of primary sources is strongly discouraged.</p>"
"<p>By Wikipedia\'s conventions for medical content, inappropriate primary sources include original medical research such as clinical studies, case reports, or animal studies, even if published in respected journals. In general, medical and health-related content should be based on review articles from reputable journals and other professional medical literature. Popular press is not considered a reliable source for medical topics.</p>"
"<p>Topics that involve human psychology — in particular, clinical psychology or abnormal psychology — often overlap with medical topics on Wikipedia. In those cases, the same rules about acceptable sources apply.</p>"
]
}
]
inputs: []
}
# Drafts and mainspace (researchwrite)
{
id: "drafts_mainspace"
showInOverview: true
title: 'Drafts and mainspace'
formTitle: 'Choose one:'
infoTitle: 'About drafts and mainspace'
instructions: 'Once students have gotten a grip on their topics and the sources they will use to write about them, it’s time to start writing on Wikipedia. You can ask them to jump right in and edit live, or start them off in their own sandbox pages. There are pros and cons of each approach.'
sections: [
{
title: 'Pros and cons of sandboxes'
content: [
"<p>Sandboxes — pages associated with an individual editor that are not considered part of Wikipedia proper — make students feel safe. They can edit without the pressure of the whole world reading their drafts or other Wikipedians altering their writing. However, sandbox editing limits many of the unique aspects of Wikipedia as a teaching tool, such as collaborative writing and incremental drafting. Spending more than a week or two in sandboxes is strongly discouraged.</p>"
]
}
{
title: 'Pros and cons of editing live'
content: [
"<p>Editing live is exciting for the students because they can see their changes to the articles immediately and experience the collaborative editing process throughout the assignment. However, because new editors often unintentionally break Wikipedia rules, sometimes students’ additions are questioned or removed.</p>"
]
}
{
title: ''
content: '"<p>Will you have your students draft their early work in sandboxes, or work live from the beginning?</p>"'
}
]
inputs: []
}
# Peer feedback (researchwrite)
{
id: "peer_feedback"
title: 'Peer feedback'
showInOverview: true
infoTitle: 'About peer feedback'
formTitle: "How many peer reviews should each student conduct?"
instructions: "Collaboration is a critical element of contributing to Wikipedia."
sections: [
{
title: ''
content: [
"<p>For some students, this will happen spontaneously; their choice of topics will attract interested Wikipedians who will pitch in with ideas, copyedits, or even substantial contributions to the students’ articles. In many cases, however, there will be little spontaneous editing of students’ articles before the end of the term. Fortunately, you have a classroom full of peer reviewers. You can make the most of this by assigning students to review each others’ articles soon after full-length drafts are posted. This gives students plenty of time to act on the advice of their peers.</p>"
"<p>Peer reviews are another chance for students to practice critical thinking. Useful reviews focus on specific issues that can be improved. Since students are usually hesitant to criticize their classmates—and other Wikipedia editors may get annoyed with a stream of praise from students that glosses over an article's shortcomings—it's important to gives examples of the kinds of constructively critical feedback that are the most helpful.</p>"
"<p>How many peer reviews will you ask each student to contribute during the course?</p>"
]
}
]
inputs: []
}
# Supplementary assignments (researchwrite)
{
id: "supplementary_assignments"
title: 'Supplementary assignments'
showInOverview: true
formTitle: 'Choose supplementary assignments (optional):'
infoTitle: 'About supplementary assignments'
instructions: "By the time students have made improvements based on classmates' comments—and ideally suggestions from you as well—they should have produced nearly complete articles. Now is the chance to encourage them to wade a little deeper into Wikipedia and its norms and criteria to create great content."
sections: [
{
title: ''
content: [
"<p>You’ll probably have discussed many of the core principles of Wikipedia—and related issues you want to focus on—but now that they’ve experienced first-hand how Wikipedia works, this is a good time to return to topics like neutrality, media fluency, and the impacts and limits of Wikipedia. Consider bringing in a guest speaker, having a panel discussion, or simply having an open discussion in class about what the students have done so far and why (or whether) it matters.</p>"
"<p>In addition to the Wikipedia article writing itself, you may want to use a supplementary assignment. These assignments can reinforce and deepen your course's learning outcomes, and also help you to understand and evaluate the students' work and learning outcomes. On the left are some of the effective supplementary assignments that instructors often use. Scroll over each for more information, and select any that you wish to use for your course.</p>"
]
}
]
inputs: []
}
# DYK process (researchwrite)
{
id: "dyk"
title: 'DYK process'
showInOverview: false
infoTitle: 'About the <em>Did You Know</em> process'
formTitle: "Would you like to include DYK as an ungraded option?"
sections: [
{
title: ''
content: [
"<p>Did You Know (DYK) is a section on Wikipedia’s main page highlighting new content that has been added to Wikipedia in the last seven days. DYK can be a great opportunity to get students excited about their work. A typical DYK article will be viewed hundreds or thousands of times during its 6 hours in the spotlight.</p>"
"<p>The general criteria for DYK eligibility are that an article is larger than 1,500 characters of original, well-sourced content (about four paragraphs) and that it has been created or expanded (by a factor of 5x or more) within the last seven days. Students who meet this criteria may want to nominate their contributions for DYK.</p>"
"<p>The short window of eligibility, and the strict rules of the nomination process, can make it challenging to incorporate DYK into a classroom project. The DYK process should not be a required part of your assignment, as the DYK nomination process can be difficult for newcomers to navigate. However, it makes a great stretch goal when used selectively.</p>"
"<p>Would you like to include DYK as an ungraded option? If so, the Wiki Ed team can help you and your students during the term to identify work that may be a good candidate for DYK and answer questions you may have about the nomination process.</p>"
]
}
]
inputs: []
}
# Good article process (researchwrite)
{
id: "ga"
title: 'Good Article process'
showInOverview: false
infoTitle: 'About the <em>Good Article</em> process'
formTitle: "Would you like to include this as an ungraded option?"
sections: [
{
title: ''
content: [
"<p>Well-developed articles that have passed a Good Article (GA) review are a substantial achievement in their own right, and can also qualify for DYK. This peer review process involves checking a polished article against Wikipedia's GA criteria: articles must be well-written, verifiable and well-sourced with no original research, broad in coverage, neutral, stable, and appropriately illustrated (when possible). Practically speaking, a potential Good Article should look and sound like other well-developed Wikipedia articles, and it should provide a solid, well-balanced treatment of its subject.</p>"
"<p>The Good Article nominations process generally takes some time — between several days and several weeks, depending on the interest of reviewers and the size of the review backlog in the subject area — and should only be undertaken for articles that are already very well-developed. Typically, reviewers will identify further specific areas for improvement, and the article will be promoted to Good Article status if all the reviewers' concerns are addressed. Because of the uncertain timeline and the frequent need to make substantial changes to articles, Good Article nominations usually only make sense for articles that reach a mature state several weeks before the end of term, and those written by student editors who are already experienced, strong writers and who are willing to come back to address reviewer feedback (even after the term ends)</em>.</p>"
"<p>Would you like to include this as an ungraded option? If so, the Wiki Ed team can provide advice and support to high-achieving students who are interested in the Good Article process.</p>"
]
}
]
inputs: []
}
# {
# id: "overview"
# title: 'Assignment overview'
# showInOverview: false
# infoTitle: "About the course"
# formTitle: ""
# sections: [
# {
# content: [
# "<p>Now it's time to write a short description of your course and how this Wikipedia assignment fits into it. This will allow other Wikipedia editors to understand what students will be doing. Be sure to mention:"
# "<ul>
# <li>topics you're covering in the class</li>
# <li>what students will be asked to do on Wikipedia</li>
# <li>what types of articles your class will be working on</li>
# </ul>"
# ]
# }
# {
# content: [
# "<p class='description-container' style='margin-bottom:0;'></p>"
# "<div class='form-container'>
# <form id='courseLength' oninput='out.value = parseInt(courseLength.value); out2.value = parseInt(courseLength.value);' onsubmit='return false'>
# <div class='overview-input-container'>
# <label for='termStartDate'>Term begins</label>
# <input id='termStartDate' name='termStartDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='termEndDate'>Term ends</label>
# <input id='termEndDate' name='termEndDate' type='date'>
# </div>
# <!-- %div.overview-input-container -->
# <!-- %label{:for => 'endDate'} End Week of -->
# <!-- %input{:type => 'date', :id => 'endDate', :name => 'endDate'} -->
# <div class='overview-input-container'>
# <label for='courseStartDate'>Course starts on</label>
# <input id='courseStartDate' name='courseStartDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='startWeekOfDate'>Start week of</label>
# <input id='startWeekOfDate' name='startWeekOfDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='endWeekOfDate'>End week of</label>
# <input id='endWeekOfDate' name='endWeekOfDate' type='date'>
# </div>
# <div class='overview-input-container'>
# <label for='courseLength'>Course Length</label>
# <input defaultValue='16' id='cLength' max='16' min='6' name='courseLength' step='1' type='range' value='16'>
# <output name='out2'>16</output>
# <span>weeks</span>
# </div>
# <div class='overview-select-container'>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='monday' name='Monday' type='checkbox' value='0'>
# <label for='monday'>Mondays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='tuesday' name='Tuesday' type='checkbox' value='1'>
# <label for='tuesday'>Tuesdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='wednesday' name='Wednesday' type='checkbox' value='2'>
# <label for='wednesday'>Wednesdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='thursday' name='Thursday' type='checkbox' value='3'>
# <label for='thursday'>Thursdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='friday' name='Friday' type='checkbox' value='4'>
# <label for='friday'>Fridays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='saturday' name='Saturday' type='checkbox' value='5'>
# <label for='saturday'>Saturdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='sunday' name='Sunday' type='checkbox' value='6'>
# <label for='sunday'>Sundays</label>
# </div>
# </div>
# <div class='overview-readout-header'>
# <div class='readout'>
# <output for='courseLength' id='courseLengthReadout' name='out'>16</output>
# <span>weeks</span>
# </div>
# </div>
# </form>
# </div>
# <div>
# <div class='preview-container'></div>
# </div>"
# ]
# }
# {
# content: [
# "<div class='step-form-dates'></div>"
# ]
# }
# {
# title: ''
# content: [
# "<p><a id='publish' href='#' class='button' style='display:inline-block;text-align:center;'>Publish</a></p>"
# ]
# }
# ]
# inputs: []
# }
]
###################################
multimedia: [
# There are no extra steps associated with the multimedia path, so it skips straight to the outro.
]
###################################
copyedit: [
# There are no extra steps associated with the copyedit path, so it skips straight to the outro.
]
###################################
translation: [
# Translation step 1: Training / Translation essentials
{
id: "translation_essentials"
title: "Translation essentials"
showInOverview: true
formTitle: "Choose one"
infoTitle: "Preparing for a translation assignment"
sections: [
{
content: [
"<p>To get started, you'll want to introduce your students to the basic rules of writing Wikipedia articles and working with the Wikipedia community.</p>"
"<p>The online Student Training for Translation Assignments is a very brief introduction to the basics of editing Wikipedia. Should completion of the training be part of your students’ grade? (Make your choice at the top left).</p>"
]
}
]
}
# Translation step 2: Choosing articles
{
id: "translation_choosing_articles"
title: "Choosing articles"
showInOverview: true
formTitle: "How will your class select articles?"
infoTitle: "About choosing articles "
inputs: []
sections: [
{
title: ''
content: [
"<p>Students should find articles that have gone through Wikipedia's peer review process and have been deemed Good or Featured Articles in the language they are studying. For this assignment, students will copy these articles from the target language into a sandbox, and begin translating them into their L1.</p>"
"<p>Generally, articles that are ripe for translation are those that:</p>"
"<ul>
<li>are relevant to the culture, history, or other aspect of their target language (L2).</li>
<li>are quality articles in their target language’s Wikipedia, but not on their L1 Wikipedia.</li>
</ul>"
"<p>There are two recommended options for selecting articles:</p>"
]
}
{
title: 'Instructor prepares a list'
content: [
'<p>You (the instructor) prepare a list of appropriate articles from the target-language Wikipedia ahead of time for the students to choose from. If possible, you may want to work with an experienced Wikipedian to create the list. Each student chooses an article from your list to work on. Although this requires more preparation, it may help students to start translating their articles sooner, which is useful for shorter Wikipedia assignments.</p>'
]
}
{
title: 'Students find articles'
content: [
'<p>Each student explores the target-language Wikipedia and lists two topics on their Wikipedia user page that they are interested in for their main project. You (the instructor) should approve article choices before students proceed to translating. Having students find their own articles provides them with a sense of motivation and ownership over the assignment and exercises their critical thinking skills as they identify content gaps, but it may also lead to choices that are further afield from course material. This option is useful for classes that spend more time on the Wikipedia assignment.</p>'
]
}
]
}
# Translation step 3: Media literacy
{
id: "translation_media_literacy"
title: "Media literacy"
showInOverview: true
formTitle: "Add a media literacy component?"
infoTitle: "Optional: add a more challenging media literacy component"
instructions: "For advanced students, you can add a challenging media literacy component to your course. This requires students to fact-check information on the target-language Wikipedia using the target language. They also find sources in their base language to support those claims on their base-language Wikipedia."
}
]
}
# Outro steps common to all branches
outro_steps: [
{
id: "grading"
title: 'Grading'
showInOverview: false
formTitle: "How will students' grades for assignments be determined?"
infoTitle: "About grading"
instructions: 'Grading Wikipedia assignments can be a challenge. Here are some tips for grading your Wikipedia assignments:'
sections: [
{
title: 'Know all of your students\' Wikipedia usernames.'
accordian: true
content: [
"<p>Without knowing the students' usernames, you won't be able to grade them.</p>"
"<p>Make sure all students enroll on the course page. Once all students have signed the list, you can click on \"user contributions\" (in the menu bar on the left hand side of your browser screen) to review that student's activities on Wikipedia. If you have made student training compulsory, you can check the <a href='https://en.wikipedia.org/wiki/Wikipedia:Training/For_students/Training_feedback' target='_blank'>feedback page</a> to see which students have completed it.</p>"
]
}
{
title: 'Be specific about your expectations.'
accordian: true
content: [
"<p>Being specific about what you expect your students to do is crucial for grading. For example, students could be asked to add a minimum of three sections to an existing article, or a minimum of eight references to an existing article that lacks the appropriate sourcing.</p>"
]
}
{
title: 'Grade based on what students contribute to Wikipedia, not what remains on Wikipedia at the course\'s end.'
accordian: true
content: [
"<p>You can see a student's contributions in the article history, even if some writing was removed by the community (or the student). A student’s content could be edited for many reasons, and can even be evidence of a student reflecting critically on their own contributions. Furthermore, if students feel they must defend control of an article for the sake of their grade, this can lead to conflict with other editors.</p>"
"<p>Wikipedia is a collaborative writing environment driven by verifiability, noteworthiness and neutral point of view – all of which have created challenges for students familiar with a persuasive writing format in classrooms. Encourage students to reflect on edits to improve their understanding of the process and the community.</p>"
]
}
]
inputs: []
}
{
id: "overview"
title: 'Assignment overview'
showInOverview: false
infoTitle: "About the course"
formTitle: ""
sections: [
{
content: [
"<p>Now it's time to write a short description of your course and how this Wikipedia assignment fits into it. This will allow other Wikipedia editors to understand what students will be doing. Be sure to mention:"
"<ul>
<li>topics you're covering in the class</li>
<li>what students will be asked to do on Wikipedia</li>
<li>what types of articles your class will be working on</li>
</ul>"
]
}
{
content: [
"<p class='description-container' style='margin-bottom:0;'></p>"
"<div>
<div class='preview-container'></div>
</div>"
]
}
# {
# content: [
# "<div class='step-form-dates'></div>"
# ]
# }
{
title: ''
content: [
"<p><a id='publish' href='#' class='button' style='display:inline-block;text-align:center;'>Publish</a></p>"
]
}
]
inputs: []
}
]
}
# Expose the full wizard configuration (intro steps, per-assignment pathways,
# and outro steps) for consumption by the wizard UI.
module.exports = WizardConfig
## THIS FILE IS THE DATA CONTENT AND STEP ORDER CONFIGURATION FOR THE WIZARD AS WELL AS ASSIGNMENT PATHWAYS ##
## UNCOMMENTING THE DATA INSIDE THE PATHWAYS SECTION WILL ADD MORE STEPS INTO THOSE ALTERNATIVE PATHWAYS ##
WizardConfig = {
## The intro_steps are the steps before the wizard branches into different assignment types.
intro_steps: [
# Intro
{
id: "intro"
title: '<center>Welcome to the<br />Assignment Design Wizard!</center>'
login_instructions: 'Click Login with Wikipedia to get started'
instructions: ''
inputs: []
sections: [
{
content: [
            "<p class='large'>Thank you for visiting the Assignment Design Wizard. Wiki Ed is excited to announce that we will be launching a new set of tools for creating and tracking a Wikipedia assignment for the Fall 2015 term. You may look through the wizard now, but we are advising instructors teaching with Wikipedia in the fall to wait for our new tools to be ready to plan their assignments. Those teaching during the summer should go through the current wizard to plan their Wikipedia projects. If you have any questions, please feel free to contact <NAME> (<EMAIL>). We look forward to working with you in the Fall!</p>"
"<p class='large'>This tool will help you to easily create a customized Wikipedia classroom assignment and customized syllabus for your course.</p>"
"<p class='large'>When you’re finished, you'll have a ready-to-use lesson plan, with weekly assignments, published directly onto a sandbox page on Wikipedia where you can customize it even further.</p>"
"<p class='large'>Let’s start by filling in some basics about you and your course:</p>"
]
}
]
}
# Assignment selection
{
id: "assignment_selection"
title: 'Assignment type selection'
infoTitle: 'About assignment selections'
formTitle: 'Available assignments:'
instructions: "You can teach with Wikipedia in several different ways, and it's important to design an assignment that is suitable for Wikipedia <em>and</em> achieves your student learning objectives. Your first step is to choose which assignment(s) you'll be asking your students to complete as part of the course."
inputs: []
sections: [
{
title: ''
content: [
"<p>We've created some guidelines to help you, but you'll need to make some key decisions, such as: which learning objectives are you targeting with this assignment? What skills do your students already have? How much time can you devote to the assignment?</p>"
"<p>Most instructors ask their students to write or expand an article. Students start by learning the basics of Wikipedia, and then focus on the content. They plan, research, and write a previously missing Wikipedia article, or contribute to an incomplete entry on a course-related topic. This assignment typically replaces a term paper or research project, or it forms the literature review section of a larger paper. The student learning outcome is high with this assignment, but it does take a significant amount of time. Your students need to learn both the wiki markup language and key policies and expectations of the Wikipedia-editing community.</p>"
"<p>If writing an article isn't right for your class, other assignment options offer students valuable learning opportunities and help to improve Wikipedia. Select an assignment type on the left to learn more.</p>"
]
}
]
}
]
# Here begin the pathways for different assignment types: researchwrite,
pathways: {
###################################
researchwrite: [
# Wikipedia essentials (researchwrite)
{
id: "learning_essentials"
title: 'Wikipedia essentials'
showInOverview: true
formTitle: 'Choose one:'
infoTitle: 'About Wikipedia essentials'
instructions: "To get started, you'll want to introduce your students to the basic rules of writing Wikipedia articles and working with the Wikipedia community."
inputs: []
sections: [
{
title: ''
content: [
'<p>As their first Wikipedia assignment milestone, you can ask the students to create user accounts and then complete the <em>online training for students</em>. This training introduces the Wikipedia community and how it works, demonstrates the basics of editing and walks students through their first edits, gives advice for selecting articles and drafting revisions, and explains further sources of support as they continue along. It takes about an hour and ends with a certification step, which you can use to verify that students completed the training.</p>'
'<p>Students who complete this training are better prepared to focus on learning outcomes, and spend less time distracted by cleaning up after errors.</p>'
'<p>Will completion of the student training be part of your students\' grades? (Make your choice at the top left.)</p>'
]
}
{
title: 'Assignment milestones'
accordian: true
content: [
"<ul>
<li>Create a user account and enroll on the course page. </li>
<li>Complete the <em>online training for students</em>. During this training, you will make edits in a sandbox and learn the basic rules of Wikipedia.</li>
<li>To practice editing and communicating on Wikipedia, introduce yourself to any Wikipedians helping your class (such as a Wikipedia Ambassador), and leave a message for a classmate on their user talk page.</li>
</ul>"
]
}
]
}
# Getting started with editing (researchwrite)
{
id: "getting_started"
title: 'Getting started with editing'
showInOverview: true
infoTitle: 'About early editing tasks'
instructions: "It is important for students to start editing Wikipedia early on. That way, they become familiar with Wikipedia's markup (\"wikisyntax\", \"wikimarkup\", or \"wikicode\") and the mechanics of editing and communicating on the site. We recommend assigning a few basic Wikipedia tasks early on."
formTitle: 'Which basic assignments would you like to include?'
inputs: []
sections: [
{
title: ''
content: [
'<p>Which introductory assignments would you like to use to acclimate your students to Wikipedia? You can select none, one, or more. Whichever you select will be added to the assignment timeline.</p>'
'<ul>
<li><strong>Critique an article.</strong> Critically evaluate an existing Wikipedia article related to the class, and leave suggestions for improving it on the article’s talk page. </li>
<li><strong>Add to an article.</strong> Using course readings or other relevant secondary sources, add 1–2 sentences of new information to a Wikipedia article related to the class. Be sure to integrate it well into the existing article, and include a citation to the source. </li>
<li><strong>Copyedit an article.</strong> Browse Wikipedia until you find an article that you would like to improve, and make some edits to improve the language or formatting. </li>
<li><strong>Illustrate an article.</strong> Find an opportunity to improve an article by uploading and adding a photo you took.</li>
</ul>'
]
}
{
content: [
'<p>For most courses, the <em>Critique an article</em> and <em>Add to an article</em> exercises provide a nice foundation for the main writing project. These have been selected by default.</p>'
]
}
]
}
# Choosing articles (researchwrite)
{
id: 'choosing_articles'
title: 'Choosing articles'
showInOverview: true
formTitle: 'How will your class select articles?'
infoTitle: 'About choosing articles'
inputs: []
sections: [
{
title: ''
content: [
'<p>Choosing the right (or wrong) articles to work on can make (or break) a Wikipedia writing assignment.</p>'
'<p>Some articles may initially look easy to improve, but quality references to expand them may be difficult to find. Finding topics with the right balance between poor Wikipedia coverage and available literature from which to expand that coverage can be tricky. Here are some guidelines to keep in mind when selecting articles for improvement.</p>'
]
}
{
title: 'Good choice'
accordian: true
content: [
"<ul>
<li>Choose a well-established topic for which a lot of literature is available in its field, but which isn't covered extensively on Wikipedia.</li>
<li>Gravitate toward \"stub\" and \"start\" class articles. These articles often have only 1–2 paragraphs of information and are in need of expansion. Relevant WikiProject pages can provide a list of stubs that need improvement.</li>
<li>Before creating a new article, search related topics on Wikipedia to make sure your topic isn't already covered elsewhere. Often, an article may exist under another name, or the topic may be covered as a subsection of a broader article.</li>
</ul>"
]
}
{
title: 'Not such a good choice'
accordian: true
content: [
'<p>Articles that are "not such a good choice" for newcomers usually involve a lack of appropriate research material, highly controversial topics that may already be well developed, broad subjects, or topics for which it is difficult to demonstrate notability.</p>'
"<ul>
<li>You probably shouldn't try to completely overhaul articles on very broad topics (e.g., Law).</li>
<li>You should probably avoid trying to improve articles on topics that are highly controversial (for example, Global Warming, Abortion, or Scientology). You may be more successful starting a sub-article on the topic instead.</li>
<li>Don't work on an article that is already of high quality on Wikipedia, unless you discuss a specific plan for improving it with other editors beforehand.</li>
<li>Avoid working on something with scarce literature. Wikipedia articles cite secondary literature sources, so it's important to have enough sources for verification and to provide a neutral point of view.</li>
                  <li>Don't start articles with titles that imply an argument or essay-like approach (e.g., The Effects That The Recent Sub-Prime Mortgage Crisis has had on the US and Global Economics). These types of titles, and most likely the content too, may not be appropriate for an encyclopedia.</li>
</ul>"
]
}
{
title: ''
content: [
'<p>As the instructor, you should apply your own expertise to examining Wikipedia’s coverage of your field. You understand the broader intellectual context where individual topics fit in, you can recognize where Wikipedia falls short, you know—or know how to find—the relevant literature, and you know what topics your students should be able to handle. Your guidance on article choice and sourcing is critical for both your students’ success and the improvement of Wikipedia.</p>'
'<p>There are two recommended options for selecting articles:</p>'
]
}
{
title: 'Instructor prepares a list'
content: [
'<p>You (the instructor) prepare a list of appropriate \'non-existent\', \'stub\' or \'start\' articles ahead of time for the students to choose from. If possible, you may want to work with an experienced Wikipedian to create the list. Each student chooses an article from the list to work on. Although this requires more preparation, it may help students to start researching and writing their articles sooner.</p>'
]
}
{
title: 'Students find articles'
content: [
'<p>Each student explores Wikipedia and lists 3–5 topics on their Wikipedia user page that they are interested in for their main project. You (the instructor) should approve article choices before students proceed to writing. Having students find their own articles provides them with a sense of motivation and ownership over the assignment and exercises their critical thinking skills as they identify content gaps, but it may also lead to choices that are further afield from course material.</p>'
]
}
]
}
# Research and planning (researchwrite)
{
id: 'research_planning'
title: 'Research and planning'
showInOverview: true
formTitle: 'How should students plan their articles?'
infoTitle: 'About research and planning'
sections: [
{
title: ''
content: [
"<p>Students often wait until the last minute to do their research, or choose sources unsuitable for Wikipedia. That's why we recommend asking students to put together a bibliography of materials they want to use in editing the article, which can then be assessed by you and other Wikipedians.</p>"
"<p>Then, students should propose outlines for their articles. This can be a traditional outline, in which students identify which sections their articles will have and which aspects of the topic will be covered in each section. Alternatively, students can develop each outline in the form of a Wikipedia lead section — the untitled section at the beginning of an article that defines the topic and provide a concise summary of its content. Would you like your students to create traditional outlines, or compose outlines in the form of a Wikipedia-style lead section?</p>"
]
}
]
inputs: []
}
{
id: "tricky_topics"
title: 'Tricky topic areas'
showInOverview: true
formTitle: 'Will your students work in these areas?'
infoTitle: 'Medicine and other tricky topics'
instructions: 'Writing about some topics on Wikipedia can be especially tricky — in particular, topics related to medicine, human health, and psychology. Is there any chance some of your students will work in these topic areas?'
sections: [
{
title: ''
content: [
              "<p>If you expect any of your students to work on medicine-related articles — including psychology — you\'ll need to familiarize yourself, and those students, with the special sourcing rules for these subject areas. These rules also apply if your students will be adding information on, say, the sociological implications of disease or other ways of looking at medical articles. Even if your course is not directly related to medicine, these rules may be important if your students are choosing their own topics.</p>"
]
}
{
title: 'Special considerations for medical and psychology topics'
accordian: true
content: [
"<p>Though it is not a medical resource, many people nonetheless turn to Wikipedia for medical information. Poor medical information on Wikipedia can have terrible consequences. For this reason, the standards for sourcing on medical topics differ from other topic areas. In particular, the use of primary sources is strongly discouraged.</p>"
"<p>By Wikipedia\'s conventions for medical content, inappropriate primary sources include original medical research such as clinical studies, case reports, or animal studies, even if published in respected journals. In general, medical and health-related content should be based on review articles from reputable journals and other professional medical literature. Popular press is not considered a reliable source for medical topics.</p>"
"<p>Topics that involve human psychology — in particular, clinical psychology or abnormal psychology — often overlap with medical topics on Wikipedia. In those cases, the same rules about acceptable sources apply.</p>"
]
}
]
inputs: []
}
# Drafts and mainspace (researchwrite)
{
id: "drafts_mainspace"
showInOverview: true
title: 'Drafts and mainspace'
formTitle: 'Choose one:'
infoTitle: 'About drafts and mainspace'
instructions: 'Once students have gotten a grip on their topics and the sources they will use to write about them, it’s time to start writing on Wikipedia. You can ask them to jump right in and edit live, or start them off in their own sandbox pages. There are pros and cons of each approach.'
sections: [
{
title: 'Pros and cons of sandboxes'
content: [
"<p>Sandboxes — pages associated with an individual editor that are not considered part of Wikipedia proper — make students feel safe. They can edit without the pressure of the whole world reading their drafts or other Wikipedians altering their writing. However, sandbox editing limits many of the unique aspects of Wikipedia as a teaching tool, such as collaborative writing and incremental drafting. Spending more than a week or two in sandboxes is strongly discouraged.</p>"
]
}
{
title: 'Pros and cons of editing live'
content: [
"<p>Editing live is exciting for the students because they can see their changes to the articles immediately and experience the collaborative editing process throughout the assignment. However, because new editors often unintentionally break Wikipedia rules, sometimes students’ additions are questioned or removed.</p>"
]
}
        {
          title: ''
          content: [
            "<p>Will you have your students draft their early work in sandboxes, or work live from the beginning?</p>"
          ]
        }
]
inputs: []
}
# Peer feedback (researchwrite)
{
id: "peer_feedback"
title: 'Peer feedback'
showInOverview: true
infoTitle: 'About peer feedback'
formTitle: "How many peer reviews should each student conduct?"
instructions: "Collaboration is a critical element of contributing to Wikipedia."
sections: [
{
title: ''
content: [
"<p>For some students, this will happen spontaneously; their choice of topics will attract interested Wikipedians who will pitch in with ideas, copyedits, or even substantial contributions to the students’ articles. In many cases, however, there will be little spontaneous editing of students’ articles before the end of the term. Fortunately, you have a classroom full of peer reviewers. You can make the most of this by assigning students to review each others’ articles soon after full-length drafts are posted. This gives students plenty of time to act on the advice of their peers.</p>"
"<p>Peer reviews are another chance for students to practice critical thinking. Useful reviews focus on specific issues that can be improved. Since students are usually hesitant to criticize their classmates—and other Wikipedia editors may get annoyed with a stream of praise from students that glosses over an article's shortcomings—it's important to gives examples of the kinds of constructively critical feedback that are the most helpful.</p>"
"<p>How many peer reviews will you ask each student to contribute during the course?</p>"
]
}
]
inputs: []
}
# Supplementary assignments (researchwrite)
{
id: "supplementary_assignments"
title: 'Supplementary assignments'
showInOverview: true
formTitle: 'Choose supplementary assignments (optional):'
infoTitle: 'About supplementary assignments'
instructions: "By the time students have made improvements based on classmates' comments—and ideally suggestions from you as well—they should have produced nearly complete articles. Now is the chance to encourage them to wade a little deeper into Wikipedia and its norms and criteria to create great content."
sections: [
{
title: ''
content: [
"<p>You’ll probably have discussed many of the core principles of Wikipedia—and related issues you want to focus on—but now that they’ve experienced first-hand how Wikipedia works, this is a good time to return to topics like neutrality, media fluency, and the impacts and limits of Wikipedia. Consider bringing in a guest speaker, having a panel discussion, or simply having an open discussion in class about what the students have done so far and why (or whether) it matters.</p>"
"<p>In addition to the Wikipedia article writing itself, you may want to use a supplementary assignment. These assignments can reinforce and deepen your course's learning outcomes, and also help you to understand and evaluate the students' work and learning outcomes. On the left are some of the effective supplementary assignments that instructors often use. Scroll over each for more information, and select any that you wish to use for your course.</p>"
]
}
]
inputs: []
}
# DYK process (researchwrite)
{
id: "dyk"
title: 'DYK process'
showInOverview: false
infoTitle: 'About the <em>Did You Know</em> process'
formTitle: "Would you like to include DYK as an ungraded option?"
sections: [
{
title: ''
content: [
"<p>Did You Know (DYK) is a section on Wikipedia’s main page highlighting new content that has been added to Wikipedia in the last seven days. DYK can be a great opportunity to get students excited about their work. A typical DYK article will be viewed hundreds or thousands of times during its 6 hours in the spotlight.</p>"
"<p>The general criteria for DYK eligibility are that an article is larger than 1,500 characters of original, well-sourced content (about four paragraphs) and that it has been created or expanded (by a factor of 5x or more) within the last seven days. Students who meet this criteria may want to nominate their contributions for DYK.</p>"
"<p>The short window of eligibility, and the strict rules of the nomination process, can make it challenging to incorporate DYK into a classroom project. The DYK process should not be a required part of your assignment, as the DYK nomination process can be difficult for newcomers to navigate. However, it makes a great stretch goal when used selectively.</p>"
"<p>Would you like to include DYK as an ungraded option? If so, the Wiki Ed team can help you and your students during the term to identify work that may be a good candidate for DYK and answer questions you may have about the nomination process.</p>"
]
}
]
inputs: []
}
# Good article process (researchwrite)
{
id: "ga"
title: 'Good Article process'
showInOverview: false
infoTitle: 'About the <em>Good Article</em> process'
formTitle: "Would you like to include this as an ungraded option?"
sections: [
{
title: ''
content: [
"<p>Well-developed articles that have passed a Good Article (GA) review are a substantial achievement in their own right, and can also qualify for DYK. This peer review process involves checking a polished article against Wikipedia's GA criteria: articles must be well-written, verifiable and well-sourced with no original research, broad in coverage, neutral, stable, and appropriately illustrated (when possible). Practically speaking, a potential Good Article should look and sound like other well-developed Wikipedia articles, and it should provide a solid, well-balanced treatment of its subject.</p>"
              "<p>The Good Article nominations process generally takes some time — between several days and several weeks, depending on the interest of reviewers and the size of the review backlog in the subject area — and should only be undertaken for articles that are already very well-developed. Typically, reviewers will identify further specific areas for improvement, and the article will be promoted to Good Article status if all the reviewers' concerns are addressed. Because of the uncertain timeline and the frequent need to make substantial changes to articles, Good Article nominations usually only make sense for articles that reach a mature state several weeks before the end of term, and those written by student editors who are already experienced, strong writers and who are willing to come back to address reviewer feedback (even after the term ends).</p>"
"<p>Would you like to include this as an ungraded option? If so, the Wiki Ed team can provide advice and support to high-achieving students who are interested in the Good Article process.</p>"
]
}
]
inputs: []
}
# {
# id: "overview"
# title: 'Assignment overview'
# showInOverview: false
# infoTitle: "About the course"
# formTitle: ""
# sections: [
# {
# content: [
# "<p>Now it's time to write a short description of your course and how this Wikipedia assignment fits into it. This will allow other Wikipedia editors to understand what students will be doing. Be sure to mention:"
# "<ul>
# <li>topics you're covering in the class</li>
# <li>what students will be asked to do on Wikipedia</li>
# <li>what types of articles your class will be working on</li>
# </ul>"
# ]
# }
# {
# content: [
# "<p class='description-container' style='margin-bottom:0;'></p>"
# "<div class='form-container'>
# <form id='courseLength' oninput='out.value = parseInt(courseLength.value); out2.value = parseInt(courseLength.value);' onsubmit='return false'>
# <div class='overview-input-container'>
# <label for='termStartDate'>Term begins</label>
# <input id='termStartDate' name='termStartDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='termEndDate'>Term ends</label>
# <input id='termEndDate' name='termEndDate' type='date'>
# </div>
# <!-- %div.overview-input-container -->
# <!-- %label{:for => 'endDate'} End Week of -->
# <!-- %input{:type => 'date', :id => 'endDate', :name => 'endDate'} -->
# <div class='overview-input-container'>
# <label for='courseStartDate'>Course starts on</label>
# <input id='courseStartDate' name='courseStartDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='startWeekOfDate'>Start week of</label>
# <input id='startWeekOfDate' name='startWeekOfDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='endWeekOfDate'>End week of</label>
# <input id='endWeekOfDate' name='endWeekOfDate' type='date'>
# </div>
# <div class='overview-input-container'>
# <label for='courseLength'>Course Length</label>
# <input defaultValue='16' id='cLength' max='16' min='6' name='courseLength' step='1' type='range' value='16'>
# <output name='out2'>16</output>
# <span>weeks</span>
# </div>
# <div class='overview-select-container'>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='monday' name='Monday' type='checkbox' value='0'>
# <label for='monday'>Mondays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='tuesday' name='Tuesday' type='checkbox' value='1'>
# <label for='tuesday'>Tuesdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='wednesday' name='Wednesday' type='checkbox' value='2'>
# <label for='wednesday'>Wednesdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='thursday' name='Thursday' type='checkbox' value='3'>
# <label for='thursday'>Thursdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='friday' name='Friday' type='checkbox' value='4'>
# <label for='friday'>Fridays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='saturday' name='Saturday' type='checkbox' value='5'>
# <label for='saturday'>Saturdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='sunday' name='Sunday' type='checkbox' value='6'>
# <label for='sunday'>Sundays</label>
# </div>
# </div>
# <div class='overview-readout-header'>
# <div class='readout'>
# <output for='courseLength' id='courseLengthReadout' name='out'>16</output>
# <span>weeks</span>
# </div>
# </div>
# </form>
# </div>
# <div>
# <div class='preview-container'></div>
# </div>"
# ]
# }
# {
# content: [
# "<div class='step-form-dates'></div>"
# ]
# }
# {
# title: ''
# content: [
# "<p><a id='publish' href='#' class='button' style='display:inline-block;text-align:center;'>Publish</a></p>"
# ]
# }
# ]
# inputs: []
# }
]
###################################
multimedia: [
# There are no extra steps associated with the multimedia path, so it skips straight to the outro.
]
###################################
copyedit: [
# There are no extra steps associated with the copyedit path, so it skips straight to the outro.
]
###################################
translation: [
# Translation step 1: Training / Translation essentials
{
id: "translation_essentials"
title: "Translation essentials"
showInOverview: true
formTitle: "Choose one"
infoTitle: "Preparing for a translation assignment"
sections: [
{
content: [
"<p>To get started, you'll want to introduce your students to the basic rules of writing Wikipedia articles and working with the Wikipedia community.</p>"
"<p>The online Student Training for Translation Assignments is a very brief introduction to the basics of editing Wikipedia. Should completion of the training be part of your students’ grade? (Make your choice at the top left).</p>"
]
}
]
}
# Translation step 2: Choosing articles
{
id: "translation_choosing_articles"
title: "Choosing articles"
showInOverview: true
formTitle: "How will your class select articles?"
infoTitle: "About choosing articles "
inputs: []
sections: [
{
title: ''
content: [
"<p>Students should find articles that have gone through Wikipedia's peer review process and have been deemed Good or Featured Articles in the language they are studying. For this assignment, students will copy these articles from the target language into a sandbox, and begin translating it into their L1.</p>"
"<p>Generally, articles that are ripe for translation are those that:</p>"
"<ul>
<li>are relevant to the culture, history, or other aspect of their target language (L2).</li>
<li>are quality articles in their target language’s Wikipedia, but not on their L1 Wikipedia.</li>
</ul>"
"<p>There are two recommended options for selecting articles:</p>"
]
}
{
title: 'Instructor prepares a list'
content: [
'<p>You (the instructor) prepare a list of appropriate articles from the target-language Wikipedia ahead of time for the students to choose from. If possible, you may want to work with an experienced Wikipedian to create the list. Each student chooses an article from your list to work on. Although this requires more preparation, it may help students to start translating their articles sooner, which is useful for shorter Wikipedia assignments.</p>'
]
}
{
title: 'Students find articles'
content: [
'<p>Each student explores the target-language Wikipedia and lists two topics on their Wikipedia user page that they are interested in for their main project. You (the instructor) should approve article choices before students proceed to translating. Having students find their own articles provides them with a sense of motivation and ownership over the assignment and exercises their critical thinking skills as they identify content gaps, but it may also lead to choices that are further afield from course material. This options is useful for classes that spend more time on the Wikipedia assignment.</p>'
]
}
]
}
# Translation step 3: Media literacy
{
id: "translation_media_literacy"
title: "Media literacy"
showInOverview: true
formTitle: "Add a media literacy component?"
infoTitle: "Optional: add a more challenging media literacy component"
instructions: "For advanced students, you can add a challenging media literacy component to your course. This requires students to fact-check information on the target-language Wikipedia using the target language. They also find sources in their base language to support those claims on their base-language Wikipedia."
}
]
}
# Outro steps common to all branches
outro_steps: [
{
id: "grading"
title: 'Grading'
showInOverview: false
formTitle: "How will students' grades for assignments be determined?"
infoTitle: "About grading"
instructions: 'Grading Wikipedia assignments can be a challenge. Here are some tips for grading your Wikipedia assignments:'
sections: [
{
title: 'Know all of your students\' Wikipedia usernames.'
accordian: true
content: [
"<p>Without knowing the students' usernames, you won't be able to grade them.</p>"
"<p>Make sure all students enroll on the course page. Once all students have signed the list, you can click on \"user contributions\" (in the menu bar on the left hand side of your browser screen) to review that student's activities on Wikipedia. If you have made student training compulsory, you can check the <a href='https://en.wikipedia.org/wiki/Wikipedia:Training/For_students/Training_feedback' target='_blank'>feedback page</a> to see which students have completed it.</p>"
]
}
{
title: 'Be specific about your expectations.'
accordian: true
content: [
"<p>Being specific about what you expect your students to do is crucial for grading. For example, students could be asked to add a minimum of three sections to an existing article, or a minimum of eight references to an existing article that lacks the appropriate sourcing.</p>"
]
}
{
title: 'Grade based on what students contribute to Wikipedia, not what remains on Wikipedia at the course\'s end.'
accordian: true
content: [
"<p>You can see a student's contributions in the article history, even if some writing was removed by the community (or the student). A student’s content could be edited for many reasons, and can even be evidence of a student reflecting critically on their own contributions. Furthermore, if students feel they must defend control of an article for the sake of their grade, this can lead to conflict with other editors.</p>"
"<p>Wikipedia is a collaborative writing environment driven by verifiability, noteworthiness and neutral point of view – all of which have created challenges for students familiar with a persuasive writing format in classrooms. Encourage students to reflect on edits to improve their understanding of the process and the community.</p>"
]
}
]
inputs: []
}
{
id: "overview"
title: 'Assignment overview'
showInOverview: false
infoTitle: "About the course"
formTitle: ""
sections: [
{
content: [
"<p>Now it's time to write a short description of your course and how this Wikipedia assignment fits into it. This will allow other Wikipedia editors to understand what students will be doing. Be sure to mention:"
"<ul>
<li>topics you're covering in the class</li>
<li>what students will be asked to do on Wikipedia</li>
<li>what types of articles your class will be working on</li>
</ul>"
]
}
{
content: [
"<p class='description-container' style='margin-bottom:0;'></p>"
"<div>
<div class='preview-container'></div>
</div>"
]
}
# {
# content: [
# "<div class='step-form-dates'></div>"
# ]
# }
{
title: ''
content: [
"<p><a id='publish' href='#' class='button' style='display:inline-block;text-align:center;'>Publish</a></p>"
]
}
]
inputs: []
}
]
}
module.exports = WizardConfig
| true | ## THIS FILE IS THE DATA CONTENT AND STEP ORDER CONFIGRATION FOR THE WIZARD AS WELL AS ASSIGNMENT PATHWAYS ##
## UNCOMMENTING THE DATA INSIDE THE PATHWAYS SECTION WILL ADD MORE STEPS INTO THOSE ALTERNATIVE PATHWAYS ##
WizardConfig = {
## The intro_steps are the steps before the wizard branches into different assignment types.
intro_steps: [
# Intro
{
id: "intro"
title: '<center>Welcome to the<br />Assignment Design Wizard!</center>'
login_instructions: 'Click Login with Wikipedia to get started'
instructions: ''
inputs: []
sections: [
{
content: [
"<p class='large'>Thank you for visiting the Assignment Design Wizard. Wiki Ed is excited to announce that we will be launching a new set of tools for creating and tracking a Wikipedia assignment for the Fall 2015 term. You may look through the wizard now, but we are advising instructors teaching with Wikipedia in the fall to wait for our new tools to be ready to plan their assignments. Those teaching during the summer, should go through the current wizard to plan their Wikipedia projects. IF you have any questions, please feel free to contact PI:NAME:<NAME>END_PI (PI:EMAIL:<EMAIL>END_PI). We look forward to working with you in the Fall!</p>"
"<p class='large'>This tool will help you to easily create a customized Wikipedia classroom assignment and customized syllabus for your course.</p>"
"<p class='large'>When you’re finished, you'll have a ready-to-use lesson plan, with weekly assignments, published directly onto a sandbox page on Wikipedia where you can customize it even further.</p>"
"<p class='large'>Let’s start by filling in some basics about you and your course:</p>"
]
}
]
}
# Assignment selection
{
id: "assignment_selection"
title: 'Assignment type selection'
infoTitle: 'About assignment selections'
formTitle: 'Available assignments:'
instructions: "You can teach with Wikipedia in several different ways, and it's important to design an assignment that is suitable for Wikipedia <em>and</em> achieves your student learning objectives. Your first step is to choose which assignment(s) you'll be asking your students to complete as part of the course."
inputs: []
sections: [
{
title: ''
content: [
"<p>We've created some guidelines to help you, but you'll need to make some key decisions, such as: which learning objectives are you targeting with this assignment? What skills do your students already have? How much time can you devote to the assignment?</p>"
"<p>Most instructors ask their students to write or expand an article. Students start by learning the basics of Wikipedia, and then focus on the content. They plan, research, and write a previously missing Wikipedia article, or contribute to an incomplete entry on a course-related topic. This assignment typically replaces a term paper or research project, or it forms the literature review section of a larger paper. The student learning outcome is high with this assignment, but it does take a significant amount of time. Your students need to learn both the wiki markup language and key policies and expectations of the Wikipedia-editing community.</p>"
"<p>If writing an article isn't right for your class, other assignment options offer students valuable learning opportunities and help to improve Wikipedia. Select an assignment type on the left to learn more.</p>"
]
}
]
}
]
  # Here begin the pathways for different assignment types: researchwrite, multimedia, copyedit, and translation.
pathways: {
###################################
researchwrite: [
# Wikipedia essentials (researchwrite)
{
id: "learning_essentials"
title: 'Wikipedia essentials'
showInOverview: true
formTitle: 'Choose one:'
infoTitle: 'About Wikipedia essentials'
instructions: "To get started, you'll want to introduce your students to the basic rules of writing Wikipedia articles and working with the Wikipedia community."
inputs: []
sections: [
{
title: ''
content: [
'<p>As their first Wikipedia assignment milestone, you can ask the students to create user accounts and then complete the <em>online training for students</em>. This training introduces the Wikipedia community and how it works, demonstrates the basics of editing and walks students through their first edits, gives advice for selecting articles and drafting revisions, and explains further sources of support as they continue along. It takes about an hour and ends with a certification step, which you can use to verify that students completed the training.</p>'
'<p>Students who complete this training are better prepared to focus on learning outcomes, and spend less time distracted by cleaning up after errors.</p>'
'<p>Will completion of the student training be part of your students\' grades? (Make your choice at the top left.)</p>'
]
}
{
title: 'Assignment milestones'
accordian: true
content: [
"<ul>
<li>Create a user account and enroll on the course page. </li>
<li>Complete the <em>online training for students</em>. During this training, you will make edits in a sandbox and learn the basic rules of Wikipedia.</li>
<li>To practice editing and communicating on Wikipedia, introduce yourself to any Wikipedians helping your class (such as a Wikipedia Ambassador), and leave a message for a classmate on their user talk page.</li>
</ul>"
]
}
]
}
# Getting started with editing (researchwrite)
{
id: "getting_started"
title: 'Getting started with editing'
showInOverview: true
infoTitle: 'About early editing tasks'
instructions: "It is important for students to start editing Wikipedia early on. That way, they become familiar with Wikipedia's markup (\"wikisyntax\", \"wikimarkup\", or \"wikicode\") and the mechanics of editing and communicating on the site. We recommend assigning a few basic Wikipedia tasks early on."
formTitle: 'Which basic assignments would you like to include?'
inputs: []
sections: [
{
title: ''
content: [
'<p>Which introductory assignments would you like to use to acclimate your students to Wikipedia? You can select none, one, or more. Whichever you select will be added to the assignment timeline.</p>'
'<ul>
<li><strong>Critique an article.</strong> Critically evaluate an existing Wikipedia article related to the class, and leave suggestions for improving it on the article’s talk page. </li>
<li><strong>Add to an article.</strong> Using course readings or other relevant secondary sources, add 1–2 sentences of new information to a Wikipedia article related to the class. Be sure to integrate it well into the existing article, and include a citation to the source. </li>
<li><strong>Copyedit an article.</strong> Browse Wikipedia until you find an article that you would like to improve, and make some edits to improve the language or formatting. </li>
<li><strong>Illustrate an article.</strong> Find an opportunity to improve an article by uploading and adding a photo you took.</li>
</ul>'
]
}
{
content: [
'<p>For most courses, the <em>Critique an article</em> and <em>Add to an article</em> exercises provide a nice foundation for the main writing project. These have been selected by default.</p>'
]
}
]
}
# Choosing articles (researchwrite)
{
id: 'choosing_articles'
title: 'Choosing articles'
showInOverview: true
formTitle: 'How will your class select articles?'
infoTitle: 'About choosing articles'
inputs: []
sections: [
{
title: ''
content: [
'<p>Choosing the right (or wrong) articles to work on can make (or break) a Wikipedia writing assignment.</p>'
'<p>Some articles may initially look easy to improve, but quality references to expand them may be difficult to find. Finding topics with the right balance between poor Wikipedia coverage and available literature from which to expand that coverage can be tricky. Here are some guidelines to keep in mind when selecting articles for improvement.</p>'
]
}
{
title: 'Good choice'
accordian: true
content: [
"<ul>
<li>Choose a well-established topic for which a lot of literature is available in its field, but which isn't covered extensively on Wikipedia.</li>
<li>Gravitate toward \"stub\" and \"start\" class articles. These articles often have only 1–2 paragraphs of information and are in need of expansion. Relevant WikiProject pages can provide a list of stubs that need improvement.</li>
<li>Before creating a new article, search related topics on Wikipedia to make sure your topic isn't already covered elsewhere. Often, an article may exist under another name, or the topic may be covered as a subsection of a broader article.</li>
</ul>"
]
}
{
title: 'Not such a good choice'
accordian: true
content: [
'<p>Articles that are "not such a good choice" for newcomers usually involve a lack of appropriate research material, highly controversial topics that may already be well developed, broad subjects, or topics for which it is difficult to demonstrate notability.</p>'
"<ul>
<li>You probably shouldn't try to completely overhaul articles on very broad topics (e.g., Law).</li>
<li>You should probably avoid trying to improve articles on topics that are highly controversial (for example, Global Warming, Abortion, or Scientology). You may be more successful starting a sub-article on the topic instead.</li>
<li>Don't work on an article that is already of high quality on Wikipedia, unless you discuss a specific plan for improving it with other editors beforehand.</li>
<li>Avoid working on something with scarce literature. Wikipedia articles cite secondary literature sources, so it's important to have enough sources for verification and to provide a neutral point of view.</li>
<li>Don't start articles with titles that imply an argument or essay-like approach (e.g., The Effects That The Recent Sub-Prime Mortgage Crisis has had on the US and Global Economics). These type of titles, and most likely the content too, may not be appropriate for an encyclopedia.</li>
</ul>"
]
}
{
title: ''
content: [
'<p>As the instructor, you should apply your own expertise to examining Wikipedia’s coverage of your field. You understand the broader intellectual context where individual topics fit in, you can recognize where Wikipedia falls short, you know—or know how to find—the relevant literature, and you know what topics your students should be able to handle. Your guidance on article choice and sourcing is critical for both your students’ success and the improvement of Wikipedia.</p>'
'<p>There are two recommended options for selecting articles:</p>'
]
}
{
title: 'Instructor prepares a list'
content: [
'<p>You (the instructor) prepare a list of appropriate \'non-existent\', \'stub\' or \'start\' articles ahead of time for the students to choose from. If possible, you may want to work with an experienced Wikipedian to create the list. Each student chooses an article from the list to work on. Although this requires more preparation, it may help students to start researching and writing their articles sooner.</p>'
]
}
{
title: 'Students find articles'
content: [
'<p>Each student explores Wikipedia and lists 3–5 topics on their Wikipedia user page that they are interested in for their main project. You (the instructor) should approve article choices before students proceed to writing. Having students find their own articles provides them with a sense of motivation and ownership over the assignment and exercises their critical thinking skills as they identify content gaps, but it may also lead to choices that are further afield from course material.</p>'
]
}
]
}
# Research and planning (researchwrite)
{
id: 'research_planning'
title: 'Research and planning'
showInOverview: true
formTitle: 'How should students plan their articles?'
infoTitle: 'About research and planning'
sections: [
{
title: ''
content: [
"<p>Students often wait until the last minute to do their research, or choose sources unsuitable for Wikipedia. That's why we recommend asking students to put together a bibliography of materials they want to use in editing the article, which can then be assessed by you and other Wikipedians.</p>"
"<p>Then, students should propose outlines for their articles. This can be a traditional outline, in which students identify which sections their articles will have and which aspects of the topic will be covered in each section. Alternatively, students can develop each outline in the form of a Wikipedia lead section — the untitled section at the beginning of an article that defines the topic and provide a concise summary of its content. Would you like your students to create traditional outlines, or compose outlines in the form of a Wikipedia-style lead section?</p>"
]
}
]
inputs: []
}
{
id: "tricky_topics"
title: 'Tricky topic areas'
showInOverview: true
formTitle: 'Will your students work in these areas?'
infoTitle: 'Medicine and other tricky topics'
instructions: 'Writing about some topics on Wikipedia can be especially tricky — in particular, topics related to medicine, human health, and psychology. Is there any chance some of your students will work in these topic areas?'
sections: [
{
title: ''
content: [
"<p>If you expect any of your students to work on medicine-related articles — including psychology — you\'ll need to familiarize yourself, and those students, with the special sourcing rules for these subject areas. These rules also apply if your students will be adding information on, say, the sociological implications of disease or other ways of looking at medical articles.Even if your course is not directly related to medicine, these rules may be important if your students are choosing their own topics.</p>"
]
}
{
title: 'Special considerations for medical and psychology topics'
accordian: true
content: [
"<p>Though it is not a medical resource, many people nonetheless turn to Wikipedia for medical information. Poor medical information on Wikipedia can have terrible consequences. For this reason, the standards for sourcing on medical topics differ from other topic areas. In particular, the use of primary sources is strongly discouraged.</p>"
"<p>By Wikipedia\'s conventions for medical content, inappropriate primary sources include original medical research such as clinical studies, case reports, or animal studies, even if published in respected journals. In general, medical and health-related content should be based on review articles from reputable journals and other professional medical literature. Popular press is not considered a reliable source for medical topics.</p>"
"<p>Topics that involve human psychology — in particular, clinical psychology or abnormal psychology — often overlap with medical topics on Wikipedia. In those cases, the same rules about acceptable sources apply.</p>"
]
}
]
inputs: []
}
# Drafts and mainspace (researchwrite)
{
id: "drafts_mainspace"
showInOverview: true
title: 'Drafts and mainspace'
formTitle: 'Choose one:'
infoTitle: 'About drafts and mainspace'
instructions: 'Once students have gotten a grip on their topics and the sources they will use to write about them, it’s time to start writing on Wikipedia. You can ask them to jump right in and edit live, or start them off in their own sandbox pages. There are pros and cons of each approach.'
sections: [
{
title: 'Pros and cons of sandboxes'
content: [
"<p>Sandboxes — pages associated with an individual editor that are not considered part of Wikipedia proper — make students feel safe. They can edit without the pressure of the whole world reading their drafts or other Wikipedians altering their writing. However, sandbox editing limits many of the unique aspects of Wikipedia as a teaching tool, such as collaborative writing and incremental drafting. Spending more than a week or two in sandboxes is strongly discouraged.</p>"
]
}
{
title: 'Pros and cons of editing live'
content: [
"<p>Editing live is exciting for the students because they can see their changes to the articles immediately and experience the collaborative editing process throughout the assignment. However, because new editors often unintentionally break Wikipedia rules, sometimes students’ additions are questioned or removed.</p>"
]
}
{
title: ''
content: '"<p>Will you have your students draft their early work in sandboxes, or work live from the beginning?</p>"'
}
]
inputs: []
}
# Peer feedback (researchwrite)
{
id: "peer_feedback"
title: 'Peer feedback'
showInOverview: true
infoTitle: 'About peer feedback'
formTitle: "How many peer reviews should each student conduct?"
instructions: "Collaboration is a critical element of contributing to Wikipedia."
sections: [
{
title: ''
content: [
"<p>For some students, this will happen spontaneously; their choice of topics will attract interested Wikipedians who will pitch in with ideas, copyedits, or even substantial contributions to the students’ articles. In many cases, however, there will be little spontaneous editing of students’ articles before the end of the term. Fortunately, you have a classroom full of peer reviewers. You can make the most of this by assigning students to review each others’ articles soon after full-length drafts are posted. This gives students plenty of time to act on the advice of their peers.</p>"
"<p>Peer reviews are another chance for students to practice critical thinking. Useful reviews focus on specific issues that can be improved. Since students are usually hesitant to criticize their classmates—and other Wikipedia editors may get annoyed with a stream of praise from students that glosses over an article's shortcomings—it's important to gives examples of the kinds of constructively critical feedback that are the most helpful.</p>"
"<p>How many peer reviews will you ask each student to contribute during the course?</p>"
]
}
]
inputs: []
}
# Supplementary assignments (researchwrite)
{
id: "supplementary_assignments"
title: 'Supplementary assignments'
showInOverview: true
formTitle: 'Choose supplementary assignments (optional):'
infoTitle: 'About supplementary assignments'
instructions: "By the time students have made improvements based on classmates' comments—and ideally suggestions from you as well—they should have produced nearly complete articles. Now is the chance to encourage them to wade a little deeper into Wikipedia and its norms and criteria to create great content."
sections: [
{
title: ''
content: [
"<p>You’ll probably have discussed many of the core principles of Wikipedia—and related issues you want to focus on—but now that they’ve experienced first-hand how Wikipedia works, this is a good time to return to topics like neutrality, media fluency, and the impacts and limits of Wikipedia. Consider bringing in a guest speaker, having a panel discussion, or simply having an open discussion in class about what the students have done so far and why (or whether) it matters.</p>"
"<p>In addition to the Wikipedia article writing itself, you may want to use a supplementary assignment. These assignments can reinforce and deepen your course's learning outcomes, and also help you to understand and evaluate the students' work and learning outcomes. On the left are some of the effective supplementary assignments that instructors often use. Scroll over each for more information, and select any that you wish to use for your course.</p>"
]
}
]
inputs: []
}
# DYK process (researchwrite)
{
id: "dyk"
title: 'DYK process'
showInOverview: false
infoTitle: 'About the <em>Did You Know</em> process'
formTitle: "Would you like to include DYK as an ungraded option?"
sections: [
{
title: ''
content: [
"<p>Did You Know (DYK) is a section on Wikipedia’s main page highlighting new content that has been added to Wikipedia in the last seven days. DYK can be a great opportunity to get students excited about their work. A typical DYK article will be viewed hundreds or thousands of times during its 6 hours in the spotlight.</p>"
"<p>The general criteria for DYK eligibility are that an article is larger than 1,500 characters of original, well-sourced content (about four paragraphs) and that it has been created or expanded (by a factor of 5x or more) within the last seven days. Students who meet this criteria may want to nominate their contributions for DYK.</p>"
"<p>The short window of eligibility, and the strict rules of the nomination process, can make it challenging to incorporate DYK into a classroom project. The DYK process should not be a required part of your assignment, as the DYK nomination process can be difficult for newcomers to navigate. However, it makes a great stretch goal when used selectively.</p>"
"<p>Would you like to include DYK as an ungraded option? If so, the Wiki Ed team can help you and your students during the term to identify work that may be a good candidate for DYK and answer questions you may have about the nomination process.</p>"
]
}
]
inputs: []
}
# Good article process (researchwrite)
{
id: "ga"
title: 'Good Article process'
showInOverview: false
infoTitle: 'About the <em>Good Article</em> process'
formTitle: "Would you like to include this as an ungraded option?"
sections: [
{
title: ''
content: [
"<p>Well-developed articles that have passed a Good Article (GA) review are a substantial achievement in their own right, and can also qualify for DYK. This peer review process involves checking a polished article against Wikipedia's GA criteria: articles must be well-written, verifiable and well-sourced with no original research, broad in coverage, neutral, stable, and appropriately illustrated (when possible). Practically speaking, a potential Good Article should look and sound like other well-developed Wikipedia articles, and it should provide a solid, well-balanced treatment of its subject.</p>"
"<p>The Good Article nominations process generally takes some time — between several days and several weeks, depending on the interest of reviewers and the size of the review backlog in the subject area — and should only be undertaken for articles that are already very well-developed. Typically, reviewers will identify further specific areas for improvement, and the article will be promoted to Good Article status if all the reviewers' concerns are addressed. Because of the uncertain timeline and the frequent need to make substantial changes to articles, Good Article nominations usually only make sense for articles that reach a mature state several weeks before the end of term, and those written by student editors who are already experienced, strong writers and who are willing to come back to address reviewer feedback (even after the term ends)</em>.</p>"
"<p>Would you like to include this as an ungraded option? If so, the Wiki Ed team can provide advice and support to high-achieving students who are interested in the Good Article process.</p>"
]
}
]
inputs: []
}
# {
# id: "overview"
# title: 'Assignment overview'
# showInOverview: false
# infoTitle: "About the course"
# formTitle: ""
# sections: [
# {
# content: [
# "<p>Now it's time to write a short description of your course and how this Wikipedia assignment fits into it. This will allow other Wikipedia editors to understand what students will be doing. Be sure to mention:"
# "<ul>
# <li>topics you're covering in the class</li>
# <li>what students will be asked to do on Wikipedia</li>
# <li>what types of articles your class will be working on</li>
# </ul>"
# ]
# }
# {
# content: [
# "<p class='description-container' style='margin-bottom:0;'></p>"
# "<div class='form-container'>
# <form id='courseLength' oninput='out.value = parseInt(courseLength.value); out2.value = parseInt(courseLength.value);' onsubmit='return false'>
# <div class='overview-input-container'>
# <label for='termStartDate'>Term begins</label>
# <input id='termStartDate' name='termStartDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='termEndDate'>Term ends</label>
# <input id='termEndDate' name='termEndDate' type='date'>
# </div>
# <!-- %div.overview-input-container -->
# <!-- %label{:for => 'endDate'} End Week of -->
# <!-- %input{:type => 'date', :id => 'endDate', :name => 'endDate'} -->
# <div class='overview-input-container'>
# <label for='courseStartDate'>Course starts on</label>
# <input id='courseStartDate' name='courseStartDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='startWeekOfDate'>Start week of</label>
# <input id='startWeekOfDate' name='startWeekOfDate' type='date'>
# </div>
# <div class='overview-input-container' style='display: none;'>
# <label for='endWeekOfDate'>End week of</label>
# <input id='endWeekOfDate' name='endWeekOfDate' type='date'>
# </div>
# <div class='overview-input-container'>
# <label for='courseLength'>Course Length</label>
# <input defaultValue='16' id='cLength' max='16' min='6' name='courseLength' step='1' type='range' value='16'>
# <output name='out2'>16</output>
# <span>weeks</span>
# </div>
# <div class='overview-select-container'>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='monday' name='Monday' type='checkbox' value='0'>
# <label for='monday'>Mondays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='tuesday' name='Tuesday' type='checkbox' value='1'>
# <label for='tuesday'>Tuesdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='wednesday' name='Wednesday' type='checkbox' value='2'>
# <label for='wednesday'>Wednesdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='thursday' name='Thursday' type='checkbox' value='3'>
# <label for='thursday'>Thursdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='friday' name='Friday' type='checkbox' value='4'>
# <label for='friday'>Fridays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='saturday' name='Saturday' type='checkbox' value='5'>
# <label for='saturday'>Saturdays</label>
# </div>
# <div class='overview-select-input-container'>
# <input class='dowCheckbox' id='sunday' name='Sunday' type='checkbox' value='6'>
# <label for='sunday'>Sundays</label>
# </div>
# </div>
# <div class='overview-readout-header'>
# <div class='readout'>
# <output for='courseLength' id='courseLengthReadout' name='out'>16</output>
# <span>weeks</span>
# </div>
# </div>
# </form>
# </div>
# <div>
# <div class='preview-container'></div>
# </div>"
# ]
# }
# {
# content: [
# "<div class='step-form-dates'></div>"
# ]
# }
# {
# title: ''
# content: [
# "<p><a id='publish' href='#' class='button' style='display:inline-block;text-align:center;'>Publish</a></p>"
# ]
# }
# ]
# inputs: []
# }
]
###################################
multimedia: [
# There are no extra steps associated with the multimedia path, so it skips straight to the outro.
]
###################################
copyedit: [
# There are no extra steps associated with the copyedit path, so it skips straight to the outro.
]
###################################
translation: [
# Translation step 1: Training / Translation essentials
{
id: "translation_essentials"
title: "Translation essentials"
showInOverview: true
formTitle: "Choose one"
infoTitle: "Preparing for a translation assignment"
sections: [
{
content: [
"<p>To get started, you'll want to introduce your students to the basic rules of writing Wikipedia articles and working with the Wikipedia community.</p>"
"<p>The online Student Training for Translation Assignments is a very brief introduction to the basics of editing Wikipedia. Should completion of the training be part of your students’ grade? (Make your choice at the top left).</p>"
]
}
]
}
# Translation step 2: Choosing articles
{
id: "translation_choosing_articles"
title: "Choosing articles"
showInOverview: true
formTitle: "How will your class select articles?"
infoTitle: "About choosing articles "
inputs: []
sections: [
{
title: ''
content: [
"<p>Students should find articles that have gone through Wikipedia's peer review process and have been deemed Good or Featured Articles in the language they are studying. For this assignment, students will copy these articles from the target language into a sandbox, and begin translating it into their L1.</p>"
"<p>Generally, articles that are ripe for translation are those that:</p>"
"<ul>
<li>are relevant to the culture, history, or other aspect of their target language (L2).</li>
<li>are quality articles in their target language’s Wikipedia, but not on their L1 Wikipedia.</li>
</ul>"
"<p>There are two recommended options for selecting articles:</p>"
]
}
{
title: 'Instructor prepares a list'
content: [
'<p>You (the instructor) prepare a list of appropriate articles from the target-language Wikipedia ahead of time for the students to choose from. If possible, you may want to work with an experienced Wikipedian to create the list. Each student chooses an article from your list to work on. Although this requires more preparation, it may help students to start translating their articles sooner, which is useful for shorter Wikipedia assignments.</p>'
]
}
{
title: 'Students find articles'
content: [
'<p>Each student explores the target-language Wikipedia and lists two topics on their Wikipedia user page that they are interested in for their main project. You (the instructor) should approve article choices before students proceed to translating. Having students find their own articles provides them with a sense of motivation and ownership over the assignment and exercises their critical thinking skills as they identify content gaps, but it may also lead to choices that are further afield from course material. This options is useful for classes that spend more time on the Wikipedia assignment.</p>'
]
}
]
}
# Translation step 3: Media literacy
{
id: "translation_media_literacy"
title: "Media literacy"
showInOverview: true
formTitle: "Add a media literacy component?"
infoTitle: "Optional: add a more challenging media literacy component"
instructions: "For advanced students, you can add a challenging media literacy component to your course. This requires students to fact-check information on the target-language Wikipedia using the target language. They also find sources in their base language to support those claims on their base-language Wikipedia."
}
]
}
# Outro steps common to all branches
outro_steps: [
{
id: "grading"
title: 'Grading'
showInOverview: false
formTitle: "How will students' grades for assignments be determined?"
infoTitle: "About grading"
instructions: 'Grading Wikipedia assignments can be a challenge. Here are some tips for grading your Wikipedia assignments:'
sections: [
{
title: 'Know all of your students\' Wikipedia usernames.'
accordian: true
content: [
"<p>Without knowing the students' usernames, you won't be able to grade them.</p>"
"<p>Make sure all students enroll on the course page. Once all students have signed the list, you can click on \"user contributions\" (in the menu bar on the left hand side of your browser screen) to review that student's activities on Wikipedia. If you have made student training compulsory, you can check the <a href='https://en.wikipedia.org/wiki/Wikipedia:Training/For_students/Training_feedback' target='_blank'>feedback page</a> to see which students have completed it.</p>"
]
}
{
title: 'Be specific about your expectations.'
accordian: true
content: [
"<p>Being specific about what you expect your students to do is crucial for grading. For example, students could be asked to add a minimum of three sections to an existing article, or a minimum of eight references to an existing article that lacks the appropriate sourcing.</p>"
]
}
{
title: 'Grade based on what students contribute to Wikipedia, not what remains on Wikipedia at the course\'s end.'
accordian: true
content: [
"<p>You can see a student's contributions in the article history, even if some writing was removed by the community (or the student). A student’s content could be edited for many reasons, and can even be evidence of a student reflecting critically on their own contributions. Furthermore, if students feel they must defend control of an article for the sake of their grade, this can lead to conflict with other editors.</p>"
"<p>Wikipedia is a collaborative writing environment driven by verifiability, noteworthiness and neutral point of view – all of which have created challenges for students familiar with a persuasive writing format in classrooms. Encourage students to reflect on edits to improve their understanding of the process and the community.</p>"
]
}
]
inputs: []
}
{
id: "overview"
title: 'Assignment overview'
showInOverview: false
infoTitle: "About the course"
formTitle: ""
sections: [
{
content: [
"<p>Now it's time to write a short description of your course and how this Wikipedia assignment fits into it. This will allow other Wikipedia editors to understand what students will be doing. Be sure to mention:"
"<ul>
<li>topics you're covering in the class</li>
<li>what students will be asked to do on Wikipedia</li>
<li>what types of articles your class will be working on</li>
</ul>"
]
}
{
content: [
"<p class='description-container' style='margin-bottom:0;'></p>"
"<div>
<div class='preview-container'></div>
</div>"
]
}
# {
# content: [
# "<div class='step-form-dates'></div>"
# ]
# }
{
title: ''
content: [
"<p><a id='publish' href='#' class='button' style='display:inline-block;text-align:center;'>Publish</a></p>"
]
}
]
inputs: []
}
]
}
module.exports = WizardConfig
|
[
{
"context": " .matchHeader(\"authorization\", \"Bearer auth-token-123\")\n .get(\"/projects/id-123/builds\")\n .re",
"end": 3821,
"score": 0.5706019997596741,
"start": 3818,
"tag": "PASSWORD",
"value": "123"
},
{
"context": " .matchHeader(\"authorization\", \"Bearer auth-t... | packages/server/test/unit/api_spec.coffee | smartmanru/cypress | 0 | require("../spec_helper")
_ = require("lodash")
rp = require("request-promise")
os = require("os")
nmi = require("node-machine-id")
pkg = require("@packages/root")
api = require("#{root}lib/api")
Promise = require("bluebird")
describe "lib/api", ->
beforeEach ->
@sandbox.stub(os, "platform").returns("linux")
context ".getOrgs", ->
it "GET /orgs + returns orgs", ->
orgs = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/organizations")
.reply(200, orgs)
api.getOrgs("auth-token-123")
.then (ret) ->
expect(ret).to.eql(orgs)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/organizations")
.reply(500, {})
api.getOrgs("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjects", ->
it "GET /projects + returns projects", ->
projects = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects")
.reply(200, projects)
api.getProjects("auth-token-123")
.then (ret) ->
expect(ret).to.eql(projects)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects")
.reply(500, {})
api.getProjects("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProject", ->
it "GET /projects/:id + returns project", ->
project = { id: "id-123" }
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.matchHeader("x-route-version", "2")
.get("/projects/id-123")
.reply(200, project)
api.getProject("id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.eql(project)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123")
.reply(500, {})
api.getProject("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectRuns", ->
it "GET /projects/:id/builds + returns builds", ->
builds = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.reply(200, builds)
api.getProjectRuns("id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.eql(builds)
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.socketDelay(5000)
.reply(200, [])
api.getProjectRuns("id-123", "auth-token-123", {timeout: 100})
.then (ret) ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "get").returns({
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
then: (fn) -> fn()
})
api.getProjectRuns("id-123", "auth-token-123")
.then (ret) ->
expect(rp.get).to.be.calledWithMatch({timeout: 10000})
it "GET /projects/:id/builds failure formatting", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.reply(401, {
errors: {
permission: ["denied"]
}
})
api.getProjectRuns("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
401
{
"errors": {
"permission": [
"denied"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.reply(500, {})
api.getProjectRuns("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".ping", ->
it "GET /ping", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.get("/ping")
.reply(200, "OK")
api.ping()
.then (resp) ->
expect(resp).to.eq("OK")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/ping")
.reply(500, {})
api.ping()
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createRun", ->
beforeEach ->
@buildProps = {
projectId: "id-123"
recordKey: "token-123"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "brian@cypress.io"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
}
it "POST /builds + returns buildId", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds", @buildProps)
.reply(200, {
buildId: "new-build-id-123"
})
api.createRun(@buildProps)
.then (ret) ->
expect(ret).to.eq("new-build-id-123")
it "POST /builds failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds", {
projectId: null
recordKey: "token-123"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "brian@cypress.io"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
})
.reply(422, {
errors: {
buildId: ["is required"]
}
})
api.createRun({
projectId: null
recordKey: "token-123"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "brian@cypress.io"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"buildId": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds")
.socketDelay(5000)
.reply(200, {})
api.createRun({
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "post").returns({
promise: -> {
get: -> {
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
}
}
})
api.createRun({})
.then ->
expect(rp.post).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/builds", @buildProps)
.reply(500, {})
api.createRun(@buildProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createInstance", ->
beforeEach ->
Object.defineProperty(process.versions, "chrome", {
value: "53"
})
@postProps = {
spec: "cypress/integration/app_spec.js"
browserName: "Electron"
browserVersion: "53"
osName: "darwin"
osVersion: "10.10.10"
osCpus: [{model: "foo"}]
osMemory: {
free: 1000
total: 2000
}
}
@createProps = {
buildId: "build-id-123"
spec: "cypress/integration/app_spec.js"
}
it "POSTs /builds/:id/instances", ->
@sandbox.stub(os, "release").returns("10.10.10")
@sandbox.stub(os, "cpus").returns([{model: "foo"}])
@sandbox.stub(os, "freemem").returns(1000)
@sandbox.stub(os, "totalmem").returns(2000)
os.platform.returns("darwin")
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "darwin")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances", @postProps)
.reply(200, {
instanceId: "instance-id-123"
})
api.createInstance(@createProps)
.then (instanceId) ->
expect(instanceId).to.eq("instance-id-123")
it "POST /builds/:id/instances failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.createInstance({buildId: "build-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances")
.socketDelay(5000)
.reply(200, {})
api.createInstance({
buildId: "build-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "post").returns({
promise: -> {
get: -> {
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
}
}
})
api.createInstance({})
.then ->
expect(rp.post).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/builds/build-id-123/instances", @postProps)
.reply(500, {})
api.createInstance(@createProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateInstance", ->
beforeEach ->
Object.defineProperty(process.versions, "chrome", {
value: "53"
})
@putProps = {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: []
failingTests: []
cypressConfig: {}
ciProvider: "circle"
stdout: "foo\nbar\nbaz"
}
@updateProps = {
instanceId: "instance-id-123"
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: []
failingTests: []
cypressConfig: {}
ciProvider: "circle"
stdout: "foo\nbar\nbaz"
}
it "PUTs /instances/:id", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123", @putProps)
.reply(200)
api.updateInstance(@updateProps)
it "PUT /instances/:id failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.updateInstance({instanceId: "instance-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123")
.socketDelay(5000)
.reply(200, {})
api.updateInstance({
instanceId: "instance-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "put").resolves()
api.updateInstance({})
.then ->
expect(rp.put).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.put("/instances/instance-id-123", @putProps)
.reply(500, {})
api.updateInstance(@updateProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateInstanceStdout", ->
it "PUTs /instances/:id/stdout", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout", {
stdout: "foobarbaz\n"
})
.reply(200)
api.updateInstanceStdout({
instanceId: "instance-id-123"
stdout: "foobarbaz\n"
})
it "PUT /instances/:id/stdout failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.updateInstanceStdout({instanceId: "instance-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout")
.socketDelay(5000)
.reply(200, {})
api.updateInstanceStdout({
instanceId: "instance-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "put").resolves()
api.updateInstanceStdout({})
.then ->
expect(rp.put).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.put("/instances/instance-id-123/stdout", {
stdout: "foobarbaz\n"
})
.reply(500, {})
api.updateInstanceStdout({
instanceId: "instance-id-123"
stdout: "foobarbaz\n"
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getLoginUrl", ->
it "GET /auth + returns the url", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.get("/auth")
.reply(200, {
url: "https://github.com/authorize"
})
api.getLoginUrl().then (url) ->
expect(url).to.eq("https://github.com/authorize")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/auth")
.reply(500, {})
api.getLoginUrl()
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createSignin", ->
it "POSTs /signin + returns user object", ->
@sandbox.stub(nmi, "machineId").resolves("12345")
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.matchHeader("x-machine-id", "12345")
.post("/signin")
.query({code: "abc-123"})
.reply(200, {
name: "brian"
})
api.createSignin("abc-123").then (user) ->
expect(user).to.deep.eq({
name: "brian"
})
it "handles nmi errors", ->
@sandbox.stub(nmi, "machineId").rejects(new Error("foo"))
nock("http://localhost:1234", {
"badheaders": ["x-machine-id"]
})
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.matchHeader("x-accept-terms", "true")
.post("/signin")
.query({code: "abc-123"})
.reply(200, {
name: "brian"
})
api.createSignin("abc-123").then (user) ->
expect(user).to.deep.eq({
name: "brian"
})
it "handles 401 exceptions", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.post("/signin")
.query({code: "abc-123"})
.reply(401, "Your email: 'brian@gmail.com' has not been authorized.")
api.createSignin("abc-123")
.then ->
throw new Error("should have thrown error")
.catch (err) ->
expect(err.message).to.eq("Your email: 'brian@gmail.com' has not been authorized.")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signin")
.reply(500, {})
api.createSignin("abc-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createSignout", ->
it "POSTs /signout", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signout")
.reply(200)
api.createSignout("auth-token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signout")
.reply(500, {})
api.createSignout("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createProject", ->
beforeEach ->
@postProps = {
name: "foobar"
orgId: "org-id-123"
public: true
remoteOrigin: "remoteOrigin"
}
@createProps = {
projectName: "foobar"
orgId: "org-id-123"
public: true
}
it "POST /projects", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "2")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", @postProps)
.reply(200, {
id: "id-123"
name: "foobar"
orgId: "org-id-123"
public: true
})
api.createProject(@createProps, "remoteOrigin", "auth-token-123")
.then (projectDetails) ->
expect(projectDetails).to.eql({
id: "id-123"
name: "foobar"
orgId: "org-id-123"
public: true
})
it "POST /projects failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "2")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", {
name: "foobar"
orgId: "org-id-123"
public: true
remoteOrigin: "remoteOrigin"
})
.reply(422, {
errors: {
orgId: ["is required"]
}
})
projectDetails = {
projectName: "foobar"
orgId: "org-id-123"
public: true
}
api.createProject(projectDetails, "remoteOrigin", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"orgId": [
"is required"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", @postProps)
.reply(500, {})
api.createProject(@createProps, "remoteOrigin", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectRecordKeys", ->
it "GET /projects/:id/keys + returns keys", ->
recordKeys = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/keys")
.reply(200, recordKeys)
api.getProjectRecordKeys("id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.eql(recordKeys)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/keys")
.reply(500, {})
api.getProjectRecordKeys("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".requestAccess", ->
it "POST /projects/:id/membership_requests + returns response", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(200)
api.requestAccess("project-id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.be.undefined
it "POST /projects/:id/membership_requests failure formatting", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(422, {
errors: {
access: ["already requested"]
}
})
api.requestAccess("project-id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"access": [
"already requested"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(500, {})
api.requestAccess("project-id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectToken", ->
it "GETs /projects/:id/token", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/project-123/token")
.reply(200, {
apiToken: "token-123"
})
api.getProjectToken("project-123", "auth-token-123")
.then (resp) ->
expect(resp).to.eq("token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/project-123/token")
.reply(500, {})
api.getProjectToken("project-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateProjectToken", ->
it "PUTs /projects/:id/token", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.put("/projects/project-123/token")
.reply(200, {
apiToken: "token-123"
})
api.updateProjectToken("project-123", "auth-token-123")
.then (resp) ->
expect(resp).to.eq("token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.put("/projects/project-id-123/token")
.reply(500, {})
api.updateProjectToken("project-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createRaygunException", ->
beforeEach ->
@setup = (body, authToken, delay = 0) ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer #{authToken}")
.post("/exceptions", body)
.delayConnection(delay)
.reply(200)
it "POSTs /exceptions", ->
@setup({foo: "bar"}, "auth-token-123")
api.createRaygunException({foo: "bar"}, "auth-token-123")
it "by default times outs after 3 seconds", ->
## return our own specific promise
## so we can spy on the timeout function
p = Promise.resolve()
@sandbox.spy(p, "timeout")
@sandbox.stub(rp.Request.prototype, "promise").returns(p)
@setup({foo: "bar"}, "auth-token-123")
api.createRaygunException({foo: "bar"}, "auth-token-123").then ->
expect(p.timeout).to.be.calledWith(3000)
it "times out after exceeding timeout", ->
## force our connection to be delayed 5 seconds
@setup({foo: "bar"}, "auth-token-123", 5000)
## and set the timeout to only be 50ms
api.createRaygunException({foo: "bar"}, "auth-token-123", 50)
.then ->
throw new Error("errored: it did not catch the timeout error!")
.catch Promise.TimeoutError, ->
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.post("/exceptions", {foo: "bar"})
.reply(500, {})
api.createRaygunException({foo: "bar"}, "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
| 99166 | require("../spec_helper")
_ = require("lodash")
rp = require("request-promise")
os = require("os")
nmi = require("node-machine-id")
pkg = require("@packages/root")
api = require("#{root}lib/api")
Promise = require("bluebird")
describe "lib/api", ->
beforeEach ->
@sandbox.stub(os, "platform").returns("linux")
context ".getOrgs", ->
it "GET /orgs + returns orgs", ->
orgs = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/organizations")
.reply(200, orgs)
api.getOrgs("auth-token-123")
.then (ret) ->
expect(ret).to.eql(orgs)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/organizations")
.reply(500, {})
api.getOrgs("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjects", ->
it "GET /projects + returns projects", ->
projects = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects")
.reply(200, projects)
api.getProjects("auth-token-123")
.then (ret) ->
expect(ret).to.eql(projects)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects")
.reply(500, {})
api.getProjects("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProject", ->
it "GET /projects/:id + returns project", ->
project = { id: "id-123" }
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.matchHeader("x-route-version", "2")
.get("/projects/id-123")
.reply(200, project)
api.getProject("id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.eql(project)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123")
.reply(500, {})
api.getProject("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectRuns", ->
it "GET /projects/:id/builds + returns builds", ->
builds = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.reply(200, builds)
api.getProjectRuns("id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.eql(builds)
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.socketDelay(5000)
.reply(200, [])
api.getProjectRuns("id-123", "auth-token-123", {timeout: 100})
.then (ret) ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "get").returns({
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
then: (fn) -> fn()
})
api.getProjectRuns("id-123", "auth-token-123")
.then (ret) ->
expect(rp.get).to.be.calledWithMatch({timeout: 10000})
it "GET /projects/:id/builds failure formatting", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-<PASSWORD>")
.get("/projects/id-123/builds")
.reply(401, {
errors: {
permission: ["denied"]
}
})
api.getProjectRuns("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
401
{
"errors": {
"permission": [
"denied"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-<PASSWORD>")
.get("/projects/id-123/builds")
.reply(500, {})
api.getProjectRuns("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".ping", ->
it "GET /ping", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.get("/ping")
.reply(200, "OK")
api.ping()
.then (resp) ->
expect(resp).to.eq("OK")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/ping")
.reply(500, {})
api.ping()
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createRun", ->
beforeEach ->
@buildProps = {
projectId: "id-123"
recordKey: "<KEY>"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "<EMAIL>"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
}
it "POST /builds + returns buildId", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds", @buildProps)
.reply(200, {
buildId: "new-build-id-123"
})
api.createRun(@buildProps)
.then (ret) ->
expect(ret).to.eq("new-build-id-123")
it "POST /builds failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds", {
projectId: null
recordKey: "<KEY>"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "<EMAIL>"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
})
.reply(422, {
errors: {
buildId: ["is required"]
}
})
api.createRun({
projectId: null
recordKey: "<KEY>"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "<EMAIL>"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"buildId": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds")
.socketDelay(5000)
.reply(200, {})
api.createRun({
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "post").returns({
promise: -> {
get: -> {
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
}
}
})
api.createRun({})
.then ->
expect(rp.post).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/builds", @buildProps)
.reply(500, {})
api.createRun(@buildProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createInstance", ->
beforeEach ->
Object.defineProperty(process.versions, "chrome", {
value: "53"
})
@postProps = {
spec: "cypress/integration/app_spec.js"
browserName: "Electron"
browserVersion: "53"
osName: "darwin"
osVersion: "10.10.10"
osCpus: [{model: "foo"}]
osMemory: {
free: 1000
total: 2000
}
}
@createProps = {
buildId: "build-id-123"
spec: "cypress/integration/app_spec.js"
}
it "POSTs /builds/:id/instances", ->
@sandbox.stub(os, "release").returns("10.10.10")
@sandbox.stub(os, "cpus").returns([{model: "foo"}])
@sandbox.stub(os, "freemem").returns(1000)
@sandbox.stub(os, "totalmem").returns(2000)
os.platform.returns("darwin")
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "darwin")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances", @postProps)
.reply(200, {
instanceId: "instance-id-123"
})
api.createInstance(@createProps)
.then (instanceId) ->
expect(instanceId).to.eq("instance-id-123")
it "POST /builds/:id/instances failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.createInstance({buildId: "build-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances")
.socketDelay(5000)
.reply(200, {})
api.createInstance({
buildId: "build-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "post").returns({
promise: -> {
get: -> {
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
}
}
})
api.createInstance({})
.then ->
expect(rp.post).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer <KEY>")
.post("/builds/build-id-123/instances", @postProps)
.reply(500, {})
api.createInstance(@createProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateInstance", ->
beforeEach ->
Object.defineProperty(process.versions, "chrome", {
value: "53"
})
@putProps = {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: []
failingTests: []
cypressConfig: {}
ciProvider: "circle"
stdout: "foo\nbar\nbaz"
}
@updateProps = {
instanceId: "instance-id-123"
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: []
failingTests: []
cypressConfig: {}
ciProvider: "circle"
stdout: "foo\nbar\nbaz"
}
it "PUTs /instances/:id", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123", @putProps)
.reply(200)
api.updateInstance(@updateProps)
it "PUT /instances/:id failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.updateInstance({instanceId: "instance-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123")
.socketDelay(5000)
.reply(200, {})
api.updateInstance({
instanceId: "instance-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "put").resolves()
api.updateInstance({})
.then ->
expect(rp.put).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "<KEY> <KEY>-<PASSWORD>")
.put("/instances/instance-id-123", @putProps)
.reply(500, {})
api.updateInstance(@updateProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateInstanceStdout", ->
it "PUTs /instances/:id/stdout", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout", {
stdout: "foobarbaz\n"
})
.reply(200)
api.updateInstanceStdout({
instanceId: "instance-id-123"
stdout: "foobarbaz\n"
})
it "PUT /instances/:id/stdout failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.updateInstanceStdout({instanceId: "instance-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout")
.socketDelay(5000)
.reply(200, {})
api.updateInstanceStdout({
instanceId: "instance-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "put").resolves()
api.updateInstanceStdout({})
.then ->
expect(rp.put).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.put("/instances/instance-id-123/stdout", {
stdout: "foobarbaz\n"
})
.reply(500, {})
api.updateInstanceStdout({
instanceId: "instance-id-123"
stdout: "foobarbaz\n"
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getLoginUrl", ->
it "GET /auth + returns the url", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.get("/auth")
.reply(200, {
url: "https://github.com/authorize"
})
api.getLoginUrl().then (url) ->
expect(url).to.eq("https://github.com/authorize")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-<PASSWORD>")
.get("/auth")
.reply(500, {})
api.getLoginUrl()
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createSignin", ->
it "POSTs /signin + returns user object", ->
@sandbox.stub(nmi, "machineId").resolves("12345")
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.matchHeader("x-machine-id", "12345")
.post("/signin")
.query({code: "abc-123"})
.reply(200, {
name: "<NAME>"
})
api.createSignin("abc-123").then (user) ->
expect(user).to.deep.eq({
name: "<NAME>"
})
it "handles nmi errors", ->
@sandbox.stub(nmi, "machineId").rejects(new Error("foo"))
nock("http://localhost:1234", {
"badheaders": ["x-machine-id"]
})
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.matchHeader("x-accept-terms", "true")
.post("/signin")
.query({code: "abc-123"})
.reply(200, {
name: "<NAME>"
})
api.createSignin("abc-123").then (user) ->
expect(user).to.deep.eq({
name: "<NAME>"
})
it "handles 401 exceptions", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.post("/signin")
.query({code: "abc-123"})
.reply(401, "Your email: '<EMAIL>' has not been authorized.")
api.createSignin("abc-123")
.then ->
throw new Error("should have thrown error")
.catch (err) ->
expect(err.message).to.eq("Your email: '<EMAIL>' has not been authorized.")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signin")
.reply(500, {})
api.createSignin("abc-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createSignout", ->
it "POSTs /signout", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signout")
.reply(200)
api.createSignout("auth-token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signout")
.reply(500, {})
api.createSignout("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createProject", ->
beforeEach ->
@postProps = {
name: "<NAME>"
orgId: "org-id-123"
public: true
remoteOrigin: "remoteOrigin"
}
@createProps = {
projectName: "foobar"
orgId: "org-id-123"
public: true
}
it "POST /projects", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "2")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", @postProps)
.reply(200, {
id: "id-123"
name: "<NAME>"
orgId: "org-id-123"
public: true
})
api.createProject(@createProps, "remoteOrigin", "auth-token-123")
.then (projectDetails) ->
expect(projectDetails).to.eql({
id: "id-123"
name: "<NAME>"
orgId: "org-id-123"
public: true
})
it "POST /projects failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "2")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", {
name: "<NAME>"
orgId: "org-id-123"
public: true
remoteOrigin: "remoteOrigin"
})
.reply(422, {
errors: {
orgId: ["is required"]
}
})
projectDetails = {
projectName: "foobar"
orgId: "org-id-123"
public: true
}
api.createProject(projectDetails, "remoteOrigin", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"orgId": [
"is required"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", @postProps)
.reply(500, {})
api.createProject(@createProps, "remoteOrigin", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectRecordKeys", ->
it "GET /projects/:id/keys + returns keys", ->
recordKeys = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/keys")
.reply(200, recordKeys)
api.getProjectRecordKeys("id-<KEY>2<KEY>", "auth-<KEY>")
.then (ret) ->
expect(ret).to.eql(recordKeys)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/keys")
.reply(500, {})
api.getProjectRecordKeys("<KEY>", "<KEY>")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".requestAccess", ->
it "POST /projects/:id/membership_requests + returns response", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(200)
api.requestAccess("project-id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.be.undefined
it "POST /projects/:id/membership_requests failure formatting", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(422, {
errors: {
access: ["already requested"]
}
})
api.requestAccess("project-id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"access": [
"already requested"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(500, {})
api.requestAccess("project-id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectToken", ->
it "GETs /projects/:id/token", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/project-123/token")
.reply(200, {
apiToken: "<KEY>"
})
api.getProjectToken("project-123", "auth-token-123")
.then (resp) ->
expect(resp).to.eq("token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/project-123/token")
.reply(500, {})
api.getProjectToken("project-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateProjectToken", ->
it "PUTs /projects/:id/token", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer <KEY>-<KEY>")
.put("/projects/project-123/token")
.reply(200, {
apiToken: "<KEY>"
})
api.updateProjectToken("project-123", "auth-<KEY>")
.then (resp) ->
expect(resp).to.eq("token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-<PASSWORD>")
.put("/projects/project-id-123/token")
.reply(500, {})
api.updateProjectToken("project-123", "auth-token-<PASSWORD>")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createRaygunException", ->
beforeEach ->
@setup = (body, authToken, delay = 0) ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer #{authToken}")
.post("/exceptions", body)
.delayConnection(delay)
.reply(200)
it "POSTs /exceptions", ->
@setup({foo: "bar"}, "auth-token-123")
api.createRaygunException({foo: "bar"}, "auth-token-123")
it "by default times outs after 3 seconds", ->
## return our own specific promise
## so we can spy on the timeout function
p = Promise.resolve()
@sandbox.spy(p, "timeout")
@sandbox.stub(rp.Request.prototype, "promise").returns(p)
@setup({foo: "bar"}, "auth-token-123")
api.createRaygunException({foo: "bar"}, "auth-token-123").then ->
expect(p.timeout).to.be.calledWith(3000)
it "times out after exceeding timeout", ->
## force our connection to be delayed 5 seconds
@setup({foo: "bar"}, "auth-token-123", 5000)
## and set the timeout to only be 50ms
api.createRaygunException({foo: "bar"}, "auth-token-123", 50)
.then ->
throw new Error("errored: it did not catch the timeout error!")
.catch Promise.TimeoutError, ->
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-<PASSWORD>")
.post("/exceptions", {foo: "bar"})
.reply(500, {})
api.createRaygunException({foo: "bar"}, "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
| true | require("../spec_helper")
_ = require("lodash")
rp = require("request-promise")
os = require("os")
nmi = require("node-machine-id")
pkg = require("@packages/root")
api = require("#{root}lib/api")
Promise = require("bluebird")
describe "lib/api", ->
beforeEach ->
@sandbox.stub(os, "platform").returns("linux")
context ".getOrgs", ->
it "GET /orgs + returns orgs", ->
orgs = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/organizations")
.reply(200, orgs)
api.getOrgs("auth-token-123")
.then (ret) ->
expect(ret).to.eql(orgs)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/organizations")
.reply(500, {})
api.getOrgs("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjects", ->
it "GET /projects + returns projects", ->
projects = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects")
.reply(200, projects)
api.getProjects("auth-token-123")
.then (ret) ->
expect(ret).to.eql(projects)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects")
.reply(500, {})
api.getProjects("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProject", ->
it "GET /projects/:id + returns project", ->
project = { id: "id-123" }
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.matchHeader("x-route-version", "2")
.get("/projects/id-123")
.reply(200, project)
api.getProject("id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.eql(project)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123")
.reply(500, {})
api.getProject("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectRuns", ->
it "GET /projects/:id/builds + returns builds", ->
builds = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.reply(200, builds)
api.getProjectRuns("id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.eql(builds)
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/builds")
.socketDelay(5000)
.reply(200, [])
api.getProjectRuns("id-123", "auth-token-123", {timeout: 100})
.then (ret) ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "get").returns({
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
then: (fn) -> fn()
})
api.getProjectRuns("id-123", "auth-token-123")
.then (ret) ->
expect(rp.get).to.be.calledWithMatch({timeout: 10000})
it "GET /projects/:id/builds failure formatting", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-PI:PASSWORD:<PASSWORD>END_PI")
.get("/projects/id-123/builds")
.reply(401, {
errors: {
permission: ["denied"]
}
})
api.getProjectRuns("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
401
{
"errors": {
"permission": [
"denied"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-PI:PASSWORD:<PASSWORD>END_PI")
.get("/projects/id-123/builds")
.reply(500, {})
api.getProjectRuns("id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".ping", ->
it "GET /ping", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.get("/ping")
.reply(200, "OK")
api.ping()
.then (resp) ->
expect(resp).to.eq("OK")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/ping")
.reply(500, {})
api.ping()
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createRun", ->
beforeEach ->
@buildProps = {
projectId: "id-123"
recordKey: "PI:KEY:<KEY>END_PI"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "PI:EMAIL:<EMAIL>END_PI"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
}
it "POST /builds + returns buildId", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds", @buildProps)
.reply(200, {
buildId: "new-build-id-123"
})
api.createRun(@buildProps)
.then (ret) ->
expect(ret).to.eq("new-build-id-123")
it "POST /builds failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds", {
projectId: null
recordKey: "PI:KEY:<KEY>END_PI"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "PI:EMAIL:<EMAIL>END_PI"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
})
.reply(422, {
errors: {
buildId: ["is required"]
}
})
api.createRun({
projectId: null
recordKey: "PI:KEY:<KEY>END_PI"
commitSha: "sha"
commitBranch: "master"
commitAuthorName: "brian"
commitAuthorEmail: "PI:EMAIL:<EMAIL>END_PI"
commitMessage: "such hax"
remoteOrigin: "https://github.com/foo/bar.git"
ciProvider: "circle"
ciBuildNumber: "987"
ciParams: { foo: "bar" }
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"buildId": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "2")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds")
.socketDelay(5000)
.reply(200, {})
api.createRun({
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "post").returns({
promise: -> {
get: -> {
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
}
}
})
api.createRun({})
.then ->
expect(rp.post).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/builds", @buildProps)
.reply(500, {})
api.createRun(@buildProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createInstance", ->
beforeEach ->
Object.defineProperty(process.versions, "chrome", {
value: "53"
})
@postProps = {
spec: "cypress/integration/app_spec.js"
browserName: "Electron"
browserVersion: "53"
osName: "darwin"
osVersion: "10.10.10"
osCpus: [{model: "foo"}]
osMemory: {
free: 1000
total: 2000
}
}
@createProps = {
buildId: "build-id-123"
spec: "cypress/integration/app_spec.js"
}
it "POSTs /builds/:id/instances", ->
@sandbox.stub(os, "release").returns("10.10.10")
@sandbox.stub(os, "cpus").returns([{model: "foo"}])
@sandbox.stub(os, "freemem").returns(1000)
@sandbox.stub(os, "totalmem").returns(2000)
os.platform.returns("darwin")
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "darwin")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances", @postProps)
.reply(200, {
instanceId: "instance-id-123"
})
api.createInstance(@createProps)
.then (instanceId) ->
expect(instanceId).to.eq("instance-id-123")
it "POST /builds/:id/instances failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.createInstance({buildId: "build-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-route-version", "3")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.post("/builds/build-id-123/instances")
.socketDelay(5000)
.reply(200, {})
api.createInstance({
buildId: "build-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "post").returns({
promise: -> {
get: -> {
catch: -> {
catch: -> {
then: (fn) -> fn()
}
then: (fn) -> fn()
}
}
}
})
api.createInstance({})
.then ->
expect(rp.post).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer PI:KEY:<KEY>END_PI")
.post("/builds/build-id-123/instances", @postProps)
.reply(500, {})
api.createInstance(@createProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateInstance", ->
beforeEach ->
Object.defineProperty(process.versions, "chrome", {
value: "53"
})
@putProps = {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: []
failingTests: []
cypressConfig: {}
ciProvider: "circle"
stdout: "foo\nbar\nbaz"
}
@updateProps = {
instanceId: "instance-id-123"
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: []
failingTests: []
cypressConfig: {}
ciProvider: "circle"
stdout: "foo\nbar\nbaz"
}
it "PUTs /instances/:id", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123", @putProps)
.reply(200)
api.updateInstance(@updateProps)
it "PUT /instances/:id failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.updateInstance({instanceId: "instance-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123")
.socketDelay(5000)
.reply(200, {})
api.updateInstance({
instanceId: "instance-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "put").resolves()
api.updateInstance({})
.then ->
expect(rp.put).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI-PI:PASSWORD:<PASSWORD>END_PI")
.put("/instances/instance-id-123", @putProps)
.reply(500, {})
api.updateInstance(@updateProps)
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateInstanceStdout", ->
it "PUTs /instances/:id/stdout", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout", {
stdout: "foobarbaz\n"
})
.reply(200)
api.updateInstanceStdout({
instanceId: "instance-id-123"
stdout: "foobarbaz\n"
})
it "PUT /instances/:id/stdout failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout")
.reply(422, {
errors: {
tests: ["is required"]
}
})
api.updateInstanceStdout({instanceId: "instance-id-123"})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"tests": [
"is required"
]
}
}
""")
it "handles timeouts", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.put("/instances/instance-id-123/stdout")
.socketDelay(5000)
.reply(200, {})
api.updateInstanceStdout({
instanceId: "instance-id-123"
timeout: 100
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("Error: ESOCKETTIMEDOUT")
it "sets timeout to 10 seconds", ->
@sandbox.stub(rp, "put").resolves()
api.updateInstanceStdout({})
.then ->
expect(rp.put).to.be.calledWithMatch({timeout: 10000})
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.put("/instances/instance-id-123/stdout", {
stdout: "foobarbaz\n"
})
.reply(500, {})
api.updateInstanceStdout({
instanceId: "instance-id-123"
stdout: "foobarbaz\n"
})
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getLoginUrl", ->
it "GET /auth + returns the url", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.get("/auth")
.reply(200, {
url: "https://github.com/authorize"
})
api.getLoginUrl().then (url) ->
expect(url).to.eq("https://github.com/authorize")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-PI:PASSWORD:<PASSWORD>END_PI")
.get("/auth")
.reply(500, {})
api.getLoginUrl()
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createSignin", ->
it "POSTs /signin + returns user object", ->
@sandbox.stub(nmi, "machineId").resolves("12345")
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.matchHeader("x-machine-id", "12345")
.post("/signin")
.query({code: "abc-123"})
.reply(200, {
name: "PI:NAME:<NAME>END_PI"
})
api.createSignin("abc-123").then (user) ->
expect(user).to.deep.eq({
name: "PI:NAME:<NAME>END_PI"
})
it "handles nmi errors", ->
@sandbox.stub(nmi, "machineId").rejects(new Error("foo"))
nock("http://localhost:1234", {
"badheaders": ["x-machine-id"]
})
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.matchHeader("x-accept-terms", "true")
.post("/signin")
.query({code: "abc-123"})
.reply(200, {
name: "PI:NAME:<NAME>END_PI"
})
api.createSignin("abc-123").then (user) ->
expect(user).to.deep.eq({
name: "PI:NAME:<NAME>END_PI"
})
it "handles 401 exceptions", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "3")
.post("/signin")
.query({code: "abc-123"})
.reply(401, "Your email: 'PI:EMAIL:<EMAIL>END_PI' has not been authorized.")
api.createSignin("abc-123")
.then ->
throw new Error("should have thrown error")
.catch (err) ->
expect(err.message).to.eq("Your email: 'PI:EMAIL:<EMAIL>END_PI' has not been authorized.")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signin")
.reply(500, {})
api.createSignin("abc-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createSignout", ->
it "POSTs /signout", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signout")
.reply(200)
api.createSignout("auth-token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/signout")
.reply(500, {})
api.createSignout("auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createProject", ->
beforeEach ->
@postProps = {
name: "PI:NAME:<NAME>END_PI"
orgId: "org-id-123"
public: true
remoteOrigin: "remoteOrigin"
}
@createProps = {
projectName: "foobar"
orgId: "org-id-123"
public: true
}
it "POST /projects", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "2")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", @postProps)
.reply(200, {
id: "id-123"
name: "PI:NAME:<NAME>END_PI"
orgId: "org-id-123"
public: true
})
api.createProject(@createProps, "remoteOrigin", "auth-token-123")
.then (projectDetails) ->
expect(projectDetails).to.eql({
id: "id-123"
name: "PI:NAME:<NAME>END_PI"
orgId: "org-id-123"
public: true
})
it "POST /projects failure formatting", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("x-route-version", "2")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", {
name: "PI:NAME:<NAME>END_PI"
orgId: "org-id-123"
public: true
remoteOrigin: "remoteOrigin"
})
.reply(422, {
errors: {
orgId: ["is required"]
}
})
projectDetails = {
projectName: "foobar"
orgId: "org-id-123"
public: true
}
api.createProject(projectDetails, "remoteOrigin", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"orgId": [
"is required"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects", @postProps)
.reply(500, {})
api.createProject(@createProps, "remoteOrigin", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectRecordKeys", ->
it "GET /projects/:id/keys + returns keys", ->
recordKeys = []
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/keys")
.reply(200, recordKeys)
api.getProjectRecordKeys("id-PI:KEY:<KEY>END_PI2PI:KEY:<KEY>END_PI", "auth-PI:KEY:<KEY>END_PI")
.then (ret) ->
expect(ret).to.eql(recordKeys)
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/id-123/keys")
.reply(500, {})
api.getProjectRecordKeys("PI:KEY:<KEY>END_PI", "PI:KEY:<KEY>END_PI")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".requestAccess", ->
it "POST /projects/:id/membership_requests + returns response", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(200)
api.requestAccess("project-id-123", "auth-token-123")
.then (ret) ->
expect(ret).to.be.undefined
it "POST /projects/:id/membership_requests failure formatting", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(422, {
errors: {
access: ["already requested"]
}
})
api.requestAccess("project-id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.message).to.eq("""
422
{
"errors": {
"access": [
"already requested"
]
}
}
""")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.post("/projects/project-id-123/membership_requests")
.reply(500, {})
api.requestAccess("project-id-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".getProjectToken", ->
it "GETs /projects/:id/token", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/project-123/token")
.reply(200, {
apiToken: "PI:PASSWORD:<KEY>END_PI"
})
api.getProjectToken("project-123", "auth-token-123")
.then (resp) ->
expect(resp).to.eq("token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-123")
.get("/projects/project-123/token")
.reply(500, {})
api.getProjectToken("project-123", "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".updateProjectToken", ->
it "PUTs /projects/:id/token", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer PI:KEY:<KEY>END_PI-PI:PASSWORD:<KEY>END_PI")
.put("/projects/project-123/token")
.reply(200, {
apiToken: "PI:PASSWORD:<KEY>END_PI"
})
api.updateProjectToken("project-123", "auth-PI:PASSWORD:<KEY>END_PI")
.then (resp) ->
expect(resp).to.eq("token-123")
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("authorization", "Bearer auth-token-PI:PASSWORD:<PASSWORD>END_PI")
.put("/projects/project-id-123/token")
.reply(500, {})
api.updateProjectToken("project-123", "auth-token-PI:PASSWORD:<PASSWORD>END_PI")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
context ".createRaygunException", ->
beforeEach ->
@setup = (body, authToken, delay = 0) ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer #{authToken}")
.post("/exceptions", body)
.delayConnection(delay)
.reply(200)
it "POSTs /exceptions", ->
@setup({foo: "bar"}, "auth-token-123")
api.createRaygunException({foo: "bar"}, "auth-token-123")
it "by default times outs after 3 seconds", ->
## return our own specific promise
## so we can spy on the timeout function
p = Promise.resolve()
@sandbox.spy(p, "timeout")
@sandbox.stub(rp.Request.prototype, "promise").returns(p)
@setup({foo: "bar"}, "auth-token-123")
api.createRaygunException({foo: "bar"}, "auth-token-123").then ->
expect(p.timeout).to.be.calledWith(3000)
it "times out after exceeding timeout", ->
## force our connection to be delayed 5 seconds
@setup({foo: "bar"}, "auth-token-123", 5000)
## and set the timeout to only be 50ms
api.createRaygunException({foo: "bar"}, "auth-token-123", 50)
.then ->
throw new Error("errored: it did not catch the timeout error!")
.catch Promise.TimeoutError, ->
it "tags errors", ->
nock("http://localhost:1234")
.matchHeader("x-platform", "linux")
.matchHeader("x-cypress-version", pkg.version)
.matchHeader("authorization", "Bearer auth-token-PI:PASSWORD:<PASSWORD>END_PI")
.post("/exceptions", {foo: "bar"})
.reply(500, {})
api.createRaygunException({foo: "bar"}, "auth-token-123")
.then ->
throw new Error("should have thrown here")
.catch (err) ->
expect(err.isApiError).to.be.true
|
[
{
"context": "?\\d+),(.+)\\s*-\\s*(.+)|(.+))\\n(.+)/\n\n# #EXTINF:822,Iron Maiden - Rime of the Ancient Mariner\nextended = (line) -",
"end": 164,
"score": 0.9984118342399597,
"start": 153,
"tag": "NAME",
"value": "Iron Maiden"
}
] | src/m3u.coffee | nickdesaulniers/javascript-playlist-parser | 75 | # http://gonze.com/playlists/playlist-format-survey.html#M3U
EXTENDED = '#EXTM3U'
COMMENT_RE = /:(?:(-?\d+),(.+)\s*-\s*(.+)|(.+))\n(.+)/
# #EXTINF:822,Iron Maiden - Rime of the Ancient Mariner
extended = (line) ->
match = line.match COMMENT_RE
if match and match.length is 6
length: match[1] or 0
artist: match[2] or ''
title: match[4] or match[3]
file: match[5].trim()
simple = (string) ->
file: string.trim()
empty = (line) ->
!!line.trim().length
comments = (line) ->
line[0] isnt '#'
parse = (playlist) ->
playlist = playlist.replace /\r/g, ''
firstNewline = playlist.search '\n'
if playlist.substr(0, firstNewline) is EXTENDED
playlist.substr(firstNewline).split('\n#').filter(empty).map extended
else
playlist.split('\n').filter(empty).filter(comments).map simple
(if module? then module.exports else window).M3U =
name: 'm3u'
parse: parse
| 112713 | # http://gonze.com/playlists/playlist-format-survey.html#M3U
EXTENDED = '#EXTM3U'
COMMENT_RE = /:(?:(-?\d+),(.+)\s*-\s*(.+)|(.+))\n(.+)/
# #EXTINF:822,<NAME> - Rime of the Ancient Mariner
extended = (line) ->
match = line.match COMMENT_RE
if match and match.length is 6
length: match[1] or 0
artist: match[2] or ''
title: match[4] or match[3]
file: match[5].trim()
simple = (string) ->
file: string.trim()
empty = (line) ->
!!line.trim().length
comments = (line) ->
line[0] isnt '#'
parse = (playlist) ->
playlist = playlist.replace /\r/g, ''
firstNewline = playlist.search '\n'
if playlist.substr(0, firstNewline) is EXTENDED
playlist.substr(firstNewline).split('\n#').filter(empty).map extended
else
playlist.split('\n').filter(empty).filter(comments).map simple
(if module? then module.exports else window).M3U =
name: 'm3u'
parse: parse
| true | # http://gonze.com/playlists/playlist-format-survey.html#M3U
EXTENDED = '#EXTM3U'
COMMENT_RE = /:(?:(-?\d+),(.+)\s*-\s*(.+)|(.+))\n(.+)/
# #EXTINF:822,PI:NAME:<NAME>END_PI - Rime of the Ancient Mariner
extended = (line) ->
match = line.match COMMENT_RE
if match and match.length is 6
length: match[1] or 0
artist: match[2] or ''
title: match[4] or match[3]
file: match[5].trim()
simple = (string) ->
file: string.trim()
empty = (line) ->
!!line.trim().length
comments = (line) ->
line[0] isnt '#'
parse = (playlist) ->
playlist = playlist.replace /\r/g, ''
firstNewline = playlist.search '\n'
if playlist.substr(0, firstNewline) is EXTENDED
playlist.substr(firstNewline).split('\n#').filter(empty).map extended
else
playlist.split('\n').filter(empty).filter(comments).map simple
(if module? then module.exports else window).M3U =
name: 'm3u'
parse: parse
|
[
{
"context": "own risk.'), photo: 'grumpy.jpg'}\n\t\t6: {name: tr('Princess'), descr: tr('Too frail to even look at. Careful!",
"end": 625,
"score": 0.681588351726532,
"start": 617,
"tag": "NAME",
"value": "Princess"
}
] | config.common.coffee | Happening/PersonOfTheWeek | 0 | {tr} = require 'i18n'
exports.getDefault = ->
period: 'week'
topics:
1: {name: tr('Player'), descr: tr('Smooth talker with a healthy reproductive drive.'), photo: 'player.jpg'}
2: {name: tr('Hero'), descr: tr('Accomplishes great things for the collective.'), photo: 'hero.jpg'}
3: {name: tr('Disappointment'), descr: tr('We had such great hopes for this one.'), photo: 'disappointment.jpg'}
4: {name: tr('Beggar'), descr: tr('Always ‘borrowing’, never sharing.'), photo: 'beggar.jpg'}
5: {name: tr('Grumpy'), descr: tr('Terrible temper! Approach at your own risk.'), photo: 'grumpy.jpg'}
6: {name: tr('Princess'), descr: tr('Too frail to even look at. Careful!'), photo: 'princess.jpg'}
7: {name: tr('Ghost'), descr: tr('Rarely seen. Might not even exist at all.'), photo: 'ghost.jpg'}
8: {name: tr('Zombie'), descr: tr('Could perhaps use a little more sleep.'), photo: 'zombie.jpg'}
9: {name: tr('Pig'), descr: tr('Eats and drinks whatever it comes across.'), photo: 'pig.jpg'}
10: {name: tr('Yoda'), descr: tr('Riddles, this one talks in.'), photo: 'yoda.jpg'}
exports.periodTime = (period) ->
{
minute: 60
day: 24*3600
week: 7*24*3600
month: 30*24*3600
}[period]
exports.voteTime = (period) ->
{
minute: 30
day: 1800
week: 6*3600
month: 24*3600
}[period]
exports.awardName = (what,period) ->
if period=="day"
tr "%1 of the Day", what
else if period=="month"
tr "%1 of the Month", what
else if period=="minute"
tr "%1 of the Minute", what
else
tr "%1 of the Week", what
| 155365 | {tr} = require 'i18n'
exports.getDefault = ->
period: 'week'
topics:
1: {name: tr('Player'), descr: tr('Smooth talker with a healthy reproductive drive.'), photo: 'player.jpg'}
2: {name: tr('Hero'), descr: tr('Accomplishes great things for the collective.'), photo: 'hero.jpg'}
3: {name: tr('Disappointment'), descr: tr('We had such great hopes for this one.'), photo: 'disappointment.jpg'}
4: {name: tr('Beggar'), descr: tr('Always ‘borrowing’, never sharing.'), photo: 'beggar.jpg'}
5: {name: tr('Grumpy'), descr: tr('Terrible temper! Approach at your own risk.'), photo: 'grumpy.jpg'}
6: {name: tr('<NAME>'), descr: tr('Too frail to even look at. Careful!'), photo: 'princess.jpg'}
7: {name: tr('Ghost'), descr: tr('Rarely seen. Might not even exist at all.'), photo: 'ghost.jpg'}
8: {name: tr('Zombie'), descr: tr('Could perhaps use a little more sleep.'), photo: 'zombie.jpg'}
9: {name: tr('Pig'), descr: tr('Eats and drinks whatever it comes across.'), photo: 'pig.jpg'}
10: {name: tr('Yoda'), descr: tr('Riddles, this one talks in.'), photo: 'yoda.jpg'}
exports.periodTime = (period) ->
{
minute: 60
day: 24*3600
week: 7*24*3600
month: 30*24*3600
}[period]
exports.voteTime = (period) ->
{
minute: 30
day: 1800
week: 6*3600
month: 24*3600
}[period]
exports.awardName = (what,period) ->
if period=="day"
tr "%1 of the Day", what
else if period=="month"
tr "%1 of the Month", what
else if period=="minute"
tr "%1 of the Minute", what
else
tr "%1 of the Week", what
| true | {tr} = require 'i18n'
exports.getDefault = ->
period: 'week'
topics:
1: {name: tr('Player'), descr: tr('Smooth talker with a healthy reproductive drive.'), photo: 'player.jpg'}
2: {name: tr('Hero'), descr: tr('Accomplishes great things for the collective.'), photo: 'hero.jpg'}
3: {name: tr('Disappointment'), descr: tr('We had such great hopes for this one.'), photo: 'disappointment.jpg'}
4: {name: tr('Beggar'), descr: tr('Always ‘borrowing’, never sharing.'), photo: 'beggar.jpg'}
5: {name: tr('Grumpy'), descr: tr('Terrible temper! Approach at your own risk.'), photo: 'grumpy.jpg'}
6: {name: tr('PI:NAME:<NAME>END_PI'), descr: tr('Too frail to even look at. Careful!'), photo: 'princess.jpg'}
7: {name: tr('Ghost'), descr: tr('Rarely seen. Might not even exist at all.'), photo: 'ghost.jpg'}
8: {name: tr('Zombie'), descr: tr('Could perhaps use a little more sleep.'), photo: 'zombie.jpg'}
9: {name: tr('Pig'), descr: tr('Eats and drinks whatever it comes across.'), photo: 'pig.jpg'}
10: {name: tr('Yoda'), descr: tr('Riddles, this one talks in.'), photo: 'yoda.jpg'}
exports.periodTime = (period) ->
{
minute: 60
day: 24*3600
week: 7*24*3600
month: 30*24*3600
}[period]
exports.voteTime = (period) ->
{
minute: 30
day: 1800
week: 6*3600
month: 24*3600
}[period]
exports.awardName = (what,period) ->
if period=="day"
tr "%1 of the Day", what
else if period=="month"
tr "%1 of the Month", what
else if period=="minute"
tr "%1 of the Minute", what
else
tr "%1 of the Week", what
|
[
{
"context": "fig **\nAccounts.ui.config\n\tpasswordSignupFields: \"USERNAME_AND_EMAIL\"\n\trequestPermissions:\n\t\tfacebook: [\"user",
"end": 79,
"score": 0.8331320285797119,
"start": 70,
"tag": "PASSWORD",
"value": "USERNAME_"
}
] | client/lib/config.coffee | ananta-IO/marq | 1 | # ** Accounts UI Config **
Accounts.ui.config
passwordSignupFields: "USERNAME_AND_EMAIL"
requestPermissions:
facebook: ["user_about_me", "user_activities", "user_birthday", "user_checkins", "user_education_history", "user_interests", "user_likes", "friends_likes", "user_work_history", "email"]
# EpicEditorOptions =
# container: "editor"
# basePath: "/editor"
# clientSideStorage: false
# theme:
# base: "/themes/base/epiceditor.css"
# preview: "/themes/preview/github.css"
# editor: "/themes/editor/epic-light.css"
# SharrreOptions =
# share:
# googlePlus: true
# # facebook: true,
# twitter: true
# buttons:
# googlePlus:
# size: "tall"
# # facebook: {layout: 'box_count'},
# twitter:
# count: "vertical"
# via: "TelescopeApp"
# enableHover: false
# enableCounter: false
# enableTracking: true
# Statuses =
# pending: 1
# approved: 2
# rejected: 3 | 46137 | # ** Accounts UI Config **
Accounts.ui.config
passwordSignupFields: "<PASSWORD>AND_EMAIL"
requestPermissions:
facebook: ["user_about_me", "user_activities", "user_birthday", "user_checkins", "user_education_history", "user_interests", "user_likes", "friends_likes", "user_work_history", "email"]
# EpicEditorOptions =
# container: "editor"
# basePath: "/editor"
# clientSideStorage: false
# theme:
# base: "/themes/base/epiceditor.css"
# preview: "/themes/preview/github.css"
# editor: "/themes/editor/epic-light.css"
# SharrreOptions =
# share:
# googlePlus: true
# # facebook: true,
# twitter: true
# buttons:
# googlePlus:
# size: "tall"
# # facebook: {layout: 'box_count'},
# twitter:
# count: "vertical"
# via: "TelescopeApp"
# enableHover: false
# enableCounter: false
# enableTracking: true
# Statuses =
# pending: 1
# approved: 2
# rejected: 3 | true | # ** Accounts UI Config **
Accounts.ui.config
passwordSignupFields: "PI:PASSWORD:<PASSWORD>END_PIAND_EMAIL"
requestPermissions:
facebook: ["user_about_me", "user_activities", "user_birthday", "user_checkins", "user_education_history", "user_interests", "user_likes", "friends_likes", "user_work_history", "email"]
# EpicEditorOptions =
# container: "editor"
# basePath: "/editor"
# clientSideStorage: false
# theme:
# base: "/themes/base/epiceditor.css"
# preview: "/themes/preview/github.css"
# editor: "/themes/editor/epic-light.css"
# SharrreOptions =
# share:
# googlePlus: true
# # facebook: true,
# twitter: true
# buttons:
# googlePlus:
# size: "tall"
# # facebook: {layout: 'box_count'},
# twitter:
# count: "vertical"
# via: "TelescopeApp"
# enableHover: false
# enableCounter: false
# enableTracking: true
# Statuses =
# pending: 1
# approved: 2
# rejected: 3 |
[
{
"context": "ls\", ->\n\tbeforeEach (done) ->\n\t\t@timeout(20000)\n\t\t@user = new User()\n\t\t@user.login done\n\n\tdescribe 'confi",
"end": 351,
"score": 0.94962477684021,
"start": 346,
"tag": "USERNAME",
"value": "@user"
},
{
"context": "\t\turl: '/user/emails',\n\t\t\t\t\t\tjso... | test/acceptance/coffee/UserEmailsTests.coffee | davidmehren/web-sharelatex | 0 | expect = require("chai").expect
async = require("async")
User = require "./helpers/User"
request = require "./helpers/request"
settings = require "settings-sharelatex"
{db, ObjectId} = require("../../../app/js/infrastructure/mongojs")
MockV1Api = require "./helpers/MockV1Api"
describe "UserEmails", ->
beforeEach (done) ->
@timeout(20000)
@user = new User()
@user.login done
describe 'confirming an email', ->
it 'should confirm the email', (done) ->
token = null
async.series [
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails',
json:
email: 'newly-added-email@example.com'
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 204
cb()
(cb) =>
@user.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.not.exist
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal 'newly-added-email@example.com'
expect(tokens[0].data.user_id).to.equal @user._id
token = tokens[0].token
cb()
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 200
cb()
(cb) =>
@user.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.exist
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# Token should be deleted after use
expect(tokens.length).to.equal 0
cb()
], done
it 'should not allow confirmation of the email if the user has changed', (done) ->
token1 = null
token2 = null
@user2 = new User()
@email = 'duplicate-email@example.com'
async.series [
(cb) => @user2.login cb
(cb) =>
# Create email for first user
@user.request {
method: 'POST',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user._id
token1 = tokens[0].token
cb()
(cb) =>
# Delete the email from the first user
@user.request {
method: 'DELETE',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
# Create email for second user
@user2.request {
method: 'POST',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
# Original confirmation token should no longer work
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token1
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 404
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user2._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# The first token has been used, so this should be token2 now
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user2._id
token2 = tokens[0].token
cb()
(cb) =>
# Second user should be able to confirm the email
@user2.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token2
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 200
cb()
(cb) =>
@user2.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.exist
cb()
], done
describe "with an expired token", ->
it 'should not confirm the email', (done) ->
token = null
async.series [
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails',
json:
email: @email = 'expired-token-email@example.com'
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 204
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user._id
token = tokens[0].token
cb()
(cb) =>
db.tokens.update {
token: token
}, {
$set: {
expiresAt: new Date(Date.now() - 1000000)
}
}, cb
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 404
cb()
], done
| 103016 | expect = require("chai").expect
async = require("async")
User = require "./helpers/User"
request = require "./helpers/request"
settings = require "settings-sharelatex"
{db, ObjectId} = require("../../../app/js/infrastructure/mongojs")
MockV1Api = require "./helpers/MockV1Api"
describe "UserEmails", ->
beforeEach (done) ->
@timeout(20000)
@user = new User()
@user.login done
describe 'confirming an email', ->
it 'should confirm the email', (done) ->
token = null
async.series [
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails',
json:
email: '<EMAIL>'
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 204
cb()
(cb) =>
@user.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.not.exist
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal '<EMAIL>'
expect(tokens[0].data.user_id).to.equal @user._id
token = tokens[0].token
cb()
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 200
cb()
(cb) =>
@user.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.exist
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# Token should be deleted after use
expect(tokens.length).to.equal 0
cb()
], done
it 'should not allow confirmation of the email if the user has changed', (done) ->
token1 = null
token2 = null
@user2 = new User()
@email = '<EMAIL>'
async.series [
(cb) => @user2.login cb
(cb) =>
# Create email for first user
@user.request {
method: 'POST',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user._id
token1 = tokens[0].token
cb()
(cb) =>
# Delete the email from the first user
@user.request {
method: 'DELETE',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
# Create email for second user
@user2.request {
method: 'POST',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
# Original confirmation token should no longer work
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token1
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 404
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user2._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# The first token has been used, so this should be token2 now
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user2._id
token2 = tokens[0].token
cb()
(cb) =>
# Second user should be able to confirm the email
@user2.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token2
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 200
cb()
(cb) =>
@user2.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.exist
cb()
], done
describe "with an expired token", ->
it 'should not confirm the email', (done) ->
token = null
async.series [
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails',
json:
email: @email = '<EMAIL>'
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 204
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user._id
token = tokens[0].token
cb()
(cb) =>
db.tokens.update {
token: token
}, {
$set: {
expiresAt: new Date(Date.now() - 1000000)
}
}, cb
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 404
cb()
], done
| true | expect = require("chai").expect
async = require("async")
User = require "./helpers/User"
request = require "./helpers/request"
settings = require "settings-sharelatex"
{db, ObjectId} = require("../../../app/js/infrastructure/mongojs")
MockV1Api = require "./helpers/MockV1Api"
describe "UserEmails", ->
beforeEach (done) ->
@timeout(20000)
@user = new User()
@user.login done
describe 'confirming an email', ->
it 'should confirm the email', (done) ->
token = null
async.series [
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails',
json:
email: 'PI:EMAIL:<EMAIL>END_PI'
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 204
cb()
(cb) =>
@user.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.not.exist
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal 'PI:EMAIL:<EMAIL>END_PI'
expect(tokens[0].data.user_id).to.equal @user._id
token = tokens[0].token
cb()
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 200
cb()
(cb) =>
@user.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.exist
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# Token should be deleted after use
expect(tokens.length).to.equal 0
cb()
], done
it 'should not allow confirmation of the email if the user has changed', (done) ->
token1 = null
token2 = null
@user2 = new User()
@email = 'PI:EMAIL:<EMAIL>END_PI'
async.series [
(cb) => @user2.login cb
(cb) =>
# Create email for first user
@user.request {
method: 'POST',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user._id
token1 = tokens[0].token
cb()
(cb) =>
# Delete the email from the first user
@user.request {
method: 'DELETE',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
# Create email for second user
@user2.request {
method: 'POST',
url: '/user/emails',
json: {@email}
}, cb
(cb) =>
# Original confirmation token should no longer work
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token1
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 404
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user2._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# The first token has been used, so this should be token2 now
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user2._id
token2 = tokens[0].token
cb()
(cb) =>
# Second user should be able to confirm the email
@user2.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token2
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 200
cb()
(cb) =>
@user2.request { url: '/user/emails', json: true }, (error, response, body) ->
expect(response.statusCode).to.equal 200
expect(body[0].confirmedAt).to.not.exist
expect(body[1].confirmedAt).to.exist
cb()
], done
describe "with an expired token", ->
it 'should not confirm the email', (done) ->
token = null
async.series [
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails',
json:
email: @email = 'PI:EMAIL:<EMAIL>END_PI'
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 204
cb()
(cb) =>
db.tokens.find {
use: 'email_confirmation',
'data.user_id': @user._id,
usedAt: { $exists: false }
}, (error, tokens) =>
# There should only be one confirmation token at the moment
expect(tokens.length).to.equal 1
expect(tokens[0].data.email).to.equal @email
expect(tokens[0].data.user_id).to.equal @user._id
token = tokens[0].token
cb()
(cb) =>
db.tokens.update {
token: token
}, {
$set: {
expiresAt: new Date(Date.now() - 1000000)
}
}, cb
(cb) =>
@user.request {
method: 'POST',
url: '/user/emails/confirm',
json:
token: token
}, (error, response, body) =>
return done(error) if error?
expect(response.statusCode).to.equal 404
cb()
], done
|
[
{
"context": " def.options =\n collectionName: 'a third name'\n res2 = sql.modelFactory def, 'asqlcon'",
"end": 19313,
"score": 0.6176007390022278,
"start": 19301,
"tag": "NAME",
"value": "a third name"
}
] | test/server/common/dal/sql.coffee | valueflowquality/gi-util-update | 0 | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
proxyquire = require 'proxyquire'
dir = path.normalize __dirname + '../../../../../server'
module.exports = () ->
describe 'sql', ->
sql = null
tediousStub =
Connection: ->
Request: ->
queryQueueStub =
runQuery: ->
sqlHelperStub =
generateWhereClause: ->
""
stubs =
'tedious': tediousStub
'../crudModelFactory': 'crudModelFactoryStub'
'./queryQueue': queryQueueStub
'./sqlHelper': sqlHelperStub
beforeEach ->
sql = proxyquire(dir + '/common/dal/sql', stubs)
describe 'Exports', ->
it 'connect', (done) ->
expect(sql).to.have.ownProperty 'connect'
expect(sql.connect).to.be.a 'function'
done()
it 'QueryBuilder', (done) ->
expect(sql).to.have.ownProperty 'QueryBuilder'
done()
it 'schemaFactory', (done) ->
expect(sql).to.have.ownProperty 'schemaFactory'
done()
it 'modelFactory', (done) ->
expect(sql).to.have.ownProperty 'modelFactory'
done()
it 'crudFactory', (done) ->
expect(sql).to.have.ownProperty 'crudFactory'
done()
describe 'QueryBuilder', ->
qb = {}
beforeEach ->
qb = new sql.QueryBuilder("aTable", "aConnection", "_id")
describe 'Properties', ->
it 'query: String', (done) ->
expect(qb).to.have.ownProperty 'query'
expect(qb.query).to.be.a 'String'
expect(qb.query).to.equal ""
done()
it 'returnArray: Boolean (true)', (done) ->
expect(qb).to.have.ownProperty 'returnArray'
expect(qb.returnArray).to.be.a 'boolean'
expect(qb.returnArray).to.be.true
done()
it 'table: String', (done) ->
expect(qb).to.have.ownProperty 'table'
expect(qb.table).to.be.a 'String'
expect(qb.table).to.equal 'aTable'
done()
it 'dbConnection: Object', (done) ->
expect(qb).to.have.ownProperty 'dbConnection'
expect(qb.dbConnection).to.equal 'aConnection'
done()
describe 'Methods', ->
it 'exec: (callback) ->', (done) ->
expect(qb).to.have.property 'exec'
expect(qb.exec).to.be.a 'function'
done()
it 'create: (obj, callback) ->', (done) ->
expect(qb).to.have.property 'create'
expect(qb.create).to.be.a 'function'
done()
it 'find: (query, callback) ->', (done) ->
expect(qb).to.have.property 'find'
expect(qb.find).to.be.a 'function'
done()
it 'findOne: (query, callback) ->', (done) ->
expect(qb).to.have.property 'findOne'
expect(qb.findOne).to.be.a 'function'
done()
it 'sort: (query, callback) ->', (done) ->
expect(qb).to.have.property 'sort'
expect(qb.sort).to.be.a 'function'
done()
it 'skip: (num, cb) ->', (done) ->
expect(qb).to.have.property 'skip'
expect(qb.skip).to.be.a 'function'
done()
it 'limit: (num, cb) ->', (done) ->
expect(qb).to.have.property 'limit'
expect(qb.limit).to.be.a 'function'
done()
it 'count: (query, cb) ->', (done) ->
expect(qb).to.have.property 'count'
expect(qb.count).to.be.a 'function'
done()
it 'findByIdAndUpdate: (id, update, options, callback)', (done) ->
expect(qb).to.have.property 'findByIdAndUpdate'
expect(qb.findByIdAndUpdate).to.be.a 'function'
done()
it 'remove: (query, callback)', (done) ->
expect(qb).to.have.property 'remove'
expect(qb.remove).to.be.a 'function'
done()
describe 'exec', ->
beforeEach ->
sinon.spy queryQueueStub, 'runQuery'
afterEach ->
queryQueueStub.runQuery.restore()
it 'calls runQuery with query returnArray dbConnection and cb'
, (done) ->
qb.query = 'hello'
qb.returnArray = true
qb.dbConnection = 'bob'
qb.exec 'aCallback'
expect(queryQueueStub.runQuery.calledWithExactly(
'hello', true, 'bob', 'aCallback'
), 'runQuery not given correct arguments').to.be.true
done()
describe 'create', ->
beforeEach ->
sinon.spy qb, 'exec'
query =
'abc': 1
'def': 'bob'
qb.returnArray = false
qb.create query, 'callbackstub'
it 'sets returnArray to false', (done) ->
expect(qb.returnArray).to.be.false
done()
it 'inserts into correct table', (done) ->
expect(qb.query.indexOf('INSERT INTO aTable')).to.equal 0
done()
it 'decomposes the query object into sql', (done) ->
expect(qb.query)
.to.equal "INSERT INTO aTable (abc, def) VALUES ('1', 'bob') "
done()
it 'executes the query', (done) ->
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly('callbackstub')).to.be.true
done()
describe 'find', ->
beforeEach ->
sinon.spy qb, 'exec'
sinon.stub sqlHelperStub, 'generateWhereClause'
qb.returnArray = false
afterEach ->
sqlHelperStub.generateWhereClause.restore()
it 'sets returnArray to true', (done) ->
qb.find "abc", "123"
expect(qb.returnArray).to.be.true
done()
it 'selects from correct table', (done) ->
qb.find "abc", "another callback"
expect(qb.query.indexOf('SELECT * FROM aTable')).to.equal 0
done()
it 'generates a whereclause from the query', (done) ->
qb.find "abc", "another callback"
expect(sqlHelperStub.generateWhereClause.calledWithExactly 'abc')
.to.be.true
done()
it 'appends whereclase to the query', (done) ->
sqlHelperStub.generateWhereClause.returns " WHERE CLAUSE"
qb.find "abc", "another callback"
expect(qb.query).to.equal "SELECT * FROM aTable WHERE CLAUSE"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.find "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.find "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.find "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.find "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'findOne', ->
beforeEach ->
sinon.spy qb, 'exec'
sinon.stub sqlHelperStub, 'generateWhereClause'
afterEach ->
sqlHelperStub.generateWhereClause.restore()
it 'selects from correct table', (done) ->
qb.findOne "abc", "another callback"
expect(qb.query.indexOf('SELECT TOP 1 * FROM aTable')).to.equal 0
done()
it 'generates a whereclause from the query', (done) ->
qb.findOne "abc", "another callback"
expect(sqlHelperStub.generateWhereClause.calledWithExactly 'abc')
.to.be.true
done()
it 'appends whereclase to the query', (done) ->
sqlHelperStub.generateWhereClause.returns " WHERE CLAUSE"
qb.findOne "abc", "another callback"
expect(qb.query)
.to.equal "SELECT TOP 1 * FROM aTable WHERE CLAUSE"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.findOne "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.findOne "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.findOne "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.findOne "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'sort', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.sort "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.sort "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.sort "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.sort "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'skip', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.skip "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.skip "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.skip "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.skip "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'limit', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.limit "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.limit "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.limit "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.limit "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'count', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'sets the query to count all rows in the table', (done) ->
qb.count "abc", "def"
expect(qb.query).to.equal 'SELECT COUNT(*) FROM aTable'
done()
it 'returns the query builder if no callback', (done) ->
res = qb.count "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.count "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.count "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.count "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'findByIdAndUpdate', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'sets the query', (done) ->
qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(qb.query)
.to.equal "UPDATE aTable SET abc= 'def' WHERE _id = 123"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.findByIdAndUpdate "123", {abc: "def"}
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.findByIdAndUpdate "123", {abc: "def"}
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.count qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(qb.exec.calledOnce).to.be.true
done()
describe 'remove', ->
beforeEach ->
sinon.spy qb, 'exec'
qb.returnArray = true
it 'sets returnArray to false', (done) ->
qb.remove {id: "123"}, "callback"
expect(qb.returnArray).to.be.false
done()
it 'removes based on the id given in query', (done) ->
qb.remove {_id: "123"}, "callback"
expect(qb.query)
.to.equal 'DELETE FROM aTable WHERE _id = ' + "123"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.remove {id: "123"}
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.remove {id: "123"}
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.remove {id: "123"}, "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.remove {id: "123"}, "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'connect', ->
beforeEach ->
sinon.stub tediousStub, 'Connection'
tediousStub.Connection.returns {a: 'sql connection'}
afterEach ->
tediousStub.Connection.restore()
it 'creates a new sql connection with conf', (done) ->
res = sql.connect 'bob'
expect(tediousStub.Connection.calledOnce).to.be.true
expect(tediousStub.Connection.calledWithExactly 'bob')
.to.be.true
expect(res).to.deep.equal {a: 'sql connection'}
done()
describe 'schemaFactory', ->
result = {}
def =
schemaDefiniton: 'a definition'
beforeEach ->
result = sql.schemaFactory def
it 'sets schema to the given defition', (done) ->
expect(result).to.have.ownProperty 'schema'
expect(result.schema).to.equal def.schemaDefinition
done()
it 'defines a virtual method on the returned schema object'
, (done) ->
expect(result).to.have.ownProperty 'virtual'
expect(result.virtual).to.be.a 'function'
done()
it 'the virtual method defines a get function', (done) ->
virtualResult = result.virtual 'bob'
expect(virtualResult).to.have.ownProperty 'get'
expect(virtualResult.get).to.be.a 'function'
expect(virtualResult.get('abc')).to.not.exist
done()
it 'the virtual method defines a set function', (done) ->
virtualResult = result.virtual 'bob'
expect(virtualResult).to.have.ownProperty 'set'
expect(virtualResult.set).to.be.a 'function'
expect(virtualResult.set('abc')).to.not.exist
done()
it 'defines an empty methods object on the returned schema object'
, (done) ->
expect(result).to.have.ownProperty 'methods'
expect(result.methods).to.be.an 'object'
expect(result.methods).to.deep.equal {}
done()
it 'defines a pre function on the returned schema object', (done) ->
expect(result).to.have.ownProperty 'pre'
expect(result.pre).to.be.a 'function'
expect(result.pre('abc','def')).to.not.exist
done()
describe 'modelFactory', ->
def = {}
res = null
beforeEach ->
def =
name: 'a new model'
Qb: sinon.spy()
afterEach ->
def = null
it 'returns a new query builder instance from def.qb', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWithNew()).to.be.true
done()
it 'uses def.name for the table name', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWith('a new model')).to.be.true
done()
it 'uses def.options.collectionName for the table name if present'
, (done) ->
def.options =
collectionName: 'anothername'
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledOnce).to.be.true
expect(def.Qb.calledWith('anothername')).to.be.true
done()
it 'uses the given database connection for the connection', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWith('a new model', 'asqlcon')).to.be.true
done()
it 'sets modelName on the QueryBuilder object to the def.name'
, (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(res).to.have.property 'modelName'
expect(res.modelName).to.equal 'a new model'
def.options =
collectionName: 'a third name'
res2 = sql.modelFactory def, 'asqlcon'
expect(res2.modelName).to.equal 'a new model'
done()
describe 'crudFactory', ->
it 'returns the common.crudModelFactory', (done) ->
expect(sql.crudFactory).to.equal 'crudModelFactoryStub'
done()
| 61463 | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
proxyquire = require 'proxyquire'
dir = path.normalize __dirname + '../../../../../server'
module.exports = () ->
describe 'sql', ->
sql = null
tediousStub =
Connection: ->
Request: ->
queryQueueStub =
runQuery: ->
sqlHelperStub =
generateWhereClause: ->
""
stubs =
'tedious': tediousStub
'../crudModelFactory': 'crudModelFactoryStub'
'./queryQueue': queryQueueStub
'./sqlHelper': sqlHelperStub
beforeEach ->
sql = proxyquire(dir + '/common/dal/sql', stubs)
describe 'Exports', ->
it 'connect', (done) ->
expect(sql).to.have.ownProperty 'connect'
expect(sql.connect).to.be.a 'function'
done()
it 'QueryBuilder', (done) ->
expect(sql).to.have.ownProperty 'QueryBuilder'
done()
it 'schemaFactory', (done) ->
expect(sql).to.have.ownProperty 'schemaFactory'
done()
it 'modelFactory', (done) ->
expect(sql).to.have.ownProperty 'modelFactory'
done()
it 'crudFactory', (done) ->
expect(sql).to.have.ownProperty 'crudFactory'
done()
describe 'QueryBuilder', ->
qb = {}
beforeEach ->
qb = new sql.QueryBuilder("aTable", "aConnection", "_id")
describe 'Properties', ->
it 'query: String', (done) ->
expect(qb).to.have.ownProperty 'query'
expect(qb.query).to.be.a 'String'
expect(qb.query).to.equal ""
done()
it 'returnArray: Boolean (true)', (done) ->
expect(qb).to.have.ownProperty 'returnArray'
expect(qb.returnArray).to.be.a 'boolean'
expect(qb.returnArray).to.be.true
done()
it 'table: String', (done) ->
expect(qb).to.have.ownProperty 'table'
expect(qb.table).to.be.a 'String'
expect(qb.table).to.equal 'aTable'
done()
it 'dbConnection: Object', (done) ->
expect(qb).to.have.ownProperty 'dbConnection'
expect(qb.dbConnection).to.equal 'aConnection'
done()
describe 'Methods', ->
it 'exec: (callback) ->', (done) ->
expect(qb).to.have.property 'exec'
expect(qb.exec).to.be.a 'function'
done()
it 'create: (obj, callback) ->', (done) ->
expect(qb).to.have.property 'create'
expect(qb.create).to.be.a 'function'
done()
it 'find: (query, callback) ->', (done) ->
expect(qb).to.have.property 'find'
expect(qb.find).to.be.a 'function'
done()
it 'findOne: (query, callback) ->', (done) ->
expect(qb).to.have.property 'findOne'
expect(qb.findOne).to.be.a 'function'
done()
it 'sort: (query, callback) ->', (done) ->
expect(qb).to.have.property 'sort'
expect(qb.sort).to.be.a 'function'
done()
it 'skip: (num, cb) ->', (done) ->
expect(qb).to.have.property 'skip'
expect(qb.skip).to.be.a 'function'
done()
it 'limit: (num, cb) ->', (done) ->
expect(qb).to.have.property 'limit'
expect(qb.limit).to.be.a 'function'
done()
it 'count: (query, cb) ->', (done) ->
expect(qb).to.have.property 'count'
expect(qb.count).to.be.a 'function'
done()
it 'findByIdAndUpdate: (id, update, options, callback)', (done) ->
expect(qb).to.have.property 'findByIdAndUpdate'
expect(qb.findByIdAndUpdate).to.be.a 'function'
done()
it 'remove: (query, callback)', (done) ->
expect(qb).to.have.property 'remove'
expect(qb.remove).to.be.a 'function'
done()
describe 'exec', ->
beforeEach ->
sinon.spy queryQueueStub, 'runQuery'
afterEach ->
queryQueueStub.runQuery.restore()
it 'calls runQuery with query returnArray dbConnection and cb'
, (done) ->
qb.query = 'hello'
qb.returnArray = true
qb.dbConnection = 'bob'
qb.exec 'aCallback'
expect(queryQueueStub.runQuery.calledWithExactly(
'hello', true, 'bob', 'aCallback'
), 'runQuery not given correct arguments').to.be.true
done()
describe 'create', ->
beforeEach ->
sinon.spy qb, 'exec'
query =
'abc': 1
'def': 'bob'
qb.returnArray = false
qb.create query, 'callbackstub'
it 'sets returnArray to false', (done) ->
expect(qb.returnArray).to.be.false
done()
it 'inserts into correct table', (done) ->
expect(qb.query.indexOf('INSERT INTO aTable')).to.equal 0
done()
it 'decomposes the query object into sql', (done) ->
expect(qb.query)
.to.equal "INSERT INTO aTable (abc, def) VALUES ('1', 'bob') "
done()
it 'executes the query', (done) ->
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly('callbackstub')).to.be.true
done()
describe 'find', ->
beforeEach ->
sinon.spy qb, 'exec'
sinon.stub sqlHelperStub, 'generateWhereClause'
qb.returnArray = false
afterEach ->
sqlHelperStub.generateWhereClause.restore()
it 'sets returnArray to true', (done) ->
qb.find "abc", "123"
expect(qb.returnArray).to.be.true
done()
it 'selects from correct table', (done) ->
qb.find "abc", "another callback"
expect(qb.query.indexOf('SELECT * FROM aTable')).to.equal 0
done()
it 'generates a whereclause from the query', (done) ->
qb.find "abc", "another callback"
expect(sqlHelperStub.generateWhereClause.calledWithExactly 'abc')
.to.be.true
done()
it 'appends whereclase to the query', (done) ->
sqlHelperStub.generateWhereClause.returns " WHERE CLAUSE"
qb.find "abc", "another callback"
expect(qb.query).to.equal "SELECT * FROM aTable WHERE CLAUSE"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.find "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.find "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.find "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.find "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'findOne', ->
beforeEach ->
sinon.spy qb, 'exec'
sinon.stub sqlHelperStub, 'generateWhereClause'
afterEach ->
sqlHelperStub.generateWhereClause.restore()
it 'selects from correct table', (done) ->
qb.findOne "abc", "another callback"
expect(qb.query.indexOf('SELECT TOP 1 * FROM aTable')).to.equal 0
done()
it 'generates a whereclause from the query', (done) ->
qb.findOne "abc", "another callback"
expect(sqlHelperStub.generateWhereClause.calledWithExactly 'abc')
.to.be.true
done()
it 'appends whereclase to the query', (done) ->
sqlHelperStub.generateWhereClause.returns " WHERE CLAUSE"
qb.findOne "abc", "another callback"
expect(qb.query)
.to.equal "SELECT TOP 1 * FROM aTable WHERE CLAUSE"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.findOne "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.findOne "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.findOne "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.findOne "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'sort', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.sort "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.sort "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.sort "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.sort "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'skip', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.skip "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.skip "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.skip "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.skip "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'limit', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.limit "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.limit "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.limit "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.limit "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'count', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'sets the query to count all rows in the table', (done) ->
qb.count "abc", "def"
expect(qb.query).to.equal 'SELECT COUNT(*) FROM aTable'
done()
it 'returns the query builder if no callback', (done) ->
res = qb.count "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.count "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.count "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.count "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'findByIdAndUpdate', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'sets the query', (done) ->
qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(qb.query)
.to.equal "UPDATE aTable SET abc= 'def' WHERE _id = 123"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.findByIdAndUpdate "123", {abc: "def"}
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.findByIdAndUpdate "123", {abc: "def"}
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.count qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(qb.exec.calledOnce).to.be.true
done()
describe 'remove', ->
beforeEach ->
sinon.spy qb, 'exec'
qb.returnArray = true
it 'sets returnArray to false', (done) ->
qb.remove {id: "123"}, "callback"
expect(qb.returnArray).to.be.false
done()
it 'removes based on the id given in query', (done) ->
qb.remove {_id: "123"}, "callback"
expect(qb.query)
.to.equal 'DELETE FROM aTable WHERE _id = ' + "123"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.remove {id: "123"}
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.remove {id: "123"}
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.remove {id: "123"}, "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.remove {id: "123"}, "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'connect', ->
beforeEach ->
sinon.stub tediousStub, 'Connection'
tediousStub.Connection.returns {a: 'sql connection'}
afterEach ->
tediousStub.Connection.restore()
it 'creates a new sql connection with conf', (done) ->
res = sql.connect 'bob'
expect(tediousStub.Connection.calledOnce).to.be.true
expect(tediousStub.Connection.calledWithExactly 'bob')
.to.be.true
expect(res).to.deep.equal {a: 'sql connection'}
done()
describe 'schemaFactory', ->
result = {}
def =
schemaDefiniton: 'a definition'
beforeEach ->
result = sql.schemaFactory def
it 'sets schema to the given defition', (done) ->
expect(result).to.have.ownProperty 'schema'
expect(result.schema).to.equal def.schemaDefinition
done()
it 'defines a virtual method on the returned schema object'
, (done) ->
expect(result).to.have.ownProperty 'virtual'
expect(result.virtual).to.be.a 'function'
done()
it 'the virtual method defines a get function', (done) ->
virtualResult = result.virtual 'bob'
expect(virtualResult).to.have.ownProperty 'get'
expect(virtualResult.get).to.be.a 'function'
expect(virtualResult.get('abc')).to.not.exist
done()
it 'the virtual method defines a set function', (done) ->
virtualResult = result.virtual 'bob'
expect(virtualResult).to.have.ownProperty 'set'
expect(virtualResult.set).to.be.a 'function'
expect(virtualResult.set('abc')).to.not.exist
done()
it 'defines an empty methods object on the returned schema object'
, (done) ->
expect(result).to.have.ownProperty 'methods'
expect(result.methods).to.be.an 'object'
expect(result.methods).to.deep.equal {}
done()
it 'defines a pre function on the returned schema object', (done) ->
expect(result).to.have.ownProperty 'pre'
expect(result.pre).to.be.a 'function'
expect(result.pre('abc','def')).to.not.exist
done()
describe 'modelFactory', ->
def = {}
res = null
beforeEach ->
def =
name: 'a new model'
Qb: sinon.spy()
afterEach ->
def = null
it 'returns a new query builder instance from def.qb', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWithNew()).to.be.true
done()
it 'uses def.name for the table name', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWith('a new model')).to.be.true
done()
it 'uses def.options.collectionName for the table name if present'
, (done) ->
def.options =
collectionName: 'anothername'
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledOnce).to.be.true
expect(def.Qb.calledWith('anothername')).to.be.true
done()
it 'uses the given database connection for the connection', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWith('a new model', 'asqlcon')).to.be.true
done()
it 'sets modelName on the QueryBuilder object to the def.name'
, (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(res).to.have.property 'modelName'
expect(res.modelName).to.equal 'a new model'
def.options =
collectionName: '<NAME>'
res2 = sql.modelFactory def, 'asqlcon'
expect(res2.modelName).to.equal 'a new model'
done()
describe 'crudFactory', ->
it 'returns the common.crudModelFactory', (done) ->
expect(sql.crudFactory).to.equal 'crudModelFactoryStub'
done()
| true | path = require 'path'
sinon = require 'sinon'
expect = require('chai').expect
proxyquire = require 'proxyquire'
dir = path.normalize __dirname + '../../../../../server'
module.exports = () ->
describe 'sql', ->
sql = null
tediousStub =
Connection: ->
Request: ->
queryQueueStub =
runQuery: ->
sqlHelperStub =
generateWhereClause: ->
""
stubs =
'tedious': tediousStub
'../crudModelFactory': 'crudModelFactoryStub'
'./queryQueue': queryQueueStub
'./sqlHelper': sqlHelperStub
beforeEach ->
sql = proxyquire(dir + '/common/dal/sql', stubs)
describe 'Exports', ->
it 'connect', (done) ->
expect(sql).to.have.ownProperty 'connect'
expect(sql.connect).to.be.a 'function'
done()
it 'QueryBuilder', (done) ->
expect(sql).to.have.ownProperty 'QueryBuilder'
done()
it 'schemaFactory', (done) ->
expect(sql).to.have.ownProperty 'schemaFactory'
done()
it 'modelFactory', (done) ->
expect(sql).to.have.ownProperty 'modelFactory'
done()
it 'crudFactory', (done) ->
expect(sql).to.have.ownProperty 'crudFactory'
done()
describe 'QueryBuilder', ->
qb = {}
beforeEach ->
qb = new sql.QueryBuilder("aTable", "aConnection", "_id")
describe 'Properties', ->
it 'query: String', (done) ->
expect(qb).to.have.ownProperty 'query'
expect(qb.query).to.be.a 'String'
expect(qb.query).to.equal ""
done()
it 'returnArray: Boolean (true)', (done) ->
expect(qb).to.have.ownProperty 'returnArray'
expect(qb.returnArray).to.be.a 'boolean'
expect(qb.returnArray).to.be.true
done()
it 'table: String', (done) ->
expect(qb).to.have.ownProperty 'table'
expect(qb.table).to.be.a 'String'
expect(qb.table).to.equal 'aTable'
done()
it 'dbConnection: Object', (done) ->
expect(qb).to.have.ownProperty 'dbConnection'
expect(qb.dbConnection).to.equal 'aConnection'
done()
describe 'Methods', ->
it 'exec: (callback) ->', (done) ->
expect(qb).to.have.property 'exec'
expect(qb.exec).to.be.a 'function'
done()
it 'create: (obj, callback) ->', (done) ->
expect(qb).to.have.property 'create'
expect(qb.create).to.be.a 'function'
done()
it 'find: (query, callback) ->', (done) ->
expect(qb).to.have.property 'find'
expect(qb.find).to.be.a 'function'
done()
it 'findOne: (query, callback) ->', (done) ->
expect(qb).to.have.property 'findOne'
expect(qb.findOne).to.be.a 'function'
done()
it 'sort: (query, callback) ->', (done) ->
expect(qb).to.have.property 'sort'
expect(qb.sort).to.be.a 'function'
done()
it 'skip: (num, cb) ->', (done) ->
expect(qb).to.have.property 'skip'
expect(qb.skip).to.be.a 'function'
done()
it 'limit: (num, cb) ->', (done) ->
expect(qb).to.have.property 'limit'
expect(qb.limit).to.be.a 'function'
done()
it 'count: (query, cb) ->', (done) ->
expect(qb).to.have.property 'count'
expect(qb.count).to.be.a 'function'
done()
it 'findByIdAndUpdate: (id, update, options, callback)', (done) ->
expect(qb).to.have.property 'findByIdAndUpdate'
expect(qb.findByIdAndUpdate).to.be.a 'function'
done()
it 'remove: (query, callback)', (done) ->
expect(qb).to.have.property 'remove'
expect(qb.remove).to.be.a 'function'
done()
describe 'exec', ->
beforeEach ->
sinon.spy queryQueueStub, 'runQuery'
afterEach ->
queryQueueStub.runQuery.restore()
it 'calls runQuery with query returnArray dbConnection and cb'
, (done) ->
qb.query = 'hello'
qb.returnArray = true
qb.dbConnection = 'bob'
qb.exec 'aCallback'
expect(queryQueueStub.runQuery.calledWithExactly(
'hello', true, 'bob', 'aCallback'
), 'runQuery not given correct arguments').to.be.true
done()
describe 'create', ->
beforeEach ->
sinon.spy qb, 'exec'
query =
'abc': 1
'def': 'bob'
qb.returnArray = false
qb.create query, 'callbackstub'
it 'sets returnArray to false', (done) ->
expect(qb.returnArray).to.be.false
done()
it 'inserts into correct table', (done) ->
expect(qb.query.indexOf('INSERT INTO aTable')).to.equal 0
done()
it 'decomposes the query object into sql', (done) ->
expect(qb.query)
.to.equal "INSERT INTO aTable (abc, def) VALUES ('1', 'bob') "
done()
it 'executes the query', (done) ->
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly('callbackstub')).to.be.true
done()
describe 'find', ->
beforeEach ->
sinon.spy qb, 'exec'
sinon.stub sqlHelperStub, 'generateWhereClause'
qb.returnArray = false
afterEach ->
sqlHelperStub.generateWhereClause.restore()
it 'sets returnArray to true', (done) ->
qb.find "abc", "123"
expect(qb.returnArray).to.be.true
done()
it 'selects from correct table', (done) ->
qb.find "abc", "another callback"
expect(qb.query.indexOf('SELECT * FROM aTable')).to.equal 0
done()
it 'generates a whereclause from the query', (done) ->
qb.find "abc", "another callback"
expect(sqlHelperStub.generateWhereClause.calledWithExactly 'abc')
.to.be.true
done()
it 'appends whereclase to the query', (done) ->
sqlHelperStub.generateWhereClause.returns " WHERE CLAUSE"
qb.find "abc", "another callback"
expect(qb.query).to.equal "SELECT * FROM aTable WHERE CLAUSE"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.find "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.find "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.find "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.find "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'findOne', ->
beforeEach ->
sinon.spy qb, 'exec'
sinon.stub sqlHelperStub, 'generateWhereClause'
afterEach ->
sqlHelperStub.generateWhereClause.restore()
it 'selects from correct table', (done) ->
qb.findOne "abc", "another callback"
expect(qb.query.indexOf('SELECT TOP 1 * FROM aTable')).to.equal 0
done()
it 'generates a whereclause from the query', (done) ->
qb.findOne "abc", "another callback"
expect(sqlHelperStub.generateWhereClause.calledWithExactly 'abc')
.to.be.true
done()
it 'appends whereclase to the query', (done) ->
sqlHelperStub.generateWhereClause.returns " WHERE CLAUSE"
qb.findOne "abc", "another callback"
expect(qb.query)
.to.equal "SELECT TOP 1 * FROM aTable WHERE CLAUSE"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.findOne "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.findOne "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.findOne "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.findOne "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'sort', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.sort "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.sort "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.sort "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.sort "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'skip', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.skip "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.skip "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.skip "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.skip "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'limit', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'returns the query builder if no callback', (done) ->
res = qb.limit "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.limit "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.limit "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.limit "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'count', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'sets the query to count all rows in the table', (done) ->
qb.count "abc", "def"
expect(qb.query).to.equal 'SELECT COUNT(*) FROM aTable'
done()
it 'returns the query builder if no callback', (done) ->
res = qb.count "abc"
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.count "abc"
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.count "abc", "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.count "abc", "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'findByIdAndUpdate', ->
beforeEach ->
sinon.spy qb, 'exec'
it 'sets the query', (done) ->
qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(qb.query)
.to.equal "UPDATE aTable SET abc= 'def' WHERE _id = 123"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.findByIdAndUpdate "123", {abc: "def"}
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.findByIdAndUpdate "123", {abc: "def"}
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.count qb.findByIdAndUpdate "123", {abc: "def"}, "callback"
expect(qb.exec.calledOnce).to.be.true
done()
describe 'remove', ->
beforeEach ->
sinon.spy qb, 'exec'
qb.returnArray = true
it 'sets returnArray to false', (done) ->
qb.remove {id: "123"}, "callback"
expect(qb.returnArray).to.be.false
done()
it 'removes based on the id given in query', (done) ->
qb.remove {_id: "123"}, "callback"
expect(qb.query)
.to.equal 'DELETE FROM aTable WHERE _id = ' + "123"
done()
it 'returns the query builder if no callback', (done) ->
res = qb.remove {id: "123"}
expect(qb).to.equal res
done()
it 'does not run a query if no callback passed', (done) ->
qb.remove {id: "123"}
expect(qb.exec.notCalled, "exec was called").to.be.true
done()
it 'returns nothing if callback is specified', (done) ->
res = qb.remove {id: "123"}, "callback"
expect(res).to.not.exist
done()
it 'executes the query if callback is specified', (done) ->
qb.remove {id: "123"}, "callback"
expect(qb.exec.calledOnce).to.be.true
expect(qb.exec.calledWithExactly "callback").to.be.true
done()
describe 'connect', ->
beforeEach ->
sinon.stub tediousStub, 'Connection'
tediousStub.Connection.returns {a: 'sql connection'}
afterEach ->
tediousStub.Connection.restore()
it 'creates a new sql connection with conf', (done) ->
res = sql.connect 'bob'
expect(tediousStub.Connection.calledOnce).to.be.true
expect(tediousStub.Connection.calledWithExactly 'bob')
.to.be.true
expect(res).to.deep.equal {a: 'sql connection'}
done()
describe 'schemaFactory', ->
result = {}
def =
schemaDefiniton: 'a definition'
beforeEach ->
result = sql.schemaFactory def
it 'sets schema to the given defition', (done) ->
expect(result).to.have.ownProperty 'schema'
expect(result.schema).to.equal def.schemaDefinition
done()
it 'defines a virtual method on the returned schema object'
, (done) ->
expect(result).to.have.ownProperty 'virtual'
expect(result.virtual).to.be.a 'function'
done()
it 'the virtual method defines a get function', (done) ->
virtualResult = result.virtual 'bob'
expect(virtualResult).to.have.ownProperty 'get'
expect(virtualResult.get).to.be.a 'function'
expect(virtualResult.get('abc')).to.not.exist
done()
it 'the virtual method defines a set function', (done) ->
virtualResult = result.virtual 'bob'
expect(virtualResult).to.have.ownProperty 'set'
expect(virtualResult.set).to.be.a 'function'
expect(virtualResult.set('abc')).to.not.exist
done()
it 'defines an empty methods object on the returned schema object'
, (done) ->
expect(result).to.have.ownProperty 'methods'
expect(result.methods).to.be.an 'object'
expect(result.methods).to.deep.equal {}
done()
it 'defines a pre function on the returned schema object', (done) ->
expect(result).to.have.ownProperty 'pre'
expect(result.pre).to.be.a 'function'
expect(result.pre('abc','def')).to.not.exist
done()
describe 'modelFactory', ->
def = {}
res = null
beforeEach ->
def =
name: 'a new model'
Qb: sinon.spy()
afterEach ->
def = null
it 'returns a new query builder instance from def.qb', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWithNew()).to.be.true
done()
it 'uses def.name for the table name', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWith('a new model')).to.be.true
done()
it 'uses def.options.collectionName for the table name if present'
, (done) ->
def.options =
collectionName: 'anothername'
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledOnce).to.be.true
expect(def.Qb.calledWith('anothername')).to.be.true
done()
it 'uses the given database connection for the connection', (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(def.Qb.calledWith('a new model', 'asqlcon')).to.be.true
done()
it 'sets modelName on the QueryBuilder object to the def.name'
, (done) ->
res = sql.modelFactory def, 'asqlcon'
expect(res).to.have.property 'modelName'
expect(res.modelName).to.equal 'a new model'
def.options =
collectionName: 'PI:NAME:<NAME>END_PI'
res2 = sql.modelFactory def, 'asqlcon'
expect(res2.modelName).to.equal 'a new model'
done()
describe 'crudFactory', ->
it 'returns the common.crudModelFactory', (done) ->
expect(sql.crudFactory).to.equal 'crudModelFactoryStub'
done()
|
[
{
"context": "###\n Pokemon Go (c) ManInTheMiddle Radar \"mod\"\n Michael Strassburger <codepoet@cpan.org>\n\n Enriches every PokeStop de",
"end": 70,
"score": 0.9998758435249329,
"start": 50,
"tag": "NAME",
"value": "Michael Strassburger"
},
{
"context": "anInTheMiddle Radar \"mo... | example.ingameRadar.coffee | noobcakes4603/tinkering | 393 | ###
Pokemon Go (c) ManInTheMiddle Radar "mod"
Michael Strassburger <codepoet@cpan.org>
Enriches every PokeStop description with information about
- directions to nearby wild pokemons
- time left if a PokeStop has an active lure
###
PokemonGoMITM = require './lib/pokemon-go-mitm'
changeCase = require 'change-case'
moment = require 'moment'
LatLon = require('geodesy').LatLonSpherical
pokemons = []
currentLocation = null
mapRadius = 150 # Approx size of level 15 s2 cell
server = new PokemonGoMITM port: 8081
# Fetch our current location as soon as it gets passed to the API
.addRequestHandler "GetMapObjects", (data) ->
currentLocation = new LatLon data.latitude, data.longitude
console.log "[+] Current position of the player #{currentLocation}"
false
# Parse the wild pokemons nearby
.addResponseHandler "GetMapObjects", (data) ->
return false if not data.map_cells.length
oldPokemons = pokemons
pokemons = []
seen = {}
# Store wild pokemons
addPokemon = (pokemon) ->
return if seen[pokemon.encounter_id]
return if pokemon.time_till_hidden_ms < 0
console.log "new wild pokemon", pokemon
pokemons.push pokemon
seen[pokemon.encounter_id] = pokemon
for cell in data.map_cells
addPokemon pokemon for pokemon in cell.wild_pokemons
# Use server timestamp
timestampMs = Number(data.map_cells[0].current_timestamp_ms)
# Add previously known pokemon, unless expired
for pokemon in oldPokemons when not seen[pokemon.encounter_id]
expirationMs = Number(pokemon.last_modified_timestamp_ms) + pokemon.time_till_hidden_ms
pokemons.push pokemon unless expirationMs < timestampMs
seen[pokemon.encounter_id] = pokemon
# Correct steps display for known nearby Pokémon (idea by @zaksabeast)
return false if not currentLocation
for cell in data.map_cells
for nearby in cell.nearby_pokemons when seen[nearby.encounter_id]
pokemon = seen[nearby.encounter_id]
position = new LatLon pokemon.latitude, pokemon.longitude
nearby.distance_in_meters = Math.floor currentLocation.distanceTo position
data
# Whenever a poke spot is opened, populate it with the radar info!
.addResponseHandler "FortDetails", (data) ->
console.log "fetched fort request", data
info = ""
# Populate some neat info about the pokemon's whereabouts
pokemonInfo = (pokemon) ->
name = changeCase.titleCase pokemon.pokemon_data.pokemon_id
name = name.replace(" Male", "♂").replace(" Female", "♀")
expirationMs = Number(pokemon.last_modified_timestamp_ms) + pokemon.time_till_hidden_ms
position = new LatLon pokemon.latitude, pokemon.longitude
expires = moment(expirationMs).fromNow()
distance = Math.floor currentLocation.distanceTo position
bearing = currentLocation.bearingTo position
direction = switch true
when bearing>330 then "↑"
when bearing>285 then "↖"
when bearing>240 then "←"
when bearing>195 then "↙"
when bearing>150 then "↓"
when bearing>105 then "↘"
when bearing>60 then "→"
when bearing>15 then "↗"
else "↑"
"#{name} #{direction} #{distance}m expires #{expires}"
# Create map marker for pokemon location
markers = {}
addMarker = (id, lat, lon) ->
label = id.charAt(0)
name = changeCase.paramCase id.replace(/_([MF]).*/, "_$1")
icon = "http://raw.github.com/msikma/pokesprite/master/icons/pokemon/regular/#{name}.png"
markers[id] = "&markers=label:#{label}%7Cicon:#{icon}" if not markers[id]
markers[id] += "%7C#{lat},#{lon}"
for modifier in data.modifiers when modifier.item_id is 'ITEM_TROY_DISK'
expires = moment(Number(modifier.expiration_timestamp_ms)).fromNow()
info += "Lure by #{modifier.deployer_player_codename} expires #{expires}\n"
mapPokemons = []
if currentLocation
# Limit to map radius
for pokemon in pokemons
position = new LatLon pokemon.latitude, pokemon.longitude
if mapRadius > currentLocation.distanceTo position
mapPokemons.push pokemon
addMarker(pokemon.pokemon_data.pokemon_id, pokemon.latitude, pokemon.longitude)
# Create map image url
loc = "#{currentLocation.lat},#{currentLocation.lon}"
img = "http://maps.googleapis.com/maps/api/staticmap?" +
"center=#{loc}&zoom=17&size=384x512&markers=color:blue%7Csize:tiny%7C#{loc}"
img += (marker for id, marker of markers).join ""
data.image_urls.unshift img
# Sort pokemons by distance
mapPokemons.sort (p1, p2) ->
d1 = currentLocation.distanceTo new LatLon(p1.latitude, p1.longitude)
d2 = currentLocation.distanceTo new LatLon(p2.latitude, p2.longitude)
d1 - d2
info += if mapPokemons.length
(pokemonInfo(pokemon) for pokemon in mapPokemons).join "\n"
else
"No wild Pokémon near you..."
data.description = info
data
| 210865 | ###
Pokemon Go (c) ManInTheMiddle Radar "mod"
<NAME> <<EMAIL>>
Enriches every PokeStop description with information about
- directions to nearby wild pokemons
- time left if a PokeStop has an active lure
###
PokemonGoMITM = require './lib/pokemon-go-mitm'
changeCase = require 'change-case'
moment = require 'moment'
LatLon = require('geodesy').LatLonSpherical
pokemons = []
currentLocation = null
mapRadius = 150 # Approx size of level 15 s2 cell
server = new PokemonGoMITM port: 8081
# Fetch our current location as soon as it gets passed to the API
.addRequestHandler "GetMapObjects", (data) ->
currentLocation = new LatLon data.latitude, data.longitude
console.log "[+] Current position of the player #{currentLocation}"
false
# Parse the wild pokemons nearby
.addResponseHandler "GetMapObjects", (data) ->
return false if not data.map_cells.length
oldPokemons = pokemons
pokemons = []
seen = {}
# Store wild pokemons
addPokemon = (pokemon) ->
return if seen[pokemon.encounter_id]
return if pokemon.time_till_hidden_ms < 0
console.log "new wild pokemon", pokemon
pokemons.push pokemon
seen[pokemon.encounter_id] = pokemon
for cell in data.map_cells
addPokemon pokemon for pokemon in cell.wild_pokemons
# Use server timestamp
timestampMs = Number(data.map_cells[0].current_timestamp_ms)
# Add previously known pokemon, unless expired
for pokemon in oldPokemons when not seen[pokemon.encounter_id]
expirationMs = Number(pokemon.last_modified_timestamp_ms) + pokemon.time_till_hidden_ms
pokemons.push pokemon unless expirationMs < timestampMs
seen[pokemon.encounter_id] = pokemon
# Correct steps display for known nearby Pokémon (idea by @zaksabeast)
return false if not currentLocation
for cell in data.map_cells
for nearby in cell.nearby_pokemons when seen[nearby.encounter_id]
pokemon = seen[nearby.encounter_id]
position = new LatLon pokemon.latitude, pokemon.longitude
nearby.distance_in_meters = Math.floor currentLocation.distanceTo position
data
# Whenever a poke spot is opened, populate it with the radar info!
.addResponseHandler "FortDetails", (data) ->
console.log "fetched fort request", data
info = ""
# Populate some neat info about the pokemon's whereabouts
pokemonInfo = (pokemon) ->
name = changeCase.titleCase pokemon.pokemon_data.pokemon_id
name = name.replace(" Male", "♂").replace(" Female", "♀")
expirationMs = Number(pokemon.last_modified_timestamp_ms) + pokemon.time_till_hidden_ms
position = new LatLon pokemon.latitude, pokemon.longitude
expires = moment(expirationMs).fromNow()
distance = Math.floor currentLocation.distanceTo position
bearing = currentLocation.bearingTo position
direction = switch true
when bearing>330 then "↑"
when bearing>285 then "↖"
when bearing>240 then "←"
when bearing>195 then "↙"
when bearing>150 then "↓"
when bearing>105 then "↘"
when bearing>60 then "→"
when bearing>15 then "↗"
else "↑"
"#{name} #{direction} #{distance}m expires #{expires}"
# Create map marker for pokemon location
markers = {}
addMarker = (id, lat, lon) ->
label = id.charAt(0)
name = changeCase.paramCase id.replace(/_([MF]).*/, "_$1")
icon = "http://raw.github.com/msikma/pokesprite/master/icons/pokemon/regular/#{name}.png"
markers[id] = "&markers=label:#{label}%7Cicon:#{icon}" if not markers[id]
markers[id] += "%7C#{lat},#{lon}"
for modifier in data.modifiers when modifier.item_id is 'ITEM_TROY_DISK'
expires = moment(Number(modifier.expiration_timestamp_ms)).fromNow()
info += "Lure by #{modifier.deployer_player_codename} expires #{expires}\n"
mapPokemons = []
if currentLocation
# Limit to map radius
for pokemon in pokemons
position = new LatLon pokemon.latitude, pokemon.longitude
if mapRadius > currentLocation.distanceTo position
mapPokemons.push pokemon
addMarker(pokemon.pokemon_data.pokemon_id, pokemon.latitude, pokemon.longitude)
# Create map image url
loc = "#{currentLocation.lat},#{currentLocation.lon}"
img = "http://maps.googleapis.com/maps/api/staticmap?" +
"center=#{loc}&zoom=17&size=384x512&markers=color:blue%7Csize:tiny%7C#{loc}"
img += (marker for id, marker of markers).join ""
data.image_urls.unshift img
# Sort pokemons by distance
mapPokemons.sort (p1, p2) ->
d1 = currentLocation.distanceTo new LatLon(p1.latitude, p1.longitude)
d2 = currentLocation.distanceTo new LatLon(p2.latitude, p2.longitude)
d1 - d2
info += if mapPokemons.length
(pokemonInfo(pokemon) for pokemon in mapPokemons).join "\n"
else
"No wild Pokémon near you..."
data.description = info
data
| true | ###
Pokemon Go (c) ManInTheMiddle Radar "mod"
PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Enriches every PokeStop description with information about
- directions to nearby wild pokemons
- time left if a PokeStop has an active lure
###
PokemonGoMITM = require './lib/pokemon-go-mitm'
changeCase = require 'change-case'
moment = require 'moment'
LatLon = require('geodesy').LatLonSpherical
pokemons = []
currentLocation = null
mapRadius = 150 # Approx size of level 15 s2 cell
server = new PokemonGoMITM port: 8081
# Fetch our current location as soon as it gets passed to the API
.addRequestHandler "GetMapObjects", (data) ->
currentLocation = new LatLon data.latitude, data.longitude
console.log "[+] Current position of the player #{currentLocation}"
false
# Parse the wild pokemons nearby
.addResponseHandler "GetMapObjects", (data) ->
return false if not data.map_cells.length
oldPokemons = pokemons
pokemons = []
seen = {}
# Store wild pokemons
addPokemon = (pokemon) ->
return if seen[pokemon.encounter_id]
return if pokemon.time_till_hidden_ms < 0
console.log "new wild pokemon", pokemon
pokemons.push pokemon
seen[pokemon.encounter_id] = pokemon
for cell in data.map_cells
addPokemon pokemon for pokemon in cell.wild_pokemons
# Use server timestamp
timestampMs = Number(data.map_cells[0].current_timestamp_ms)
# Add previously known pokemon, unless expired
for pokemon in oldPokemons when not seen[pokemon.encounter_id]
expirationMs = Number(pokemon.last_modified_timestamp_ms) + pokemon.time_till_hidden_ms
pokemons.push pokemon unless expirationMs < timestampMs
seen[pokemon.encounter_id] = pokemon
# Correct steps display for known nearby Pokémon (idea by @zaksabeast)
return false if not currentLocation
for cell in data.map_cells
for nearby in cell.nearby_pokemons when seen[nearby.encounter_id]
pokemon = seen[nearby.encounter_id]
position = new LatLon pokemon.latitude, pokemon.longitude
nearby.distance_in_meters = Math.floor currentLocation.distanceTo position
data
# Whenever a poke spot is opened, populate it with the radar info!
.addResponseHandler "FortDetails", (data) ->
console.log "fetched fort request", data
info = ""
# Populate some neat info about the pokemon's whereabouts
pokemonInfo = (pokemon) ->
name = changeCase.titleCase pokemon.pokemon_data.pokemon_id
name = name.replace(" Male", "♂").replace(" Female", "♀")
expirationMs = Number(pokemon.last_modified_timestamp_ms) + pokemon.time_till_hidden_ms
position = new LatLon pokemon.latitude, pokemon.longitude
expires = moment(expirationMs).fromNow()
distance = Math.floor currentLocation.distanceTo position
bearing = currentLocation.bearingTo position
direction = switch true
when bearing>330 then "↑"
when bearing>285 then "↖"
when bearing>240 then "←"
when bearing>195 then "↙"
when bearing>150 then "↓"
when bearing>105 then "↘"
when bearing>60 then "→"
when bearing>15 then "↗"
else "↑"
"#{name} #{direction} #{distance}m expires #{expires}"
# Create map marker for pokemon location
markers = {}
addMarker = (id, lat, lon) ->
label = id.charAt(0)
name = changeCase.paramCase id.replace(/_([MF]).*/, "_$1")
icon = "http://raw.github.com/msikma/pokesprite/master/icons/pokemon/regular/#{name}.png"
markers[id] = "&markers=label:#{label}%7Cicon:#{icon}" if not markers[id]
markers[id] += "%7C#{lat},#{lon}"
for modifier in data.modifiers when modifier.item_id is 'ITEM_TROY_DISK'
expires = moment(Number(modifier.expiration_timestamp_ms)).fromNow()
info += "Lure by #{modifier.deployer_player_codename} expires #{expires}\n"
mapPokemons = []
if currentLocation
# Limit to map radius
for pokemon in pokemons
position = new LatLon pokemon.latitude, pokemon.longitude
if mapRadius > currentLocation.distanceTo position
mapPokemons.push pokemon
addMarker(pokemon.pokemon_data.pokemon_id, pokemon.latitude, pokemon.longitude)
# Create map image url
loc = "#{currentLocation.lat},#{currentLocation.lon}"
img = "http://maps.googleapis.com/maps/api/staticmap?" +
"center=#{loc}&zoom=17&size=384x512&markers=color:blue%7Csize:tiny%7C#{loc}"
img += (marker for id, marker of markers).join ""
data.image_urls.unshift img
# Sort pokemons by distance
mapPokemons.sort (p1, p2) ->
d1 = currentLocation.distanceTo new LatLon(p1.latitude, p1.longitude)
d2 = currentLocation.distanceTo new LatLon(p2.latitude, p2.longitude)
d1 - d2
info += if mapPokemons.length
(pokemonInfo(pokemon) for pokemon in mapPokemons).join "\n"
else
"No wild Pokémon near you..."
data.description = info
data
|
[
{
"context": "\n type: 'text-field'\n dataKey: 'firstName'\n displayName: 'First Name'\n }\n ",
"end": 232,
"score": 0.9863082766532898,
"start": 223,
"tag": "NAME",
"value": "firstName"
},
{
"context": " dataKey: 'firstName'\n displayN... | examples/form-groups/FormGroupsFormDef.coffee | vedantchoubey098/react-form-builder | 44 | FormGroupsFormDef =
type: 'form'
components: [
type: 'form-group'
title: 'ID'
dataKey: 'id-group'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'firstName'
displayName: 'First Name'
}
{
type: 'text-field'
dataKey: 'lastName'
displayName: 'Last Name'
}
{
type: 'date-field'
dataKey: 'dob'
displayName: 'Date of Birth'
}
]
,
type: 'form-group'
title: 'Contact Information'
dataKey: 'contact-group'
collapsable: true
components: [
type: 'form-group'
title: 'Old School'
dataKey: 'oldschool'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'address'
displayName: 'Address'
}
{
type: 'text-field'
dataKey: 'city'
displayName: 'City'
}
{
type: 'number-field'
dataKey: 'zipcode'
displayName: 'Zip Code'
}
]
,
type: 'form-group'
title: 'Cool kid'
dataKey: 'coolkid'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'email'
displayName: 'Email'
}
{
type: 'text-field'
dataKey: 'twitter'
displayName: 'Twitter'
}
]
]
]
module.exports = FormGroupsFormDef | 21305 | FormGroupsFormDef =
type: 'form'
components: [
type: 'form-group'
title: 'ID'
dataKey: 'id-group'
collapsable: true
components: [
{
type: 'text-field'
dataKey: '<NAME>'
displayName: '<NAME>'
}
{
type: 'text-field'
dataKey: '<NAME>'
displayName: '<NAME>'
}
{
type: 'date-field'
dataKey: 'dob'
displayName: 'Date of B<NAME>'
}
]
,
type: 'form-group'
title: 'Contact Information'
dataKey: 'contact-group'
collapsable: true
components: [
type: 'form-group'
title: 'Old School'
dataKey: 'oldschool'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'address'
displayName: 'Address'
}
{
type: 'text-field'
dataKey: 'city'
displayName: 'City'
}
{
type: 'number-field'
dataKey: 'zipcode'
displayName: 'Zip Code'
}
]
,
type: 'form-group'
title: 'Cool kid'
dataKey: 'coolkid'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'email'
displayName: 'Email'
}
{
type: 'text-field'
dataKey: 'twitter'
displayName: 'Twitter'
}
]
]
]
module.exports = FormGroupsFormDef | true | FormGroupsFormDef =
type: 'form'
components: [
type: 'form-group'
title: 'ID'
dataKey: 'id-group'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'PI:NAME:<NAME>END_PI'
displayName: 'PI:NAME:<NAME>END_PI'
}
{
type: 'text-field'
dataKey: 'PI:NAME:<NAME>END_PI'
displayName: 'PI:NAME:<NAME>END_PI'
}
{
type: 'date-field'
dataKey: 'dob'
displayName: 'Date of BPI:NAME:<NAME>END_PI'
}
]
,
type: 'form-group'
title: 'Contact Information'
dataKey: 'contact-group'
collapsable: true
components: [
type: 'form-group'
title: 'Old School'
dataKey: 'oldschool'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'address'
displayName: 'Address'
}
{
type: 'text-field'
dataKey: 'city'
displayName: 'City'
}
{
type: 'number-field'
dataKey: 'zipcode'
displayName: 'Zip Code'
}
]
,
type: 'form-group'
title: 'Cool kid'
dataKey: 'coolkid'
collapsable: true
components: [
{
type: 'text-field'
dataKey: 'email'
displayName: 'Email'
}
{
type: 'text-field'
dataKey: 'twitter'
displayName: 'Twitter'
}
]
]
]
module.exports = FormGroupsFormDef |
[
{
"context": ">\n expect(BackboneValidate.validators.email('test@example.net')).toBe(false)\n\n it 'should return true when a",
"end": 2383,
"score": 0.9999188184738159,
"start": 2367,
"tag": "EMAIL",
"value": "test@example.net"
},
{
"context": ">\n expect(BackboneValida... | specs/coffee/ValidatorSpecs.coffee | zestia/backbone.validate | 0 | describe 'Default validators', ->
it 'should be exposed', ->
expect(BackboneValidate.validators).toBeDefined()
describe 'the "required" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.required).toBeDefined()
it 'should return false when a value is provided', ->
expect(BackboneValidate.validators.required('testy')).toBe(false)
it 'should return true when a value is not provided', ->
expect(BackboneValidate.validators.required(undefined)).toBe(true)
expect(BackboneValidate.validators.required(null)).toBe(true)
expect(BackboneValidate.validators.required('')).toBe(true)
describe 'the "range" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.range).toBeDefined()
it 'should return false when a value isnt provided', ->
expect(BackboneValidate.validators.range([1, 10], undefined)).toBe(false)
expect(BackboneValidate.validators.range([1, 10], null)).toBe(false)
expect(BackboneValidate.validators.range([1, 10], '')).toBe(false)
it 'should return undefined when the value is within the specified range', ->
expect(BackboneValidate.validators.range([1, 10], 5)).toBe(false)
it 'should return true when the value is outside the specified range', ->
expect(BackboneValidate.validators.range([1, 10], 0)).toBe(true)
expect(BackboneValidate.validators.range([1, 10], 11)).toBe(true)
describe 'the "custom" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.custom).toBeDefined()
it 'should call the provided function with the correct parameters', ->
fn = jasmine.createSpy()
value = ''
attrs = {}
BackboneValidate.validators.custom(fn, value, attrs)
expect(fn).toHaveBeenCalledWith(value, attrs)
it 'should return the result of the provided function', ->
expect(BackboneValidate.validators.custom(-> true)).toBe(true)
expect(BackboneValidate.validators.custom(-> false)).toBe(false)
expect(BackboneValidate.validators.custom(->)).toBeUndefined
describe 'the "email" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.email).toBeDefined()
it 'should return false when a valid email address is provided', ->
expect(BackboneValidate.validators.email('test@example.net')).toBe(false)
it 'should return true when an invalid email address is provided', ->
expect(BackboneValidate.validators.email('abcd')).toBe(true)
it 'should return true when an accented character is in the email address', ->
expect(BackboneValidate.validators.email('fooé@gmail.com')).toBe(true)
it 'should return true when an email begins with mailto:', ->
expect(BackboneValidate.validators.email('mailto:someemail@google.com')).toBe(true)
it 'should return true when an email contains a space', ->
expect(BackboneValidate.validators.email('someemail@google.com Name Surname')).toBe(true)
it 'should return true when an email ends with a period', ->
expect(BackboneValidate.validators.email('someemail@google.com.')).toBe(true)
it 'should return true when an email is in the format name (email)', ->
expect(BackboneValidate.validators.email('Name Surname (email@gmail.com)')).toBe(true)
it 'should return false when an email contains a plus in the first part', ->
expect(BackboneValidate.validators.email('foo+bar@gmail.com')).toBe(false)
it 'should return false when an email TLD is long', ->
expect(BackboneValidate.validators.email('foo@gmail.community')).toBe(false)
it 'should return false when a local path is used as the first part', ->
expect(BackboneValidate.validators.email('"Hello World"@gmail.com')).toBe(false)
describe 'the "maxLength" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.maxLength).toBeDefined()
it 'should return false when a value not longer than max length provided', ->
expect(BackboneValidate.validators.maxLength(6, 'Hello')).toBe(false)
it 'should return true when a value longer than max length provided', ->
expect(BackboneValidate.validators.maxLength(6, 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.maxLength(6, undefined)).toBe(false)
expect(BackboneValidate.validators.maxLength(6, null)).toBe(false)
describe 'the "minLength" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.minLength).toBeDefined()
it 'should return false when a value is longer than min length provided', ->
expect(BackboneValidate.validators.minLength(4, 'Hello')).toBe(false)
it 'should return true when a value shorter than min length provided', ->
expect(BackboneValidate.validators.minLength(16, 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.minLength(6, undefined)).toBe(false)
expect(BackboneValidate.validators.minLength(6, null)).toBe(false)
describe 'the "lengthRange" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.lengthRange).toBeDefined()
it 'should return false when a value length is inside the length range provided', ->
expect(BackboneValidate.validators.lengthRange([4, 10], 'Hello')).toBe(false)
it 'should return true when a value length is outside of the length range provided', ->
expect(BackboneValidate.validators.lengthRange([1, 5], 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.lengthRange([1, 5], undefined)).toBe(true)
expect(BackboneValidate.validators.lengthRange([1, 5], null)).toBe(true)
| 9878 | describe 'Default validators', ->
it 'should be exposed', ->
expect(BackboneValidate.validators).toBeDefined()
describe 'the "required" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.required).toBeDefined()
it 'should return false when a value is provided', ->
expect(BackboneValidate.validators.required('testy')).toBe(false)
it 'should return true when a value is not provided', ->
expect(BackboneValidate.validators.required(undefined)).toBe(true)
expect(BackboneValidate.validators.required(null)).toBe(true)
expect(BackboneValidate.validators.required('')).toBe(true)
describe 'the "range" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.range).toBeDefined()
it 'should return false when a value isnt provided', ->
expect(BackboneValidate.validators.range([1, 10], undefined)).toBe(false)
expect(BackboneValidate.validators.range([1, 10], null)).toBe(false)
expect(BackboneValidate.validators.range([1, 10], '')).toBe(false)
it 'should return undefined when the value is within the specified range', ->
expect(BackboneValidate.validators.range([1, 10], 5)).toBe(false)
it 'should return true when the value is outside the specified range', ->
expect(BackboneValidate.validators.range([1, 10], 0)).toBe(true)
expect(BackboneValidate.validators.range([1, 10], 11)).toBe(true)
describe 'the "custom" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.custom).toBeDefined()
it 'should call the provided function with the correct parameters', ->
fn = jasmine.createSpy()
value = ''
attrs = {}
BackboneValidate.validators.custom(fn, value, attrs)
expect(fn).toHaveBeenCalledWith(value, attrs)
it 'should return the result of the provided function', ->
expect(BackboneValidate.validators.custom(-> true)).toBe(true)
expect(BackboneValidate.validators.custom(-> false)).toBe(false)
expect(BackboneValidate.validators.custom(->)).toBeUndefined
describe 'the "email" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.email).toBeDefined()
it 'should return false when a valid email address is provided', ->
expect(BackboneValidate.validators.email('<EMAIL>')).toBe(false)
it 'should return true when an invalid email address is provided', ->
expect(BackboneValidate.validators.email('abcd')).toBe(true)
it 'should return true when an accented character is in the email address', ->
expect(BackboneValidate.validators.email('<EMAIL>')).toBe(true)
it 'should return true when an email begins with mailto:', ->
expect(BackboneValidate.validators.email('mailto:<EMAIL>')).toBe(true)
it 'should return true when an email contains a space', ->
expect(BackboneValidate.validators.email('<EMAIL> Name Surname')).toBe(true)
it 'should return true when an email ends with a period', ->
expect(BackboneValidate.validators.email('<EMAIL>.')).toBe(true)
it 'should return true when an email is in the format name (email)', ->
expect(BackboneValidate.validators.email('Name Surname (<EMAIL>)')).toBe(true)
it 'should return false when an email contains a plus in the first part', ->
expect(BackboneValidate.validators.email('<EMAIL>')).toBe(false)
it 'should return false when an email TLD is long', ->
expect(BackboneValidate.validators.email('<EMAIL>')).toBe(false)
it 'should return false when a local path is used as the first part', ->
expect(BackboneValidate.validators.email('"Hello World"<EMAIL>')).toBe(false)
describe 'the "maxLength" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.maxLength).toBeDefined()
it 'should return false when a value not longer than max length provided', ->
expect(BackboneValidate.validators.maxLength(6, 'Hello')).toBe(false)
it 'should return true when a value longer than max length provided', ->
expect(BackboneValidate.validators.maxLength(6, 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.maxLength(6, undefined)).toBe(false)
expect(BackboneValidate.validators.maxLength(6, null)).toBe(false)
describe 'the "minLength" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.minLength).toBeDefined()
it 'should return false when a value is longer than min length provided', ->
expect(BackboneValidate.validators.minLength(4, 'Hello')).toBe(false)
it 'should return true when a value shorter than min length provided', ->
expect(BackboneValidate.validators.minLength(16, 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.minLength(6, undefined)).toBe(false)
expect(BackboneValidate.validators.minLength(6, null)).toBe(false)
describe 'the "lengthRange" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.lengthRange).toBeDefined()
it 'should return false when a value length is inside the length range provided', ->
expect(BackboneValidate.validators.lengthRange([4, 10], 'Hello')).toBe(false)
it 'should return true when a value length is outside of the length range provided', ->
expect(BackboneValidate.validators.lengthRange([1, 5], 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.lengthRange([1, 5], undefined)).toBe(true)
expect(BackboneValidate.validators.lengthRange([1, 5], null)).toBe(true)
| true | describe 'Default validators', ->
it 'should be exposed', ->
expect(BackboneValidate.validators).toBeDefined()
describe 'the "required" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.required).toBeDefined()
it 'should return false when a value is provided', ->
expect(BackboneValidate.validators.required('testy')).toBe(false)
it 'should return true when a value is not provided', ->
expect(BackboneValidate.validators.required(undefined)).toBe(true)
expect(BackboneValidate.validators.required(null)).toBe(true)
expect(BackboneValidate.validators.required('')).toBe(true)
describe 'the "range" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.range).toBeDefined()
it 'should return false when a value isnt provided', ->
expect(BackboneValidate.validators.range([1, 10], undefined)).toBe(false)
expect(BackboneValidate.validators.range([1, 10], null)).toBe(false)
expect(BackboneValidate.validators.range([1, 10], '')).toBe(false)
it 'should return undefined when the value is within the specified range', ->
expect(BackboneValidate.validators.range([1, 10], 5)).toBe(false)
it 'should return true when the value is outside the specified range', ->
expect(BackboneValidate.validators.range([1, 10], 0)).toBe(true)
expect(BackboneValidate.validators.range([1, 10], 11)).toBe(true)
describe 'the "custom" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.custom).toBeDefined()
it 'should call the provided function with the correct parameters', ->
fn = jasmine.createSpy()
value = ''
attrs = {}
BackboneValidate.validators.custom(fn, value, attrs)
expect(fn).toHaveBeenCalledWith(value, attrs)
it 'should return the result of the provided function', ->
expect(BackboneValidate.validators.custom(-> true)).toBe(true)
expect(BackboneValidate.validators.custom(-> false)).toBe(false)
expect(BackboneValidate.validators.custom(->)).toBeUndefined
describe 'the "email" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.email).toBeDefined()
it 'should return false when a valid email address is provided', ->
expect(BackboneValidate.validators.email('PI:EMAIL:<EMAIL>END_PI')).toBe(false)
it 'should return true when an invalid email address is provided', ->
expect(BackboneValidate.validators.email('abcd')).toBe(true)
it 'should return true when an accented character is in the email address', ->
expect(BackboneValidate.validators.email('PI:EMAIL:<EMAIL>END_PI')).toBe(true)
it 'should return true when an email begins with mailto:', ->
expect(BackboneValidate.validators.email('mailto:PI:EMAIL:<EMAIL>END_PI')).toBe(true)
it 'should return true when an email contains a space', ->
expect(BackboneValidate.validators.email('PI:EMAIL:<EMAIL>END_PI Name Surname')).toBe(true)
it 'should return true when an email ends with a period', ->
expect(BackboneValidate.validators.email('PI:EMAIL:<EMAIL>END_PI.')).toBe(true)
it 'should return true when an email is in the format name (email)', ->
expect(BackboneValidate.validators.email('Name Surname (PI:EMAIL:<EMAIL>END_PI)')).toBe(true)
it 'should return false when an email contains a plus in the first part', ->
expect(BackboneValidate.validators.email('PI:EMAIL:<EMAIL>END_PI')).toBe(false)
it 'should return false when an email TLD is long', ->
expect(BackboneValidate.validators.email('PI:EMAIL:<EMAIL>END_PI')).toBe(false)
it 'should return false when a local path is used as the first part', ->
expect(BackboneValidate.validators.email('"Hello World"PI:EMAIL:<EMAIL>END_PI')).toBe(false)
describe 'the "maxLength" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.maxLength).toBeDefined()
it 'should return false when a value not longer than max length provided', ->
expect(BackboneValidate.validators.maxLength(6, 'Hello')).toBe(false)
it 'should return true when a value longer than max length provided', ->
expect(BackboneValidate.validators.maxLength(6, 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.maxLength(6, undefined)).toBe(false)
expect(BackboneValidate.validators.maxLength(6, null)).toBe(false)
describe 'the "minLength" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.minLength).toBeDefined()
it 'should return false when a value is longer than min length provided', ->
expect(BackboneValidate.validators.minLength(4, 'Hello')).toBe(false)
it 'should return true when a value shorter than min length provided', ->
expect(BackboneValidate.validators.minLength(16, 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.minLength(6, undefined)).toBe(false)
expect(BackboneValidate.validators.minLength(6, null)).toBe(false)
describe 'the "lengthRange" validator', ->
it 'should be exposed', ->
expect(BackboneValidate.validators.lengthRange).toBeDefined()
it 'should return false when a value length is inside the length range provided', ->
expect(BackboneValidate.validators.lengthRange([4, 10], 'Hello')).toBe(false)
it 'should return true when a value length is outside of the length range provided', ->
expect(BackboneValidate.validators.lengthRange([1, 5], 'Hello World')).toBe(true)
it 'should return true when an undefined or null value is provided', ->
expect(BackboneValidate.validators.lengthRange([1, 5], undefined)).toBe(true)
expect(BackboneValidate.validators.lengthRange([1, 5], null)).toBe(true)
|
[
{
"context": "atch[1]\n [closestMatch, closestMatchScore] = ['Murgatroyd Kerfluffle', 0]\n explained = false\n for commonMethod i",
"end": 5874,
"score": 0.99969482421875,
"start": 5853,
"tag": "NAME",
"value": "Murgatroyd Kerfluffle"
},
{
"context": "'s error messages track... | src/problems.coffee | deepak1556/aether | 1 | ranges = require './ranges'
module.exports.createUserCodeProblem = (options) ->
options ?= {}
options.aether ?= @ # Can either be called standalone or as an Aether method
if options.type is 'transpile' and options.error
extractTranspileErrorDetails options
if options.type is 'runtime'
extractRuntimeErrorDetails options
reporter = options.reporter or 'unknown' # Source of the problem, like 'jshint' or 'esprima' or 'aether'
kind = options.kind or 'Unknown' # Like 'W075' or 'InvalidLHSInAssignment'
id = reporter + '_' + kind # Uniquely identifies reporter + kind combination
config = options.aether?.options?.problems?[id] or {} # Default problem level/message/hint overrides
p = isUserCodeProblem: true
p.id = id
p.level = config.level or options.level or 'error' # 'error', 'warning', 'info'
p.type = options.type or 'generic' # Like 'runtime' or 'transpile', maybe later 'lint'
p.message = config.message or options.message or "Unknown #{p.type} #{p.level}" # Main error message (short phrase)
p.hint = config.hint or options.hint or '' # Additional details about error message (sentence)
p.range = options.range # Like [{ofs: 305, row: 15, col: 15}, {ofs: 312, row: 15, col: 22}], or null
p.userInfo = options.userInfo ? {} # Record extra information with the error here
p
extractTranspileErrorDetails = (options) ->
code = options.code or ''
codePrefix = options.codePrefix or 'function wrapped() {\n"use strict";\n'
error = options.error
options.message = error.message
originalLines = code.slice(codePrefix.length).split '\n'
lineOffset = codePrefix.split('\n').length - 1
# TODO: move these into language-specific plugins
switch options.reporter
when 'jshint'
options.message ?= error.reason
options.kind ?= error.code
unless options.level
options.level = {E: 'error', W: 'warning', I: 'info'}[error.code[0]]
line = error.line - codePrefix.split('\n').length
if line >= 0
if error.evidence?.length
startCol = originalLines[line].indexOf error.evidence
endCol = startCol + error.evidence.length
else
[startCol, endCol] = [0, originalLines[line].length - 1]
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.rowColToPos(line, startCol, code, codePrefix),
ranges.rowColToPos(line, endCol, code, codePrefix)]
else
# TODO: if we type an unmatched {, for example, then it thinks that line -2's function wrapped() { is unmatched...
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.offsetToPos(0, code, codePrefix),
ranges.offsetToPos(code.length - 1, code, codePrefix)]
when 'esprima'
# TODO: column range should extend to whole token. Mod Esprima, or extend to end of line?
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column - 1, code, codePrefix),
ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column, code, codePrefix)]
when 'acorn_loose'
null
when 'csredux'
options.range = [ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column - 1, code, codePrefix),
ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column, code, codePrefix)]
when 'aether'
null
when 'closer'
if error.startOffset and error.endOffset
range = ranges.offsetsToRange(error.startOffset, error.endOffset, code)
options.range = [range.start, range.end]
when 'lua2js', 'luapegjs'
options.message ?= error.message
rng = ranges.offsetsToRange(error.offset, error.offset, code, '')
options.range = [rng.start, rng.end]
when 'filbert'
if error.loc
columnOffset = 0
columnOffset++ while originalLines[lineOffset - 2][columnOffset] is ' '
# filbert lines are 1-based, columns are 0-based
row = error.loc.line - lineOffset - 1
col = error.loc.column - columnOffset
start = ranges.rowColToPos(row, col, code)
end = ranges.rowColToPos(row, col + error.raisedAt - error.pos, code)
options.range = [start, end]
when 'iota'
null
else
console.warn "Unhandled UserCodeProblem reporter", options.reporter
options
extractRuntimeErrorDetails = (options) ->
if error = options.error
options.kind ?= error.name # I think this will pick up [Error, EvalError, RangeError, ReferenceError, SyntaxError, TypeError, URIError, DOMException]
options.message = explainErrorMessage error.message or error.toString()
options.hint = error.hint
options.level = error.level
options.userInfo = error.userInfo
options.range ?= options.aether?.lastStatementRange
if options.range
lineNumber = options.range[0].row + 1
if options.message.search(/^Line \d+/) != -1
options.message = options.message.replace /^Line \d+/, (match, n) -> "Line #{lineNumber}"
else
options.message = "Line #{lineNumber}: #{options.message}"
module.exports.commonMethods = commonMethods = ['moveRight', 'moveLeft', 'moveUp', 'moveDown', 'attackNearbyEnemy', 'say', 'move', 'attackNearestEnemy', 'shootAt', 'rotateTo', 'shoot', 'distance', 'getNearestEnemy', 'getEnemies', 'attack', 'setAction', 'setTarget', 'getFriends', 'patrol'] # TODO: should be part of user configuration
explainErrorMessage = (m) ->
if m is "RangeError: Maximum call stack size exceeded"
m += ". (Did you use call a function recursively?)"
missingMethodMatch = m.match /has no method '(.*?)'/
if missingMethodMatch
method = missingMethodMatch[1]
[closestMatch, closestMatchScore] = ['Murgatroyd Kerfluffle', 0]
explained = false
for commonMethod in commonMethods
if method is commonMethod
m += ". (#{missingMethodMatch[1]} not available in this challenge.)"
explained = true
break
else if method.toLowerCase() is commonMethod.toLowerCase()
m = "#{method} should be #{commonMethod} because JavaScript is case-sensitive."
explained = true
break
else
matchScore = string_score?.score commonMethod, method, 0.5
if matchScore > closestMatchScore
[closestMatch, closestMatchScore] = [commonMethod, matchScore]
unless explained
if closestMatchScore > 0.25
m += ". (Did you mean #{closestMatch}?)"
m = m.replace 'TypeError:', 'Error:'
m
# Esprima Harmony's error messages track V8's
# https://github.com/ariya/esprima/blob/harmony/esprima.js#L194
# JSHint's error and warning messages
# https://github.com/jshint/jshint/blob/master/src/messages.js
| 139048 | ranges = require './ranges'
module.exports.createUserCodeProblem = (options) ->
options ?= {}
options.aether ?= @ # Can either be called standalone or as an Aether method
if options.type is 'transpile' and options.error
extractTranspileErrorDetails options
if options.type is 'runtime'
extractRuntimeErrorDetails options
reporter = options.reporter or 'unknown' # Source of the problem, like 'jshint' or 'esprima' or 'aether'
kind = options.kind or 'Unknown' # Like 'W075' or 'InvalidLHSInAssignment'
id = reporter + '_' + kind # Uniquely identifies reporter + kind combination
config = options.aether?.options?.problems?[id] or {} # Default problem level/message/hint overrides
p = isUserCodeProblem: true
p.id = id
p.level = config.level or options.level or 'error' # 'error', 'warning', 'info'
p.type = options.type or 'generic' # Like 'runtime' or 'transpile', maybe later 'lint'
p.message = config.message or options.message or "Unknown #{p.type} #{p.level}" # Main error message (short phrase)
p.hint = config.hint or options.hint or '' # Additional details about error message (sentence)
p.range = options.range # Like [{ofs: 305, row: 15, col: 15}, {ofs: 312, row: 15, col: 22}], or null
p.userInfo = options.userInfo ? {} # Record extra information with the error here
p
extractTranspileErrorDetails = (options) ->
code = options.code or ''
codePrefix = options.codePrefix or 'function wrapped() {\n"use strict";\n'
error = options.error
options.message = error.message
originalLines = code.slice(codePrefix.length).split '\n'
lineOffset = codePrefix.split('\n').length - 1
# TODO: move these into language-specific plugins
switch options.reporter
when 'jshint'
options.message ?= error.reason
options.kind ?= error.code
unless options.level
options.level = {E: 'error', W: 'warning', I: 'info'}[error.code[0]]
line = error.line - codePrefix.split('\n').length
if line >= 0
if error.evidence?.length
startCol = originalLines[line].indexOf error.evidence
endCol = startCol + error.evidence.length
else
[startCol, endCol] = [0, originalLines[line].length - 1]
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.rowColToPos(line, startCol, code, codePrefix),
ranges.rowColToPos(line, endCol, code, codePrefix)]
else
# TODO: if we type an unmatched {, for example, then it thinks that line -2's function wrapped() { is unmatched...
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.offsetToPos(0, code, codePrefix),
ranges.offsetToPos(code.length - 1, code, codePrefix)]
when 'esprima'
# TODO: column range should extend to whole token. Mod Esprima, or extend to end of line?
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column - 1, code, codePrefix),
ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column, code, codePrefix)]
when 'acorn_loose'
null
when 'csredux'
options.range = [ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column - 1, code, codePrefix),
ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column, code, codePrefix)]
when 'aether'
null
when 'closer'
if error.startOffset and error.endOffset
range = ranges.offsetsToRange(error.startOffset, error.endOffset, code)
options.range = [range.start, range.end]
when 'lua2js', 'luapegjs'
options.message ?= error.message
rng = ranges.offsetsToRange(error.offset, error.offset, code, '')
options.range = [rng.start, rng.end]
when 'filbert'
if error.loc
columnOffset = 0
columnOffset++ while originalLines[lineOffset - 2][columnOffset] is ' '
# filbert lines are 1-based, columns are 0-based
row = error.loc.line - lineOffset - 1
col = error.loc.column - columnOffset
start = ranges.rowColToPos(row, col, code)
end = ranges.rowColToPos(row, col + error.raisedAt - error.pos, code)
options.range = [start, end]
when 'iota'
null
else
console.warn "Unhandled UserCodeProblem reporter", options.reporter
options
extractRuntimeErrorDetails = (options) ->
if error = options.error
options.kind ?= error.name # I think this will pick up [Error, EvalError, RangeError, ReferenceError, SyntaxError, TypeError, URIError, DOMException]
options.message = explainErrorMessage error.message or error.toString()
options.hint = error.hint
options.level = error.level
options.userInfo = error.userInfo
options.range ?= options.aether?.lastStatementRange
if options.range
lineNumber = options.range[0].row + 1
if options.message.search(/^Line \d+/) != -1
options.message = options.message.replace /^Line \d+/, (match, n) -> "Line #{lineNumber}"
else
options.message = "Line #{lineNumber}: #{options.message}"
module.exports.commonMethods = commonMethods = ['moveRight', 'moveLeft', 'moveUp', 'moveDown', 'attackNearbyEnemy', 'say', 'move', 'attackNearestEnemy', 'shootAt', 'rotateTo', 'shoot', 'distance', 'getNearestEnemy', 'getEnemies', 'attack', 'setAction', 'setTarget', 'getFriends', 'patrol'] # TODO: should be part of user configuration
explainErrorMessage = (m) ->
if m is "RangeError: Maximum call stack size exceeded"
m += ". (Did you use call a function recursively?)"
missingMethodMatch = m.match /has no method '(.*?)'/
if missingMethodMatch
method = missingMethodMatch[1]
[closestMatch, closestMatchScore] = ['<NAME>', 0]
explained = false
for commonMethod in commonMethods
if method is commonMethod
m += ". (#{missingMethodMatch[1]} not available in this challenge.)"
explained = true
break
else if method.toLowerCase() is commonMethod.toLowerCase()
m = "#{method} should be #{commonMethod} because JavaScript is case-sensitive."
explained = true
break
else
matchScore = string_score?.score commonMethod, method, 0.5
if matchScore > closestMatchScore
[closestMatch, closestMatchScore] = [commonMethod, matchScore]
unless explained
if closestMatchScore > 0.25
m += ". (Did you mean #{closestMatch}?)"
m = m.replace 'TypeError:', 'Error:'
m
# Esprima Harmony's error messages track V8's
# https://github.com/ariya/esprima/blob/harmony/esprima.js#L194
# JSHint's error and warning messages
# https://github.com/jshint/jshint/blob/master/src/messages.js
| true | ranges = require './ranges'
module.exports.createUserCodeProblem = (options) ->
options ?= {}
options.aether ?= @ # Can either be called standalone or as an Aether method
if options.type is 'transpile' and options.error
extractTranspileErrorDetails options
if options.type is 'runtime'
extractRuntimeErrorDetails options
reporter = options.reporter or 'unknown' # Source of the problem, like 'jshint' or 'esprima' or 'aether'
kind = options.kind or 'Unknown' # Like 'W075' or 'InvalidLHSInAssignment'
id = reporter + '_' + kind # Uniquely identifies reporter + kind combination
config = options.aether?.options?.problems?[id] or {} # Default problem level/message/hint overrides
p = isUserCodeProblem: true
p.id = id
p.level = config.level or options.level or 'error' # 'error', 'warning', 'info'
p.type = options.type or 'generic' # Like 'runtime' or 'transpile', maybe later 'lint'
p.message = config.message or options.message or "Unknown #{p.type} #{p.level}" # Main error message (short phrase)
p.hint = config.hint or options.hint or '' # Additional details about error message (sentence)
p.range = options.range # Like [{ofs: 305, row: 15, col: 15}, {ofs: 312, row: 15, col: 22}], or null
p.userInfo = options.userInfo ? {} # Record extra information with the error here
p
extractTranspileErrorDetails = (options) ->
code = options.code or ''
codePrefix = options.codePrefix or 'function wrapped() {\n"use strict";\n'
error = options.error
options.message = error.message
originalLines = code.slice(codePrefix.length).split '\n'
lineOffset = codePrefix.split('\n').length - 1
# TODO: move these into language-specific plugins
switch options.reporter
when 'jshint'
options.message ?= error.reason
options.kind ?= error.code
unless options.level
options.level = {E: 'error', W: 'warning', I: 'info'}[error.code[0]]
line = error.line - codePrefix.split('\n').length
if line >= 0
if error.evidence?.length
startCol = originalLines[line].indexOf error.evidence
endCol = startCol + error.evidence.length
else
[startCol, endCol] = [0, originalLines[line].length - 1]
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.rowColToPos(line, startCol, code, codePrefix),
ranges.rowColToPos(line, endCol, code, codePrefix)]
else
# TODO: if we type an unmatched {, for example, then it thinks that line -2's function wrapped() { is unmatched...
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.offsetToPos(0, code, codePrefix),
ranges.offsetToPos(code.length - 1, code, codePrefix)]
when 'esprima'
# TODO: column range should extend to whole token. Mod Esprima, or extend to end of line?
# TODO: no way this works; what am I doing with code prefixes?
options.range = [ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column - 1, code, codePrefix),
ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column, code, codePrefix)]
when 'acorn_loose'
null
when 'csredux'
options.range = [ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column - 1, code, codePrefix),
ranges.rowColToPos(error.lineNumber - 1 - lineOffset, error.column, code, codePrefix)]
when 'aether'
null
when 'closer'
if error.startOffset and error.endOffset
range = ranges.offsetsToRange(error.startOffset, error.endOffset, code)
options.range = [range.start, range.end]
when 'lua2js', 'luapegjs'
options.message ?= error.message
rng = ranges.offsetsToRange(error.offset, error.offset, code, '')
options.range = [rng.start, rng.end]
when 'filbert'
if error.loc
columnOffset = 0
columnOffset++ while originalLines[lineOffset - 2][columnOffset] is ' '
# filbert lines are 1-based, columns are 0-based
row = error.loc.line - lineOffset - 1
col = error.loc.column - columnOffset
start = ranges.rowColToPos(row, col, code)
end = ranges.rowColToPos(row, col + error.raisedAt - error.pos, code)
options.range = [start, end]
when 'iota'
null
else
console.warn "Unhandled UserCodeProblem reporter", options.reporter
options
extractRuntimeErrorDetails = (options) ->
if error = options.error
options.kind ?= error.name # I think this will pick up [Error, EvalError, RangeError, ReferenceError, SyntaxError, TypeError, URIError, DOMException]
options.message = explainErrorMessage error.message or error.toString()
options.hint = error.hint
options.level = error.level
options.userInfo = error.userInfo
options.range ?= options.aether?.lastStatementRange
if options.range
lineNumber = options.range[0].row + 1
if options.message.search(/^Line \d+/) != -1
options.message = options.message.replace /^Line \d+/, (match, n) -> "Line #{lineNumber}"
else
options.message = "Line #{lineNumber}: #{options.message}"
module.exports.commonMethods = commonMethods = ['moveRight', 'moveLeft', 'moveUp', 'moveDown', 'attackNearbyEnemy', 'say', 'move', 'attackNearestEnemy', 'shootAt', 'rotateTo', 'shoot', 'distance', 'getNearestEnemy', 'getEnemies', 'attack', 'setAction', 'setTarget', 'getFriends', 'patrol'] # TODO: should be part of user configuration
explainErrorMessage = (m) ->
if m is "RangeError: Maximum call stack size exceeded"
m += ". (Did you use call a function recursively?)"
missingMethodMatch = m.match /has no method '(.*?)'/
if missingMethodMatch
method = missingMethodMatch[1]
[closestMatch, closestMatchScore] = ['PI:NAME:<NAME>END_PI', 0]
explained = false
for commonMethod in commonMethods
if method is commonMethod
m += ". (#{missingMethodMatch[1]} not available in this challenge.)"
explained = true
break
else if method.toLowerCase() is commonMethod.toLowerCase()
m = "#{method} should be #{commonMethod} because JavaScript is case-sensitive."
explained = true
break
else
matchScore = string_score?.score commonMethod, method, 0.5
if matchScore > closestMatchScore
[closestMatch, closestMatchScore] = [commonMethod, matchScore]
unless explained
if closestMatchScore > 0.25
m += ". (Did you mean #{closestMatch}?)"
m = m.replace 'TypeError:', 'Error:'
m
# Esprima Harmony's error messages track V8's
# https://github.com/ariya/esprima/blob/harmony/esprima.js#L194
# JSHint's error and warning messages
# https://github.com/jshint/jshint/blob/master/src/messages.js
|
[
{
"context": "stance represents a Kaffee log message.\n\n @author Fabian M. <mail.fabianm@gmail.com>\n###\nclass LogEvent\n\n\t##",
"end": 86,
"score": 0.9951158761978149,
"start": 78,
"tag": "NAME",
"value": "Fabian M"
},
{
"context": "sents a Kaffee log message.\n\n @author Fabia... | kaffee/src/main/kaffee/event/logevent.coffee | fabianm/kaffee | 1 | ###
A {@link LogEvent} instance represents a Kaffee log message.
@author Fabian M. <mail.fabianm@gmail.com>
###
class LogEvent
###
Constructs a new {@link LogEvent}.
@since 0.3.0
@param manager The {@link EventManager} of this {@link LogEvent}.
@param level The level of this {@link LogEvent}.
@param message The message of this {@link LogEvent}.
@param stack The stacktrace.
@param callee The arguments.callee variable to provide more accurate stacktraces.
###
constructor: (@manager, @level, @message = "", @stack = "", callee = null) ->
this.message = message.message if message instanceof Error
this.stack ||= message.stack if message
unless this.stack
err = new Error this.message
err.name = ""
Error.captureStackTrace(err, callee || arguments.callee);
this.stack = err.stack
this.time = Date.now()
###
Returns the {@link EventManager} of this {@link LogEvent}.
@since 0.3.0
@return The {@link EventManager} of this {@link LogEvent}.
###
getEventManager: -> this.manager
###
Returns the message of this {@link LogEvent}.
@since 0.3.0
@return The message of this {@link LogEvent}.
###
getMessage: -> this.message
###
Sets the message of this {@link LogEvent}.
@since 0.3.0
@param message The message to set.
###
setMessage: (message) ->
this.message = message
this
###
Returns the level of this {@link LogEvent}.
@since 0.3.0
@return The level of this {@link LogEvent}.
###
getLevel: -> this.level
###
Sets the level of this {@link LogEvent}.
@since 0.3.0
@param level The level to set.
###
setLevel: (@level) -> this
###
Returns the time in milliseconds of this {@link LogEvent}.
@since 0.3.0
@return The time in milliseconds.
###
getTime: -> this.time
###
Sets the time in milliseconds of this {@link LogEvent}.
@since 0.3.0
@param time The time to set.
###
setTime: (@time) -> this.time = time
###
Determines if this {@link LogEvent} has a stack or not.
@since 0.3.0
@return <code>true</code> if this {@link LogEvent} has a stack, <code>false</code> otherwise.
###
hasStack: -> this.stack.length > 0
###
Returns the stack of this {@link LogEvent}.
@since 0.3.0
@return The stack of this {@link LogEvent}.
###
getStack: -> this.stack
###
Sets the stack of this {@link LogEvent}.
@since 0.3.0
@param stack The stack to set.
###
setStack: (@stack) -> this
module.exports = LogEvent
| 178923 | ###
A {@link LogEvent} instance represents a Kaffee log message.
@author <NAME>. <<EMAIL>>
###
class LogEvent
###
Constructs a new {@link LogEvent}.
@since 0.3.0
@param manager The {@link EventManager} of this {@link LogEvent}.
@param level The level of this {@link LogEvent}.
@param message The message of this {@link LogEvent}.
@param stack The stacktrace.
@param callee The arguments.callee variable to provide more accurate stacktraces.
###
constructor: (@manager, @level, @message = "", @stack = "", callee = null) ->
this.message = message.message if message instanceof Error
this.stack ||= message.stack if message
unless this.stack
err = new Error this.message
err.name = ""
Error.captureStackTrace(err, callee || arguments.callee);
this.stack = err.stack
this.time = Date.now()
###
Returns the {@link EventManager} of this {@link LogEvent}.
@since 0.3.0
@return The {@link EventManager} of this {@link LogEvent}.
###
getEventManager: -> this.manager
###
Returns the message of this {@link LogEvent}.
@since 0.3.0
@return The message of this {@link LogEvent}.
###
getMessage: -> this.message
###
Sets the message of this {@link LogEvent}.
@since 0.3.0
@param message The message to set.
###
setMessage: (message) ->
this.message = message
this
###
Returns the level of this {@link LogEvent}.
@since 0.3.0
@return The level of this {@link LogEvent}.
###
getLevel: -> this.level
###
Sets the level of this {@link LogEvent}.
@since 0.3.0
@param level The level to set.
###
setLevel: (@level) -> this
###
Returns the time in milliseconds of this {@link LogEvent}.
@since 0.3.0
@return The time in milliseconds.
###
getTime: -> this.time
###
Sets the time in milliseconds of this {@link LogEvent}.
@since 0.3.0
@param time The time to set.
###
setTime: (@time) -> this.time = time
###
Determines if this {@link LogEvent} has a stack or not.
@since 0.3.0
@return <code>true</code> if this {@link LogEvent} has a stack, <code>false</code> otherwise.
###
hasStack: -> this.stack.length > 0
###
Returns the stack of this {@link LogEvent}.
@since 0.3.0
@return The stack of this {@link LogEvent}.
###
getStack: -> this.stack
###
Sets the stack of this {@link LogEvent}.
@since 0.3.0
@param stack The stack to set.
###
setStack: (@stack) -> this
module.exports = LogEvent
| true | ###
A {@link LogEvent} instance represents a Kaffee log message.
@author PI:NAME:<NAME>END_PI. <PI:EMAIL:<EMAIL>END_PI>
###
class LogEvent
###
Constructs a new {@link LogEvent}.
@since 0.3.0
@param manager The {@link EventManager} of this {@link LogEvent}.
@param level The level of this {@link LogEvent}.
@param message The message of this {@link LogEvent}.
@param stack The stacktrace.
@param callee The arguments.callee variable to provide more accurate stacktraces.
###
constructor: (@manager, @level, @message = "", @stack = "", callee = null) ->
this.message = message.message if message instanceof Error
this.stack ||= message.stack if message
unless this.stack
err = new Error this.message
err.name = ""
Error.captureStackTrace(err, callee || arguments.callee);
this.stack = err.stack
this.time = Date.now()
###
Returns the {@link EventManager} of this {@link LogEvent}.
@since 0.3.0
@return The {@link EventManager} of this {@link LogEvent}.
###
getEventManager: -> this.manager
###
Returns the message of this {@link LogEvent}.
@since 0.3.0
@return The message of this {@link LogEvent}.
###
getMessage: -> this.message
###
Sets the message of this {@link LogEvent}.
@since 0.3.0
@param message The message to set.
###
setMessage: (message) ->
this.message = message
this
###
Returns the level of this {@link LogEvent}.
@since 0.3.0
@return The level of this {@link LogEvent}.
###
getLevel: -> this.level
###
Sets the level of this {@link LogEvent}.
@since 0.3.0
@param level The level to set.
###
setLevel: (@level) -> this
###
Returns the time in milliseconds of this {@link LogEvent}.
@since 0.3.0
@return The time in milliseconds.
###
getTime: -> this.time
###
Sets the time in milliseconds of this {@link LogEvent}.
@since 0.3.0
@param time The time to set.
###
setTime: (@time) -> this.time = time
###
Determines if this {@link LogEvent} has a stack or not.
@since 0.3.0
@return <code>true</code> if this {@link LogEvent} has a stack, <code>false</code> otherwise.
###
hasStack: -> this.stack.length > 0
###
Returns the stack of this {@link LogEvent}.
@since 0.3.0
@return The stack of this {@link LogEvent}.
###
getStack: -> this.stack
###
Sets the stack of this {@link LogEvent}.
@since 0.3.0
@param stack The stack to set.
###
setStack: (@stack) -> this
module.exports = LogEvent
|
[
{
"context": "modules building tools\n#\n# Copyright (C) 2012-2013 Nikolay Nemshilov\n#\n\nfs = require('fs')\npath = require('pat",
"end": 81,
"score": 0.9998869895935059,
"start": 64,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | cli/test/build.coffee | lovely-io/lovely.io-stl | 2 | #
# The test modules building tools
#
# Copyright (C) 2012-2013 Nikolay Nemshilov
#
fs = require('fs')
path = require('path')
server = require('./server')
source = require('../source')
packge = require('../package')
cur_dir = process.cwd()
packg = packge.read(cur_dir)
method = if process.argv.indexOf('--minify') > -1 then 'minify' else 'compile'
build = source[method](cur_dir)
server.set "/#{packg.name}.js", build
server.set "/#{packg.name}-auto-dummy.html", """
<html>
<head>
<script src="/core.js"></script>
<script type="text/javascript">
Lovely(['#{packg.name}'], function() {});
</script>
</head>
<body>
Dummy page for: '#{packg.name}'
</body>
</html>
"""
exports.build = build
#
# Creates a callback for the mocha `before` calls
# that automatically binds the module, load's its
# page, extracts the module object and returns it
# in the callback function
#
exports.load = (options, callback)->
if !callback
callback = options
options = {}
if typeof(options) is 'string'
url = options
options = {}
else
url = "/#{packg.name}-auto-dummy.html"
(done)->
server.get url, options, (browser)->
callback(browser.window.Lovely.module(packg.name), browser.window, browser)
done()
| 200939 | #
# The test modules building tools
#
# Copyright (C) 2012-2013 <NAME>
#
fs = require('fs')
path = require('path')
server = require('./server')
source = require('../source')
packge = require('../package')
cur_dir = process.cwd()
packg = packge.read(cur_dir)
method = if process.argv.indexOf('--minify') > -1 then 'minify' else 'compile'
build = source[method](cur_dir)
server.set "/#{packg.name}.js", build
server.set "/#{packg.name}-auto-dummy.html", """
<html>
<head>
<script src="/core.js"></script>
<script type="text/javascript">
Lovely(['#{packg.name}'], function() {});
</script>
</head>
<body>
Dummy page for: '#{packg.name}'
</body>
</html>
"""
exports.build = build
#
# Creates a callback for the mocha `before` calls
# that automatically binds the module, load's its
# page, extracts the module object and returns it
# in the callback function
#
exports.load = (options, callback)->
if !callback
callback = options
options = {}
if typeof(options) is 'string'
url = options
options = {}
else
url = "/#{packg.name}-auto-dummy.html"
(done)->
server.get url, options, (browser)->
callback(browser.window.Lovely.module(packg.name), browser.window, browser)
done()
| true | #
# The test modules building tools
#
# Copyright (C) 2012-2013 PI:NAME:<NAME>END_PI
#
fs = require('fs')
path = require('path')
server = require('./server')
source = require('../source')
packge = require('../package')
cur_dir = process.cwd()
packg = packge.read(cur_dir)
method = if process.argv.indexOf('--minify') > -1 then 'minify' else 'compile'
build = source[method](cur_dir)
server.set "/#{packg.name}.js", build
server.set "/#{packg.name}-auto-dummy.html", """
<html>
<head>
<script src="/core.js"></script>
<script type="text/javascript">
Lovely(['#{packg.name}'], function() {});
</script>
</head>
<body>
Dummy page for: '#{packg.name}'
</body>
</html>
"""
exports.build = build
#
# Creates a callback for the mocha `before` calls
# that automatically binds the module, load's its
# page, extracts the module object and returns it
# in the callback function
#
exports.load = (options, callback)->
if !callback
callback = options
options = {}
if typeof(options) is 'string'
url = options
options = {}
else
url = "/#{packg.name}-auto-dummy.html"
(done)->
server.get url, options, (browser)->
callback(browser.window.Lovely.module(packg.name), browser.window, browser)
done()
|
[
{
"context": " \"add\": [\n {\n \"name\": \"Humans Survive\",\n \"id\": \"humans-survive\",\n ",
"end": 210,
"score": 0.7530972957611084,
"start": 196,
"tag": "NAME",
"value": "Humans Survive"
},
{
"context": " \"name\": \"Humans Surv... | app/lib/scripts/defaultScripts.coffee | flauta/codecombat | 1 | module.exports = [
{
"id": "Add Default Goals",
"channel": "god:new-world-created",
"noteChain": [
{
"goals": {
"add": [
{
"name": "Humans Survive",
"id": "humans-survive",
"saveThangs": [
"humans"
],
"worldEndsAfter": 3,
"howMany": 1,
"hiddenGoal": true
},
{
"name": "Ogres Die",
"id": "ogres-die",
"killThangs": [
"ogres"
],
"worldEndsAfter": 3,
"hiddenGoal": true
}
]
}
}
]
}
]
# Could add other default scripts, like not having to redo Victory Playback sequence from scratch every time.
| 210290 | module.exports = [
{
"id": "Add Default Goals",
"channel": "god:new-world-created",
"noteChain": [
{
"goals": {
"add": [
{
"name": "<NAME>",
"id": "humans-survive",
"saveThangs": [
"humans"
],
"worldEndsAfter": 3,
"howMany": 1,
"hiddenGoal": true
},
{
"name": "<NAME>",
"id": "ogres-die",
"killThangs": [
"ogres"
],
"worldEndsAfter": 3,
"hiddenGoal": true
}
]
}
}
]
}
]
# Could add other default scripts, like not having to redo Victory Playback sequence from scratch every time.
| true | module.exports = [
{
"id": "Add Default Goals",
"channel": "god:new-world-created",
"noteChain": [
{
"goals": {
"add": [
{
"name": "PI:NAME:<NAME>END_PI",
"id": "humans-survive",
"saveThangs": [
"humans"
],
"worldEndsAfter": 3,
"howMany": 1,
"hiddenGoal": true
},
{
"name": "PI:NAME:<NAME>END_PI",
"id": "ogres-die",
"killThangs": [
"ogres"
],
"worldEndsAfter": 3,
"hiddenGoal": true
}
]
}
}
]
}
]
# Could add other default scripts, like not having to redo Victory Playback sequence from scratch every time.
|
[
{
"context": "ates.STEADY_STATE\n\n secretData.passwordData = passwordData\n return secretData\n updateProfile(profile",
"end": 825,
"score": 0.9976686239242554,
"start": 813,
"tag": "PASSWORD",
"value": "passwordData"
},
{
"context": "ata.token = token\n\n secretDa... | src/js/extension/profiles/state_manager.coffee | obi1kenobi/jester | 2 | async = require('../../../deps/async')
logger = require('../../lib/util/logging').logger(['ext', 'profiles', 'state'])
secureStore = require('../../lib/secure_store')
states = require('./states')
updateProfile = (profile, storePassword, updateSecretData, cb) ->
async.waterfall [
(done) ->
secureStore.getSecret(profile, storePassword, done)
(secretData, done) ->
publicData = secureStore.getPublic(profile)
secretData = updateSecretData(secretData)
secureStore.setProfile(profile, storePassword, publicData, secretData, done)
], cb
StateManager =
isUsingRandomPassword: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.STEADY_STATE
secretData.passwordData = passwordData
return secretData
updateProfile(profile, storePassword, update, cb)
isCreatingToken: (profile, storePassword, token, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.CREATING_TOKEN
passwordData.token = token
secretData.passwordData = passwordData
return secretData
updateProfile(profile, storePassword, update, cb)
isUsingToken: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.USING_TOKEN
secretData.passwordData = passwordData
return secretData
updateProfile(profile, storePassword, update, cb)
isRevokingToken: (profile, storePassword, newRandomPassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.REVOKING_TOKEN
passwordData.randomPassword = newRandomPassword
secretData.passwordData = passwordData
return secretData
updateProfile(profile, storePassword, update, cb)
isInvalid: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.INVALID
secretData.passwordData = passwordData
return secretData
updateProfile(profile, storePassword, update, cb)
isInitializing: (profile, storePassword, service, publicData, username, \
userPassword, randomPassword, cb) ->
passwordData = {userPassword, randomPassword, state: states.INITIALIZING}
profileData = {service, username, passwordData}
secureStore.setProfile(profile, storePassword, publicData, profileData, cb)
isFinishedRepair: (profile, storePassword, service, publicData, username, \
userPassword, randomPassword, cb) ->
passwordData = {userPassword, randomPassword, state: states.STEADY_STATE}
profileData = {service, username, passwordData}
secureStore.setProfile(profile, storePassword, publicData, profileData, cb)
module.exports = StateManager
| 129659 | async = require('../../../deps/async')
logger = require('../../lib/util/logging').logger(['ext', 'profiles', 'state'])
secureStore = require('../../lib/secure_store')
states = require('./states')
updateProfile = (profile, storePassword, updateSecretData, cb) ->
async.waterfall [
(done) ->
secureStore.getSecret(profile, storePassword, done)
(secretData, done) ->
publicData = secureStore.getPublic(profile)
secretData = updateSecretData(secretData)
secureStore.setProfile(profile, storePassword, publicData, secretData, done)
], cb
StateManager =
isUsingRandomPassword: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.STEADY_STATE
secretData.passwordData = <PASSWORD>
return secretData
updateProfile(profile, storePassword, update, cb)
isCreatingToken: (profile, storePassword, token, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.CREATING_TOKEN
passwordData.token = token
secretData.passwordData = <PASSWORD>
return secretData
updateProfile(profile, storePassword, update, cb)
isUsingToken: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.USING_TOKEN
secretData.passwordData = <PASSWORD>
return secretData
updateProfile(profile, storePassword, update, cb)
isRevokingToken: (profile, storePassword, newRandomPassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.REVOKING_TOKEN
passwordData.randomPassword = <PASSWORD>
secretData.passwordData = <PASSWORD>
return secretData
updateProfile(profile, storePassword, update, cb)
isInvalid: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.INVALID
secretData.passwordData = <PASSWORD>
return secretData
updateProfile(profile, storePassword, update, cb)
isInitializing: (profile, storePassword, service, publicData, username, \
userPassword, randomPassword, cb) ->
passwordData = {<PASSWORD>, <PASSWORD>, state: states.INITIALIZING}
profileData = {service, username, passwordData}
secureStore.setProfile(profile, storePassword, publicData, profileData, cb)
isFinishedRepair: (profile, storePassword, service, publicData, username, \
userPassword, randomPassword, cb) ->
passwordData = {<PASSWORD>, <PASSWORD>, state: states.STEADY_STATE}
profileData = {service, username, passwordData}
secureStore.setProfile(profile, storePassword, publicData, profileData, cb)
module.exports = StateManager
| true | async = require('../../../deps/async')
logger = require('../../lib/util/logging').logger(['ext', 'profiles', 'state'])
secureStore = require('../../lib/secure_store')
states = require('./states')
updateProfile = (profile, storePassword, updateSecretData, cb) ->
async.waterfall [
(done) ->
secureStore.getSecret(profile, storePassword, done)
(secretData, done) ->
publicData = secureStore.getPublic(profile)
secretData = updateSecretData(secretData)
secureStore.setProfile(profile, storePassword, publicData, secretData, done)
], cb
StateManager =
isUsingRandomPassword: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.STEADY_STATE
secretData.passwordData = PI:PASSWORD:<PASSWORD>END_PI
return secretData
updateProfile(profile, storePassword, update, cb)
isCreatingToken: (profile, storePassword, token, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.CREATING_TOKEN
passwordData.token = token
secretData.passwordData = PI:PASSWORD:<PASSWORD>END_PI
return secretData
updateProfile(profile, storePassword, update, cb)
isUsingToken: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.USING_TOKEN
secretData.passwordData = PI:PASSWORD:<PASSWORD>END_PI
return secretData
updateProfile(profile, storePassword, update, cb)
isRevokingToken: (profile, storePassword, newRandomPassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.REVOKING_TOKEN
passwordData.randomPassword = PI:PASSWORD:<PASSWORD>END_PI
secretData.passwordData = PI:PASSWORD:<PASSWORD>END_PI
return secretData
updateProfile(profile, storePassword, update, cb)
isInvalid: (profile, storePassword, cb) ->
update = (secretData) ->
{passwordData} = secretData
passwordData.state = states.INVALID
secretData.passwordData = PI:PASSWORD:<PASSWORD>END_PI
return secretData
updateProfile(profile, storePassword, update, cb)
isInitializing: (profile, storePassword, service, publicData, username, \
userPassword, randomPassword, cb) ->
passwordData = {PI:PASSWORD:<PASSWORD>END_PI, PI:PASSWORD:<PASSWORD>END_PI, state: states.INITIALIZING}
profileData = {service, username, passwordData}
secureStore.setProfile(profile, storePassword, publicData, profileData, cb)
isFinishedRepair: (profile, storePassword, service, publicData, username, \
userPassword, randomPassword, cb) ->
passwordData = {PI:PASSWORD:<PASSWORD>END_PI, PI:PASSWORD:<PASSWORD>END_PI, state: states.STEADY_STATE}
profileData = {service, username, passwordData}
secureStore.setProfile(profile, storePassword, publicData, profileData, cb)
module.exports = StateManager
|
[
{
"context": "$(document).ready ->\n\tkey = 'CTRL'\n\t# key = 'Cmd' if navigator.userAgent.toLowerCas",
"end": 33,
"score": 0.9965521097183228,
"start": 29,
"tag": "KEY",
"value": "CTRL"
},
{
"context": "$(document).ready ->\n\tkey = 'CTRL'\n\t# key = 'Cmd' if navigator.userAgent.toL... | webroot/coffee/main.coffee | sitex/theme_olia | 0 | $(document).ready ->
key = 'CTRL'
# key = 'Cmd' if navigator.userAgent.toLowerCase().indexOf('mac') != - 1
$('#bookmarkme').click ->
if window.sidebar && window.sidebar.addPanel
window.sidebar.addPanel(document.title,window.location.href,'')
else if window.external && ('AddFavorite' in window.external) # IE Favorite
window.external.AddFavorite(location.href,document.title)
else if(window.opera && window.print) # Opera Hotlist
this.title=document.title;
return true;
else # webkit - safari/chrome
alert('Нажмите ' + key + ' + D, чтобы добавить в избранное.');
$('.names a').click ->
rel = $(this).data('rel')
$('.reviewer').hide()
$('.names a').removeClass('initial')
$('.names a').removeClass('active')
$(this).addClass('active')
$(rel).show()
$('#ob_overlay').show()
$('.ytvideo').hide() # video
false
$('a.close_reviewer, #ob_overlay').click ->
$('.names a').removeClass('active')
$('.names a').addClass('initial')
$('.reviewer').hide()
$('#ob_overlay').hide()
$('.ytvideo').show() # video
false
| 64683 | $(document).ready ->
key = '<KEY>'
# key = '<KEY>' if navigator.userAgent.toLowerCase().indexOf('mac') != - 1
$('#bookmarkme').click ->
if window.sidebar && window.sidebar.addPanel
window.sidebar.addPanel(document.title,window.location.href,'')
else if window.external && ('AddFavorite' in window.external) # IE Favorite
window.external.AddFavorite(location.href,document.title)
else if(window.opera && window.print) # Opera Hotlist
this.title=document.title;
return true;
else # webkit - safari/chrome
alert('Нажмите ' + key + ' + D, чтобы добавить в избранное.');
$('.names a').click ->
rel = $(this).data('rel')
$('.reviewer').hide()
$('.names a').removeClass('initial')
$('.names a').removeClass('active')
$(this).addClass('active')
$(rel).show()
$('#ob_overlay').show()
$('.ytvideo').hide() # video
false
$('a.close_reviewer, #ob_overlay').click ->
$('.names a').removeClass('active')
$('.names a').addClass('initial')
$('.reviewer').hide()
$('#ob_overlay').hide()
$('.ytvideo').show() # video
false
| true | $(document).ready ->
key = 'PI:KEY:<KEY>END_PI'
# key = 'PI:KEY:<KEY>END_PI' if navigator.userAgent.toLowerCase().indexOf('mac') != - 1
$('#bookmarkme').click ->
if window.sidebar && window.sidebar.addPanel
window.sidebar.addPanel(document.title,window.location.href,'')
else if window.external && ('AddFavorite' in window.external) # IE Favorite
window.external.AddFavorite(location.href,document.title)
else if(window.opera && window.print) # Opera Hotlist
this.title=document.title;
return true;
else # webkit - safari/chrome
alert('Нажмите ' + key + ' + D, чтобы добавить в избранное.');
$('.names a').click ->
rel = $(this).data('rel')
$('.reviewer').hide()
$('.names a').removeClass('initial')
$('.names a').removeClass('active')
$(this).addClass('active')
$(rel).show()
$('#ob_overlay').show()
$('.ytvideo').hide() # video
false
$('a.close_reviewer, #ob_overlay').click ->
$('.names a').removeClass('active')
$('.names a').addClass('initial')
$('.reviewer').hide()
$('#ob_overlay').hide()
$('.ytvideo').show() # video
false
|
[
{
"context": "s not on the schema', (done) ->\n doc = {name: 'wallace', dog: 'gromit'}\n wongo.save 'MockStrict', doc",
"end": 464,
"score": 0.9923456907272339,
"start": 457,
"tag": "NAME",
"value": "wallace"
},
{
"context": "ert.ifError(err)\n assert.equal(result.name, 'w... | test/strict.test.coffee | wookets/wongo | 0 | assert = require 'assert'
wongo = require '../lib/wongo'
wongo.schema 'MockStrict',
fields:
name: String # simplest property
mixed: {type: 'mixed'} # support mixed types
child:
name: String
grandchild:
name: String
children: [
name: String
grandchildren: [
name: String
]
]
describe 'Wongo Strict', ->
it 'should prune any values not on the schema', (done) ->
doc = {name: 'wallace', dog: 'gromit'}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.name, 'wallace')
assert.ok(not result.dog)
done()
it 'should ignore if something is put into a mixed type property', (done) ->
doc = {name: 'meow', mixed: 'stinger'}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.name, 'meow')
assert.equal(result.mixed, 'stinger')
done()
it 'should prune any values not on a child schema', (done) ->
doc = {name: 'wallace', child: {name: 'pete', dog: 'grommit'}}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.child.name, 'pete')
assert.ok(not result.child.dog)
done()
it 'should prune any values not on a grandchild schema', (done) ->
doc = {name: 'wallace', child: {name: 'pete', grandchild: {name: 'bum', dog: 'buca'}}}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.child.grandchild.name, 'bum')
assert.ok(not result.child.grandchild.dog)
done()
it 'should prune any values not on a children schema', (done) ->
doc = {name: 'wallace', children: [{name: 'bum', dog: 'meow'}]}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.children[0].name, 'bum')
assert.ok(not result.children[0].dog)
done() | 114457 | assert = require 'assert'
wongo = require '../lib/wongo'
wongo.schema 'MockStrict',
fields:
name: String # simplest property
mixed: {type: 'mixed'} # support mixed types
child:
name: String
grandchild:
name: String
children: [
name: String
grandchildren: [
name: String
]
]
describe 'Wongo Strict', ->
it 'should prune any values not on the schema', (done) ->
doc = {name: '<NAME>', dog: 'gromit'}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.name, '<NAME>')
assert.ok(not result.dog)
done()
it 'should ignore if something is put into a mixed type property', (done) ->
doc = {name: '<NAME>', mixed: 'stinger'}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.name, '<NAME>')
assert.equal(result.mixed, 'stinger')
done()
it 'should prune any values not on a child schema', (done) ->
doc = {name: '<NAME>', child: {name: '<NAME>', dog: 'grommit'}}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.child.name, '<NAME>')
assert.ok(not result.child.dog)
done()
it 'should prune any values not on a grandchild schema', (done) ->
doc = {name: '<NAME>', child: {name: '<NAME>', grandchild: {name: '<NAME>', dog: 'buca'}}}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.child.grandchild.name, '<NAME>')
assert.ok(not result.child.grandchild.dog)
done()
it 'should prune any values not on a children schema', (done) ->
doc = {name: '<NAME>', children: [{name: 'bum', dog: 'meow'}]}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.children[0].name, 'bum')
assert.ok(not result.children[0].dog)
done() | true | assert = require 'assert'
wongo = require '../lib/wongo'
wongo.schema 'MockStrict',
fields:
name: String # simplest property
mixed: {type: 'mixed'} # support mixed types
child:
name: String
grandchild:
name: String
children: [
name: String
grandchildren: [
name: String
]
]
describe 'Wongo Strict', ->
it 'should prune any values not on the schema', (done) ->
doc = {name: 'PI:NAME:<NAME>END_PI', dog: 'gromit'}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.name, 'PI:NAME:<NAME>END_PI')
assert.ok(not result.dog)
done()
it 'should ignore if something is put into a mixed type property', (done) ->
doc = {name: 'PI:NAME:<NAME>END_PI', mixed: 'stinger'}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.name, 'PI:NAME:<NAME>END_PI')
assert.equal(result.mixed, 'stinger')
done()
it 'should prune any values not on a child schema', (done) ->
doc = {name: 'PI:NAME:<NAME>END_PI', child: {name: 'PI:NAME:<NAME>END_PI', dog: 'grommit'}}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.child.name, 'PI:NAME:<NAME>END_PI')
assert.ok(not result.child.dog)
done()
it 'should prune any values not on a grandchild schema', (done) ->
doc = {name: 'PI:NAME:<NAME>END_PI', child: {name: 'PI:NAME:<NAME>END_PI', grandchild: {name: 'PI:NAME:<NAME>END_PI', dog: 'buca'}}}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.child.grandchild.name, 'PI:NAME:<NAME>END_PI')
assert.ok(not result.child.grandchild.dog)
done()
it 'should prune any values not on a children schema', (done) ->
doc = {name: 'PI:NAME:<NAME>END_PI', children: [{name: 'bum', dog: 'meow'}]}
wongo.save 'MockStrict', doc, (err, result) ->
assert.ifError(err)
assert.equal(result.children[0].name, 'bum')
assert.ok(not result.children[0].dog)
done() |
[
{
"context": "ub.io/grunt-docc-multi\n#\n# Copyright (©) 2013 Joe Mathews (abovethewater)\n# Licensed under the [MIT](http:/",
"end": 112,
"score": 0.9998605847358704,
"start": 101,
"tag": "NAME",
"value": "Joe Mathews"
}
] | tasks/docco.coffee | gruntjs-updater/grunt-docco-multi | 0 | #
# grunt-docco-multi
# https://abovethewater.github.io/grunt-docc-multi
#
# Copyright (©) 2013 Joe Mathews (abovethewater)
# Licensed under the [MIT](http://abovethewater.mit-license.org) licence
#
module.exports = (grunt) ->
docco = require 'docco'
grunt.registerMultiTask 'docco', 'Generate multiple linked docs with docco', () ->
files = this.filesSrc
if not this.filesSrc or this.filesSrc.length is 0
grunt.log.warn 'no files to docco.'
return
options = this.options({ args : files})
timeout = options.timeout or 500
# docco is async due to use of mkdir -p
done = this.async()
docco.document options, () ->
grunt.log.ok "done"
# todo having now looked at grunt-docco, that seems to work with a callback, but it just does not wash here
setTimeout () ->
grunt.log.ok files.length + ' file' + (if files.length is 1 then '' else 's') + ' doccoed.'
done()
, timeout
| 118106 | #
# grunt-docco-multi
# https://abovethewater.github.io/grunt-docc-multi
#
# Copyright (©) 2013 <NAME> (abovethewater)
# Licensed under the [MIT](http://abovethewater.mit-license.org) licence
#
module.exports = (grunt) ->
docco = require 'docco'
grunt.registerMultiTask 'docco', 'Generate multiple linked docs with docco', () ->
files = this.filesSrc
if not this.filesSrc or this.filesSrc.length is 0
grunt.log.warn 'no files to docco.'
return
options = this.options({ args : files})
timeout = options.timeout or 500
# docco is async due to use of mkdir -p
done = this.async()
docco.document options, () ->
grunt.log.ok "done"
# todo having now looked at grunt-docco, that seems to work with a callback, but it just does not wash here
setTimeout () ->
grunt.log.ok files.length + ' file' + (if files.length is 1 then '' else 's') + ' doccoed.'
done()
, timeout
| true | #
# grunt-docco-multi
# https://abovethewater.github.io/grunt-docc-multi
#
# Copyright (©) 2013 PI:NAME:<NAME>END_PI (abovethewater)
# Licensed under the [MIT](http://abovethewater.mit-license.org) licence
#
module.exports = (grunt) ->
docco = require 'docco'
grunt.registerMultiTask 'docco', 'Generate multiple linked docs with docco', () ->
files = this.filesSrc
if not this.filesSrc or this.filesSrc.length is 0
grunt.log.warn 'no files to docco.'
return
options = this.options({ args : files})
timeout = options.timeout or 500
# docco is async due to use of mkdir -p
done = this.async()
docco.document options, () ->
grunt.log.ok "done"
# todo having now looked at grunt-docco, that seems to work with a callback, but it just does not wash here
setTimeout () ->
grunt.log.ok files.length + ' file' + (if files.length is 1 then '' else 's') + ' doccoed.'
done()
, timeout
|
[
{
"context": "= cityToLevel city\n\n @emit 'data',\n key: \"cityid:#{city.cityId}\"\n value: val\n\n quad = quadtre",
"end": 301,
"score": 0.9714847207069397,
"start": 292,
"tag": "KEY",
"value": "cityid:#{"
},
{
"context": "city\n\n @emit 'data',\n key: \"cit... | src/setup/transform/cityToKeyedRecords.coffee | brianshaler/kerplunk-place | 0 | es = require 'event-stream'
cityToLevel = require './cityToLevel'
quadtree = require '../../quadtree'
module.exports = ->
es.through (city) ->
name = city.asciiName
unless name? and name.length > 0
name = city.name
val = cityToLevel city
@emit 'data',
key: "cityid:#{city.cityId}"
value: val
quad = quadtree city.location, 12
@emit 'data',
key: "ll:#{quad}#{city.cityId}"
value: "cityid:#{city.cityId}"
if name and name.length > 0
# add uniqueness to the end
@emit 'data',
key: "nm:#{name.toLowerCase() + city.cityId}"
value: val
| 193669 | es = require 'event-stream'
cityToLevel = require './cityToLevel'
quadtree = require '../../quadtree'
module.exports = ->
es.through (city) ->
name = city.asciiName
unless name? and name.length > 0
name = city.name
val = cityToLevel city
@emit 'data',
key: "<KEY>city.<KEY>
value: val
quad = quadtree city.location, 12
@emit 'data',
key: "<KEY>
value: "cityid:#{city.cityId}"
if name and name.length > 0
# add uniqueness to the end
@emit 'data',
key: "<KEY>
value: val
| true | es = require 'event-stream'
cityToLevel = require './cityToLevel'
quadtree = require '../../quadtree'
module.exports = ->
es.through (city) ->
name = city.asciiName
unless name? and name.length > 0
name = city.name
val = cityToLevel city
@emit 'data',
key: "PI:KEY:<KEY>END_PIcity.PI:KEY:<KEY>END_PI
value: val
quad = quadtree city.location, 12
@emit 'data',
key: "PI:KEY:<KEY>END_PI
value: "cityid:#{city.cityId}"
if name and name.length > 0
# add uniqueness to the end
@emit 'data',
key: "PI:KEY:<KEY>END_PI
value: val
|
[
{
"context": " keyKey = current.key\n else\n keyKey = 'name'\n\n key = node.attributes[keyKey]\n\n if not k",
"end": 581,
"score": 0.6806005835533142,
"start": 577,
"tag": "KEY",
"value": "name"
},
{
"context": "ttributes[keyKey]\n\n if not key\n key = node.n... | src/eveapi/parser.coffee | sseemayer/NEOW | 12 | Q = require 'q'
_ = require 'lodash'
sax = require 'sax'
root = exports ? this
root.parse = (data, strict=true) ->
deferred = Q.defer()
parser = sax.createStream strict, trim: true
result = {}
stack = []
current = {element: result, name: null}
down = (newframe) ->
stack.push(newframe)
newframe
up = ->
head = stack.pop()
peek()
peek = ->
stack[stack.length - 1]
parser.on 'error', (err) ->
deferred.reject(err)
parser.on 'opentag', (node) ->
if node.name == 'row'
keyKey = current.key
else
keyKey = 'name'
key = node.attributes[keyKey]
if not key
key = node.name
if not current.element[key]
current.element[key] = {}
if node.name != 'rowset'
_.merge current.element[key], node.attributes
current = down {
element: current.element[key]
name: node.name
key: node.attributes.key
}
parser.on 'closetag', (tagName) ->
head = peek()
if not head then return
if head.name == tagName
current = up()
parser.on 'text', (text) ->
current.element.content = text
parser.on 'end', ->
if not result.eveapi
return deferred.reject new Error "Malformed XML reply!"
if result.eveapi.version != '2'
return deferred.reject new Error "Wrong eveapi version!"
if result.eveapi.error
return deferred.reject new Error result.eveapi.error.content
res = result.eveapi.result
res.currentTime = result.eveapi.currentTime.content
res.cachedUntil = result.eveapi.cachedUntil.content
deferred.resolve res
if data.pipe
data.pipe(parser)
else
parser.write(data)
parser.end()
deferred.promise
| 16325 | Q = require 'q'
_ = require 'lodash'
sax = require 'sax'
root = exports ? this
root.parse = (data, strict=true) ->
deferred = Q.defer()
parser = sax.createStream strict, trim: true
result = {}
stack = []
current = {element: result, name: null}
down = (newframe) ->
stack.push(newframe)
newframe
up = ->
head = stack.pop()
peek()
peek = ->
stack[stack.length - 1]
parser.on 'error', (err) ->
deferred.reject(err)
parser.on 'opentag', (node) ->
if node.name == 'row'
keyKey = current.key
else
keyKey = '<KEY>'
key = node.attributes[keyKey]
if not key
key = node<KEY>.name
if not current.element[key]
current.element[key] = {}
if node.name != 'rowset'
_.merge current.element[key], node.attributes
current = down {
element: current.element[key]
name: node.name
key: node.attributes.key
}
parser.on 'closetag', (tagName) ->
head = peek()
if not head then return
if head.name == tagName
current = up()
parser.on 'text', (text) ->
current.element.content = text
parser.on 'end', ->
if not result.eveapi
return deferred.reject new Error "Malformed XML reply!"
if result.eveapi.version != '2'
return deferred.reject new Error "Wrong eveapi version!"
if result.eveapi.error
return deferred.reject new Error result.eveapi.error.content
res = result.eveapi.result
res.currentTime = result.eveapi.currentTime.content
res.cachedUntil = result.eveapi.cachedUntil.content
deferred.resolve res
if data.pipe
data.pipe(parser)
else
parser.write(data)
parser.end()
deferred.promise
| true | Q = require 'q'
_ = require 'lodash'
sax = require 'sax'
root = exports ? this
root.parse = (data, strict=true) ->
deferred = Q.defer()
parser = sax.createStream strict, trim: true
result = {}
stack = []
current = {element: result, name: null}
down = (newframe) ->
stack.push(newframe)
newframe
up = ->
head = stack.pop()
peek()
peek = ->
stack[stack.length - 1]
parser.on 'error', (err) ->
deferred.reject(err)
parser.on 'opentag', (node) ->
if node.name == 'row'
keyKey = current.key
else
keyKey = 'PI:KEY:<KEY>END_PI'
key = node.attributes[keyKey]
if not key
key = nodePI:KEY:<KEY>END_PI.name
if not current.element[key]
current.element[key] = {}
if node.name != 'rowset'
_.merge current.element[key], node.attributes
current = down {
element: current.element[key]
name: node.name
key: node.attributes.key
}
parser.on 'closetag', (tagName) ->
head = peek()
if not head then return
if head.name == tagName
current = up()
parser.on 'text', (text) ->
current.element.content = text
parser.on 'end', ->
if not result.eveapi
return deferred.reject new Error "Malformed XML reply!"
if result.eveapi.version != '2'
return deferred.reject new Error "Wrong eveapi version!"
if result.eveapi.error
return deferred.reject new Error result.eveapi.error.content
res = result.eveapi.result
res.currentTime = result.eveapi.currentTime.content
res.cachedUntil = result.eveapi.cachedUntil.content
deferred.resolve res
if data.pipe
data.pipe(parser)
else
parser.write(data)
parser.end()
deferred.promise
|
[
{
"context": "######################\n# Copyright (c) 2013, 2014, William Stein and R. Andrew Ohana\n# All rights reserved.\n#\n# Re",
"end": 121,
"score": 0.9998456239700317,
"start": 108,
"tag": "NAME",
"value": "William Stein"
},
{
"context": "####\n# Copyright (c) 2013, 2014, Wil... | src/math3d.coffee | ohanar/math3d.js | 3 | ###############################################################################
# Copyright (c) 2013, 2014, William Stein and R. Andrew Ohana
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
if not document?
document = @document
math3d = {}
trunc = (str, max_length) ->
if not str?.length?
return str
if not max_length?
max_length = 1024
if str.length > max_length then str[0...max_length-3] + "..." else str
# Returns a new object with properties determined by those of opts and
# base. The properties in opts *must* all also appear in base. If an
# base property has value "defaults.required", then it must appear in
# opts. For each property prop of base not specified in opts, the
# corresponding value opts[prop] is set (all in a new copy of opts) to
# be base[prop].
defaults = (opts, base, allow_extra = false) ->
if not opts?
opts = {}
args = ->
try
"(opts=#{trunc JSON.stringify opts}, base=#{trunc JSON.stringify base})"
catch
""
if typeof opts isnt 'object'
# We put explicit traces before the errors in this function,
# since otherwise they can be very hard to debug.
console.trace()
throw "defaults -- TypeError: function takes inputs as an object #{args()}"
optsHasProp = (prop) -> opts.hasOwnProperty and opts[prop]?
res = {}
for prop, val of base
if val? and not optsHasProp(prop) # only record not undefined properties
if val is defaults.required
console.trace()
throw "defaults -- TypeError: property '#{prop}' must be specified #{args()}"
else
res[prop] = val
for prop, val of opts
if not (allow_extra or base.hasOwnProperty(prop))
console.trace()
throw "defaults -- TypeError: got an unexpected argument '#{prop}' #{args()}"
if val? # only record not undefined properties
res[prop] = val
return res
# WARNING -- don't accidentally use this as a default:
required = defaults.required = "__!!!!!!this is a required property!!!!!!__"
removeElement = (element) ->
if element? and (parent = element.parentElement)?
parent.removeChild element
loadScript = (script_src, callback) ->
runCallback = true
script = document.createElement 'script'
script.onload = ->
removeElement script
if runCallback
runCallback = false
callback()
script.onerror = ->
removeElement script
if runCallback
runCallback = false
callback "error loading script #{script.src}"
script.type = 'text/javascript'
script.charset = 'utf-8'
script.async = true
script.src = script_src
document.head.appendChild script
_loadingThreejsCallbacks = []
_orbitControlsSetup = false
math3d.threejsSource = "//cdnjs.cloudflare.com/ajax/libs/three.js/r68/three.min.js"
math3d.fontSources = []
math3d.loadThreejs = (callback) ->
if THREE? and _orbitControlsSetup and not math3d.fontSources.length
return callback()
_loadingThreejsCallbacks.push callback
if _loadingThreejsCallbacks.length > 1
return
runCallbacks = (error) ->
while callback = _loadingThreejsCallbacks.shift()
callback error
setupOrbitControls = (callback) ->
if not _orbitControlsSetup
OrbitControls.prototype = Object.create THREE.EventDispatcher.prototype
_orbitControlsSetup = true
callback()
setupFonts = (callback) ->
load_helvetiker_regular?()
if math3d.fontSources.length
loadScript math3d.fontSources.shift(), (error) ->
if error
runCallbacks error
else
setupFonts callback
else
callback()
setupThreejs = (callback) ->
if THREE?
callback()
else
loadScript math3d.threejsSource, (error) ->
if error
runCallbacks error
else
callback()
setupThreejs (-> setupFonts (-> setupOrbitControls runCallbacks))
_sceneUsingRenderer = undefined
_renderer = {}
# get the best-possible THREE.js renderer (once and for all)
# based on Detector.js's webgl detection
try
if @WebGLRenderingContext
canvas = document.createElement 'canvas'
if canvas.getContext('webgl') or canvas.getContext('experimental-webgl')
_defaultRendererType = 'webgl'
if not _defaultRendererType?
_defaultRendererType = 'canvas'
getRenderer = (scene, type) ->
# if there is a scene currently using this renderer, tell it to switch to
# the static renderer.
if _sceneUsingRenderer? and _sceneUsingRenderer isnt scene
_sceneUsingRenderer.setStaticRenderer()
# now scene takes over using this renderer
_sceneUsingRenderer = scene
if not _renderer[type]?
switch type
when 'webgl'
_renderer[type] = new THREE.WebGLRenderer
antialias : true
alpha : true
preserveDrawingBuffer : true
when 'canvas'
_renderer[type] = new THREE.CanvasRenderer
antialias : true
alpha : true
else
throw "bug -- unkown dynamic renderer type = #{type}"
_renderer[type].domElement.className = 'math-3d-dynamic-renderer'
_renderer[type]
class Math3dThreeJS
constructor: (opts) ->
@opts = defaults opts,
parent : required
width : undefined
height : undefined
renderer : _defaultRendererType # 'webgl' or 'canvas' (defaults to a best guess)
background : [1, 1, 1]
spin : false # if true, image spins by itself when mouse is over it.
aspect_ratio : [1, 1, 1] # a triple [x,y,z] of length three, which scales the x,y,z coordinates of everything by the given values.
stop_when_gone : undefined # if given, animation, etc., stops when this html element (not jquery!) is no longer in the DOM
fast_points : false # if true will use a faster point implementation, but they will be square and not work without WebGL
frame : undefined # frame options
light : undefined # light options
callback : undefined # opts.callback(this object, error)
@frameOpts = defaults @opts.frame,
color : undefined # defaults to the color-wise negation of the background
thickness : .4 # zero thickness disables the frame
labels : true # whether or not to enable labels on the axes
fontface : undefined # defaults to default fontface
@lightOpts = defaults @opts.light,
color : [1, 1, 1]
intensity : 0.75
math3d.loadThreejs (error) =>
if error
return @opts.callback? undefined, error
if @_init
return @opts.callback? @
@_init = true
# IMPORTANT: There is a major bug in three.js -- if you make the width below more than .5 of the window
# width, then after 8 3d renders, things get foobared in WebGL mode. This happens even with the simplest
# demo using the basic cube example from their site with R68. It even sometimes happens with this workaround, but
# at least retrying a few times can fix it.
@opts.width ?= document.documentElement.clientWidth/2
@opts.height ?= @opts.width*2/3
# setup aspect ratio stuff
aspectRatio = @aspectRatio = new THREE.Vector3 @opts.aspect_ratio...
@scaleSize = @aspectRatio.length()
@squareScale = (new THREE.Vector3 1, 1, 1).normalize()
@squareScale.multiplyScalar @scaleSize
@rescale = (vector) -> vector.multiply aspectRatio
# initialize the scene
@scene = new THREE.Scene()
# functions in render hooks will be run when before rendering
@renderHooks = []
@opts.callback? @
# client code should call this when done adding objects to the scene
finalize: ->
@computeDimensions()
@element = document.createElement 'span'
@element.className = 'math-3d-viewer'
@element.style.display = 'inline-block'
if @opts.renderer is 'canvas'
# possibly show the canvas warning.
@element.title = 'WARNING: using slow non-WebGL canvas renderer'
@setColor()
@setFrame()
@setCamera()
@setOrbitControls()
@setLight()
# borrow the renderer to do the initial render
owner = _sceneUsingRenderer
wasDynamic = owner? and owner.rendererType is 'dynamic'
@setDynamicRenderer() # used to do the first render
@setStaticRenderer()
# then give it back to whoever had it (if they were using it)
if wasDynamic
owner.setDynamicRenderer()
@setOnMouseOver()
@opts.parent.appendChild @element
setDynamicRenderer: ->
if @rendererType is 'dynamic'
# already have it
return
@renderer = getRenderer @, @opts.renderer
@rendererType = 'dynamic'
@renderer.setClearColor @background, 1
@renderer.setSize @opts.width, @opts.height
@controls.enabled = true
if @opts.spin
@animate render: false
@renderScene true
# remove the current renderer (if it exists)
removeElement @element.lastChild
# place renderer in correct place in the DOM
@element.appendChild @renderer.domElement
setStaticRenderer: ->
if @rendererType is 'static'
# already have it
return
@controls.enabled = false
if not @staticImage?
@staticImage = document.createElement 'img'
@staticImage.className = 'math-3d-static-renderer'
@staticImage.style.width = @opts.width
@staticImage.style.height = @opts.height
@staticImage.src = @dataUrl()
@rendererType = 'static'
# remove the current renderer (if it exists)
removeElement @element.lastChild
# place renderer in correct place in the DOM
@element.appendChild @staticImage
dataUrl: (opts) ->
opts = defaults opts,
type : 'png' # 'png' or 'jpeg' or 'webp' (the best)
quality : undefined # 1 is best quality; 0 is worst; only applies for jpeg or webp
@renderer.domElement.toDataURL "image/#{opts.type}", opts.quality
updateBoundingBox: (obj) ->
obj.geometry.computeBoundingBox()
if @_boundingBoxHelper?
@_boundingBoxHelper.copy obj.geometry.boundingBox.clone()
else
@_boundingBoxHelper = obj.geometry.boundingBox.clone()
@_boundingBoxHelper.min.add obj.position
@_boundingBoxHelper.max.add obj.position
if @boundingBox?
@boundingBox.geometry.boundingBox.union @_boundingBoxHelper
else
@boundingBox = new THREE.BoxHelper()
@boundingBox.geometry.boundingBox = @_boundingBoxHelper.clone()
@boundingBox.update @boundingBox
_finalizeObj: (obj, in_frame) ->
obj.scale.copy @aspectRatio
if in_frame
@updateBoundingBox obj
@rescale obj.position
@scene.add obj
return obj
addText: (opts) ->
opts = defaults opts,
text : required
loc : required
fontface : undefined # defaults to Text3d's default font
rotation : undefined # by default will always face the camera
size : 1 # should really be specified
texture : required
if not (opts.rotation? or @_text?)
@_text = []
up = new THREE.Vector3()
@renderHooks.push =>
up.set(0, 1, 0).applyQuaternion @camera.quaternion
for text in @_text
text.up.copy up
text.lookAt @camera.position
opts.depth = 0
text = @addText3d opts
if not opts.rotation?
@_text.push text
return text
addText3d: (opts) ->
opts = defaults opts,
text : required
loc : [0, 0, 0]
rotation : [0, 0, 0]
fontface : "helvetiker"
size : 1 # should really be specified
depth : 1 # ditto
texture : required
geometry = new THREE.TextGeometry opts.text,
size : opts.size
height : opts.depth
font : opts.fontface
material = new THREE.MeshBasicMaterial
opacity : opts.texture.opacity
transparent : opts.texture.opacity < 1
material.color.setRGB opts.texture.color...
text = new THREE.Mesh geometry, material
text.position.set opts.loc...
text.rotation.set opts.rotation...
# we shift the origin of the text to the center so
# that rotations will be about the center of the text
# rather than the lower left corner
geometry.computeBoundingBox()
shift = (new THREE.Matrix4()).makeTranslation(
geometry.boundingBox.center().negate().toArray()...)
geometry.applyMatrix shift
geometry.boundingBox.applyMatrix4 shift
@_finalizeObj text, false
text.scale.copy @squareScale
return text
addLine: (opts) ->
opts = defaults opts,
points : required
thickness : 1
arrow_head : false # TODO
texture : required
in_frame : true
geometry = new THREE.Geometry()
for point in opts.points
geometry.vertices.push new THREE.Vector3(point...)
line = new THREE.Line geometry, new THREE.LineBasicMaterial(linewidth:opts.thickness)
line.material.color.setRGB opts.texture.color...
return @_finalizeObj line, opts.in_frame
addSphere: (opts) ->
opts = defaults opts,
loc : required
radius : 5
texture : required
in_frame : true
segments : if @opts.renderer is 'webgl' then 64 else 24
_basic_material : false
geometry = new THREE.SphereGeometry opts.radius, opts.segments, opts.segments
if opts._basic_material
material = new THREE.MeshBasicMaterial
transparent : opts.texture.opacity < 1
else
material = new THREE.MeshPhongMaterial
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
material.ambient.setRGB opts.texture.ambient...
material.specular.setRGB opts.texture.specular...
material.color.setRGB opts.texture.color...
material.opacity = opts.texture.opacity
sphere = new THREE.Mesh geometry, material
sphere.position.set opts.loc...
@_finalizeObj sphere, opts.in_frame
return sphere
addTorus: (opts) ->
opts = defaults opts,
loc : required
inner_radius : .3
outer_radius : 1
texture : required
in_frame : true
segments : if @opts.renderer is 'webgl' then 64 else 24
tube_diameter = opts.outer_radius-opts.inner_radius
doughnut_radius = (opts.outer_radius+opts.inner_radius)/2
geometry = new THREE.TorusGeometry doughnut_radius, tube_diameter, opts.segments, opts.segments
material = new THREE.MeshPhongMaterial
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
material.ambient.setRGB opts.texture.ambient...
material.specular.setRGB opts.texture.specular...
material.color.setRGB opts.texture.color...
material.opacity = opts.texture.opacity
torus = new THREE.Mesh geometry, material
torus.position.set opts.loc...
return @_finalizeObj torus, opts.in_frame
_addCloudPoint: (opts) ->
if not @_cloud
@_cloud = {}
key = opts.size+opts.texture.color+opts.in_frame
if not (cloud = @_cloud[key])?
material = new THREE.PointCloudMaterial
size : opts.size*2
sizeAttenuation : false
material.color.setRGB opts.texture.color...
cloud = @_cloud[key] = new THREE.PointCloud(
new THREE.Geometry(), material)
cloud.scale.copy @aspectRatio
@scene.add cloud
cloud.geometry.vertices.push new THREE.Vector3(opts.loc...)
if opts.in_frame
@updateBoundingBox cloud
return cloud
_initPointHelper: ->
if not @_pointHelper?
@_pointHelperVec = new THREE.Vector3()
@_pointHelper = new THREE.Mesh()
@_pointHelper.geometry.vertices.push @_pointHelperVec
_addSpherePoint: (opts) ->
if not @_points?
@_points = []
@renderHooks.push =>
for point in @_points
scale = @camera.position.distanceTo point.position
point.scale.set scale, scale, scale
if opts.in_frame
@_initPointHelper()
@_pointHelperVec.set opts.loc...
@updateBoundingBox @_pointHelper
opts.radius = opts.size/1200
opts._basic_material = true
opts.in_frame = false
delete opts.size
point = @addSphere opts
@_points.push point
return point
addPoint: (opts) ->
opts = defaults opts,
loc : required
size : 5
texture : required
in_frame : true
if @opts.fast_points
return @_addCloudPoint opts
else
return @_addSpherePoint opts
addIndexFaceSet: (opts) ->
opts = defaults opts,
vertices : required
faces : required
texture : required
wireframe : undefined
in_frame : true
geometry = new THREE.Geometry()
for vector in opts.vertices
geometry.vertices.push new THREE.Vector3(vector...)
for vertex in opts.faces
a = vertex.shift()
b = vertex.shift()
while c = vertex.shift()
geometry.faces.push new THREE.Face3 a, b, c
b = c
geometry.mergeVertices()
geometry.computeFaceNormals()
if opts.wireframe
material = new THREE.MeshBasicMaterial
wireframe : true
wireframeLinewidth : opts.wireframe
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
else
material = new THREE.MeshPhongMaterial
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
material.ambient.setRGB opts.texture.ambient...
material.specular.setRGB opts.texture.specular...
material.color.setRGB opts.texture.color...
material.opacity = opts.texture.opacity
mesh = new THREE.Mesh geometry, material
return @_finalizeObj mesh, opts.in_frame
addGroup: (opts) ->
opts = defaults opts,
subobjs : required
ret = for obj in opts.subobjs
@addObj obj
return ret
addObj: (opts) ->
opts = defaults opts, {type: required}, true
type = opts.type
delete opts.type
switch type
when 'group'
return @addGroup opts
when 'text'
return @addText opts
when 'text3d'
return @addText3d opts
when 'index_face_set'
return @addIndexFaceSet opts
when 'line'
return @addLine opts
when 'point'
return @addPoint opts
when 'sphere'
return @addSphere opts
when 'torus'
return @addTorus opts
else
console.log "ERROR: bad object type #{opts.obj.type}"
computeDimensions: ->
dim = @rescale @boundingBox.geometry.boundingBox.size()
@maxDim = Math.max dim.x, dim.y, dim.z
@minDim = Math.min dim.x, dim.y, dim.z
@center = @rescale @boundingBox.geometry.boundingBox.center()
setColor: ->
# setup color stuff
@background = new THREE.Color @opts.background...
@element.style.background = @background.getStyle()
if @frameOpts.color?
@frameColor = new THREE.Color @frameOpts.color...
else
@frameColor = new THREE.Color(
1-@background.r, 1-@background.g, 1-@background.b)
# always call this after adding things to the scene to make sure track
# controls are sorted out, etc. The client should never have to worry
# about calling this
setFrame: ->
###
if Math.abs(x1 - x0) < eps
x1 += 1
x0 -= 1
if Math.abs(y1 - y0) < eps
y1 += 1
y0 -= 1
if Math.abs(z1 - z0) < eps
z1 += 1
z0 -= 1
###
if @frameOpts.thickness isnt 0 and not @_bounded
@_bounded = true
if not @boundingBox?
# no objects in the scene
return
# set the color and linewidth of the bounding box
@boundingBox.material.color = @frameColor
@boundingBox.material.linewidth = @frameOpts.thickness
# set the scale for the bounding box
@boundingBox.scale.copy @aspectRatio
# the update method of BoxHelper disables matrixAutoUpdate but
# we still need it for the aspect ratio to be taken into account
@boundingBox.matrixAutoUpdate = true
# add the bounding box to the scene
@scene.add @boundingBox
if @frameOpts.labels and not @_labeled
@_labeled = true
min = @boundingBox.geometry.boundingBox.min
max = @boundingBox.geometry.boundingBox.max
avg = @boundingBox.geometry.boundingBox.center()
offset = @maxDim*0.05
offsets = (new THREE.Vector3 offset, offset, offset).divide @aspectRatio
textSize = @minDim/@scaleSize/8
if textSize is 0
return
frameColor = [@frameColor.r, @frameColor.g, @frameColor.b]
addHashMark = (loc) =>
loc2 = new THREE.Vector3 loc...
if offsetDir[0] is '+'
loc2[offsetDir[1]] += offsets[offsetDir[1]]*0.75
else if offsetDir[0] is '-'
loc2[offsetDir[1]] -= offsets[offsetDir[1]]*0.75
loc2 = [loc2.x, loc2.y, loc2.z]
@addLine
points : [loc, loc2]
thickness : @frameOpts.thickness*5
in_frame : false
texture :
color : frameColor
opacity : 1
addLabel = (loc, text) =>
addHashMark loc
text = @addText
loc : loc
text : text
size : textSize
fontface : @frameOpts.fontface
texture :
color : frameColor
opacity : 1
# add a bit of extra offset based on the size of the text
textBox = text.geometry.boundingBox.size().multiply @squareScale
extraOffset = Math.max(textBox.x, textBox.y, textBox.z)/2
realOffset = offset + extraOffset
if offsetDir[0] is '+'
text.position[offsetDir[1]] += realOffset
else if offsetDir[0] is '-'
text.position[offsetDir[1]] -= realOffset
format = (num) ->
Number(num.toFixed 2).toString()
offsetDir = ['-','y']
addLabel [max.x, min.y, min.z], format(min.z)
addLabel [max.x, min.y, avg.z], "z = #{format avg.z}"
addLabel [max.x, min.y, max.z], format(max.z)
offsetDir = ['+','x']
addLabel [max.x, min.y, min.z], format(min.y)
addLabel [max.x, avg.y, min.z], "y = #{format avg.y}"
addLabel [max.x, max.y, min.z], format(max.y)
offsetDir = ['+','y']
addLabel [max.x, max.y, min.z], format(max.x)
addLabel [avg.x, max.y, min.z], "x = #{format avg.x}"
addLabel [min.x, max.y, min.z], format(min.x)
setLight: ->
ambientLight = new THREE.AmbientLight()
ambientLight.color.setRGB @lightOpts.color...
@scene.add ambientLight
cameraLight = new THREE.PointLight 0, @lightOpts.intensity
cameraLight.color.setRGB @lightOpts.color...
@scene.add cameraLight
@renderHooks.push =>
cameraLight.position.copy @camera.position
setCamera: ->
view_angle = 45
aspect = @opts.width/@opts.height
near = @minDim/4
far = @maxDim*16
@camera = new THREE.PerspectiveCamera view_angle, aspect, near, far
@camera.up = new THREE.Vector3 0, 0, 1
@camera.lookAt @center
@camera.position.set 1.5, 1.5, 0.75
@camera.position.multiplyScalar(@maxDim).add @center
@camera.updateProjectionMatrix()
@scene.add @camera
setOrbitControls: ->
# set up camera controls
@controls = new OrbitControls @camera, @element
@controls.target = @center
@controls.damping = 2
@controls.noKeys = true
#@controls.zoomSpeed = 0.6
if @opts.spin
if typeof @opts.spin is "boolean"
@controls.autoRotateSpeed = 2.0
else
@controls.autoRotateSpeed = @opts.spin
@controls.autoRotate = true
@controls.addEventListener 'change', => @renderScene()
# on mouseover, we switch the renderer out to use the dynamic renderer
setOnMouseOver: ->
@element.addEventListener 'mouseover', (=> @setDynamicRenderer()), false
@element.addEventListener 'mouseleave', (=> @setStaticRenderer()), false
animate: (opts = {}) ->
opts = defaults opts,
fps : undefined
stop : false
mouseover : undefined # ignored now
render : true
if @_animate_started and not opts.stop
return
@_animate_started = true
@_animate opts
_animate: (opts) ->
if @rendererType is 'static'
# will try again when we switch to dynamic renderer
@_animate_started = false
return
if @element.offsetWidth <= 0 and @element.offsetWidth <= 0
if @opts.stop_when_gone? and not document.contains(@opts.stop_when_gone)
@_animate_started = false
else if not document.contains(@element)
setTimeout (=> @_animate opts), 5000
else
setTimeout (=> @_animate opts), 1000
return
if opts.stop
@_stop_animating = true
# so next time around will start
return
if @_stop_animating
@_stop_animating = false
@_animate_started = false
return
@renderScene opts.render
delete opts.render
f = =>
requestAnimationFrame (=> @_animate opts)
if opts.fps? and opts.fps
setTimeout f, 1000/opts.fps
else
f()
renderScene: (force = false) ->
if @_rendering? and @_rendering
# already in the process of rendering
return
if @rendererType isnt 'dynamic'
# scene's are only rendered when they are dynamic
return
if not @camera?
return # nothing to do yet
@_rendering = true
if not force
position = @camera.position
if not @_lastPosition?
force = true
@_lastPosition = position.clone()
#@_renderDistance = @minDim*@minDim/10000
else if @_lastPosition.distanceToSquared(position) > 0.05
force = true
@_lastPosition.copy position
if force
@controls.update()
for hook in @renderHooks
hook()
@renderer.render @scene, @camera
@_rendering = false
@math3d = (opts) ->
opts = defaults opts,
scene : required # {opts:?, obj:?} or url from which to download (via ajax) a JSON string that parses to {opts:?,obj:?}
element : required # DOM element to attach to
timeout : 30000 # milleseconds for timing out fetchs
callback : undefined # callback(scene object, error)
# Render a 3-d scene
create_scene = (scene) ->
scene.opts ?= {}
scene.opts.parent = opts.element
scene.opts.callback = (sceneobj, error) ->
if not error
if scene.obj?
sceneobj.addObj scene.obj
sceneobj.finalize()
opts.callback? sceneobj, error
new Math3dThreeJS scene.opts
switch typeof opts.scene
when 'object'
create_scene opts.scene
when 'string'
xhr = new XMLHttpRequest()
xhr.timeout = opts.timeout
xhr.onload = ->
if @status is 200 # success
try
create_scene JSON.parse @responseText
catch error
opts.callback? error
else
opts.callback? "errno #{@status} when trying to download #{opts.scene}"
xhr.onerror = ->
opts.callback? "error when trying to download #{opts.scene}"
xhr.onabort = ->
opts.callback? "downloading #{opts.scene} aborted"
xhr.ontimeout = ->
opts.callback? "downloading #{opts.scene} timed out"
xhr.open 'get', opts.scene
xhr.send()
else
opts.callback? "bad scene type #{typeof opts.scene}"
# Copy the helpers (threejsSource, fontSources, loadThreejs, ...) from the
# internal math3d namespace onto the public @math3d function defined above.
for key, value of math3d
    @math3d[key] = value
###############################################################################
# Copyright (c) 2013, 2014, <NAME> and <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
# In a non-window context (e.g. a web worker wrapper), fall back to the
# document attached to the current `this`.
if not document?
    document = @document
# Internal namespace; its contents are copied onto the public @math3d below.
math3d = {}
# Shorten str to at most max_length characters (default 1024), replacing the
# removed tail with "...".  Anything without a length (e.g. undefined or a
# number) is passed through unchanged.
trunc = (str, max_length = 1024) ->
    return str if not str?.length?
    if str.length <= max_length then str else str[0...max_length-3] + "..."
# Returns a new object with properties determined by those of opts and
# base. The properties in opts *must* all also appear in base. If a
# base property has value "defaults.required", then it must appear in
# opts. For each property prop of base not specified in opts, the
# corresponding value opts[prop] is set (all in a new copy of opts) to
# be base[prop].
# Merge opts over base (see the comment block above for the full contract).
# Throws a descriptive string when a required property is missing or, unless
# allow_extra is true, when opts has a property that base does not.
defaults = (opts, base, allow_extra = false) ->
    if not opts?
        opts = {}
    # Best-effort rendering of the inputs for error messages; JSON.stringify
    # can throw (cycles), in which case we just omit the detail.
    args = ->
        try
            "(opts=#{trunc JSON.stringify opts}, base=#{trunc JSON.stringify base})"
        catch
            ""
    if typeof opts isnt 'object'
        # We put explicit traces before the errors in this function,
        # since otherwise they can be very hard to debug.
        console.trace()
        throw "defaults -- TypeError: function takes inputs as an object #{args()}"
    # NOTE(review): this tests that the hasOwnProperty *function* exists, not
    # hasOwnProperty(prop), so the check reduces to opts[prop]? and inherited
    # properties also count.  Looks like a typo for opts.hasOwnProperty(prop);
    # confirm against callers before changing.
    optsHasProp = (prop) -> opts.hasOwnProperty and opts[prop]?
    res = {}
    for prop, val of base
        if val? and not optsHasProp(prop) # only record not undefined properties
            if val is defaults.required
                console.trace()
                throw "defaults -- TypeError: property '#{prop}' must be specified #{args()}"
            else
                res[prop] = val
    for prop, val of opts
        if not (allow_extra or base.hasOwnProperty(prop))
            console.trace()
            throw "defaults -- TypeError: got an unexpected argument '#{prop}' #{args()}"
        if val? # only record not undefined properties
            res[prop] = val
    return res
# WARNING -- don't accidentally use this as a default:
# Unique sentinel: any base property set to this value MUST be supplied in opts.
required = defaults.required = "__!!!!!!this is a required property!!!!!!__"
# Detach element from the DOM; a no-op when element is missing or unattached.
removeElement = (element) ->
    parent = element?.parentElement
    parent?.removeChild element
# Asynchronously load a script by injecting a <script> tag, then remove the
# tag and invoke callback exactly once: with no argument on success, or with
# an error message string on failure.
loadScript = (script_src, callback) ->
    pending = true
    finish = (args...) ->
        removeElement tag
        if pending
            pending = false
            callback args...
    tag = document.createElement 'script'
    tag.onload  = -> finish()
    tag.onerror = -> finish "error loading script #{tag.src}"
    tag.type    = 'text/javascript'
    tag.charset = 'utf-8'
    tag.async   = true
    tag.src     = script_src
    document.head.appendChild tag
# Callbacks queued while three.js (and fonts) are still being fetched.
_loadingThreejsCallbacks = []
_orbitControlsSetup = false
# CDN location of three.js; fontSources may be populated by the embedding page
# with extra typeface scripts to load before first use.
math3d.threejsSource = "//cdnjs.cloudflare.com/ajax/libs/three.js/r68/three.min.js"
math3d.fontSources = []
# Ensure three.js, any extra fonts, and OrbitControls are loaded, then invoke
# callback (with an error message string on failure).  Concurrent calls while
# loading is in flight are queued and all resolved together; only the first
# caller actually kicks off the load chain.
math3d.loadThreejs = (callback) ->
    if THREE? and _orbitControlsSetup and not math3d.fontSources.length
        return callback()
    _loadingThreejsCallbacks.push callback
    if _loadingThreejsCallbacks.length > 1
        # another call is already driving the load; we just wait our turn
        return
    # Drain the queue, passing the same (possibly undefined) error to everyone.
    runCallbacks = (error) ->
        while callback = _loadingThreejsCallbacks.shift()
            callback error
    setupOrbitControls = (callback) ->
        if not _orbitControlsSetup
            # OrbitControls is defined elsewhere in this file; wire its
            # prototype chain into THREE once THREE exists.
            OrbitControls.prototype = Object.create THREE.EventDispatcher.prototype
            _orbitControlsSetup = true
        callback()
    # Recursively load every queued font source, then continue.
    setupFonts = (callback) ->
        load_helvetiker_regular?()
        if math3d.fontSources.length
            loadScript math3d.fontSources.shift(), (error) ->
                if error
                    runCallbacks error
                else
                    setupFonts callback
        else
            callback()
    setupThreejs = (callback) ->
        if THREE?
            callback()
        else
            loadScript math3d.threejsSource, (error) ->
                if error
                    runCallbacks error
                else
                    callback()
    # three.js first, then fonts, then OrbitControls, then notify everyone.
    setupThreejs (-> setupFonts (-> setupOrbitControls runCallbacks))
# The scene currently holding the shared dynamic renderer (only one at a time).
_sceneUsingRenderer = undefined
# Cache of the shared renderers, keyed by type ('webgl' or 'canvas').
_renderer = {}
# get the best-possible THREE.js renderer (once and for all)
# based on Detector.js's webgl detection
try
    if @WebGLRenderingContext
        canvas = document.createElement 'canvas'
        if canvas.getContext('webgl') or canvas.getContext('experimental-webgl')
            _defaultRendererType = 'webgl'
# bare try: any detection failure silently falls through to the canvas renderer
if not _defaultRendererType?
    _defaultRendererType = 'canvas'
# Hand the single shared dynamic renderer of the given type ('webgl' or
# 'canvas') to scene, first evicting whichever scene held it (that scene is
# switched to its static snapshot).  The renderer is created lazily and cached.
getRenderer = (scene, type) ->
    # if there is a scene currently using this renderer, tell it to switch to
    # the static renderer.
    if _sceneUsingRenderer? and _sceneUsingRenderer isnt scene
        _sceneUsingRenderer.setStaticRenderer()
    # now scene takes over using this renderer
    _sceneUsingRenderer = scene
    if not _renderer[type]?
        switch type
            when 'webgl'
                _renderer[type] = new THREE.WebGLRenderer
                    antialias             : true
                    alpha                 : true
                    preserveDrawingBuffer : true
            when 'canvas'
                _renderer[type] = new THREE.CanvasRenderer
                    antialias : true
                    alpha     : true
            else
                # BUGFIX: error message typo "unkown" -> "unknown"
                throw "bug -- unknown dynamic renderer type = #{type}"
        _renderer[type].domElement.className = 'math-3d-dynamic-renderer'
    _renderer[type]
# A single 3-d scene rendered with three.js.  While the mouse is over the
# scene it uses the shared live WebGL/canvas renderer; otherwise it shows a
# static <img> snapshot of the last render, so many scenes can coexist.
class Math3dThreeJS
    constructor: (opts) ->
        @opts = defaults opts,
            parent         : required
            width          : undefined
            height         : undefined
            renderer       : _defaultRendererType # 'webgl' or 'canvas' (defaults to a best guess)
            background     : [1, 1, 1]
            spin           : false      # if true, image spins by itself when mouse is over it.
            aspect_ratio   : [1, 1, 1]  # a triple [x,y,z] of length three, which scales the x,y,z coordinates of everything by the given values.
            stop_when_gone : undefined  # if given, animation, etc., stops when this html element (not jquery!) is no longer in the DOM
            fast_points    : false      # if true will use a faster point implementation, but they will be square and not work without WebGL
            frame          : undefined  # frame options
            light          : undefined  # light options
            callback       : undefined  # opts.callback(this object, error)
        @frameOpts = defaults @opts.frame,
            color     : undefined  # defaults to the color-wise negation of the background
            thickness : .4        # zero thickness disables the frame
            labels    : true      # whether or not to enable labels on the axes
            fontface  : undefined # defaults to default fontface
        @lightOpts = defaults @opts.light,
            color     : [1, 1, 1]
            intensity : 0.75
        math3d.loadThreejs (error) =>
            if error
                return @opts.callback? undefined, error
            if @_init
                return @opts.callback? @
            @_init = true
            # IMPORTANT: There is a major bug in three.js -- if you make the width below more than .5 of the window
            # width, then after 8 3d renders, things get foobared in WebGL mode.  This happens even with the simplest
            # demo using the basic cube example from their site with R68. It even sometimes happens with this workaround, but
            # at least retrying a few times can fix it.
            @opts.width  ?= document.documentElement.clientWidth/2
            @opts.height ?= @opts.width*2/3
            # setup aspect ratio stuff
            aspectRatio = @aspectRatio = new THREE.Vector3 @opts.aspect_ratio...
            @scaleSize = @aspectRatio.length()
            @squareScale = (new THREE.Vector3 1, 1, 1).normalize()
            @squareScale.multiplyScalar @scaleSize
            @rescale = (vector) -> vector.multiply aspectRatio
            # initialize the scene
            @scene = new THREE.Scene()
            # functions in render hooks will be run before rendering
            @renderHooks = []
            @opts.callback? @

    # client code should call this when done adding objects to the scene
    finalize: ->
        @computeDimensions()
        @element = document.createElement 'span'
        @element.className = 'math-3d-viewer'
        @element.style.display = 'inline-block'
        if @opts.renderer is 'canvas'
            # possibly show the canvas warning.
            @element.title = 'WARNING: using slow non-WebGL canvas renderer'
        @setColor()
        @setFrame()
        @setCamera()
        @setOrbitControls()
        @setLight()
        # borrow the renderer to do the initial render
        owner = _sceneUsingRenderer
        wasDynamic = owner? and owner.rendererType is 'dynamic'
        @setDynamicRenderer() # used to do the first render
        @setStaticRenderer()
        # then give it back to whoever had it (if they were using it)
        if wasDynamic
            owner.setDynamicRenderer()
        @setOnMouseOver()
        @opts.parent.appendChild @element

    # Take over the shared live renderer (evicting any other scene using it).
    setDynamicRenderer: ->
        if @rendererType is 'dynamic'
            # already have it
            return
        @renderer = getRenderer @, @opts.renderer
        @rendererType = 'dynamic'
        @renderer.setClearColor @background, 1
        @renderer.setSize @opts.width, @opts.height
        @controls.enabled = true
        if @opts.spin
            @animate render: false
        @renderScene true
        # remove the current renderer (if it exists)
        removeElement @element.lastChild
        # place renderer in correct place in the DOM
        @element.appendChild @renderer.domElement

    # Swap in a static <img> snapshot of the latest dynamic render.
    setStaticRenderer: ->
        if @rendererType is 'static'
            # already have it
            return
        @controls.enabled = false
        if not @staticImage?
            @staticImage = document.createElement 'img'
            @staticImage.className = 'math-3d-static-renderer'
            @staticImage.style.width = @opts.width
            @staticImage.style.height = @opts.height
        # refresh the snapshot every time we go static, so it shows the latest view
        @staticImage.src = @dataUrl()
        @rendererType = 'static'
        # remove the current renderer (if it exists)
        removeElement @element.lastChild
        # place renderer in correct place in the DOM
        @element.appendChild @staticImage

    # Data-URL snapshot of the current dynamic renderer's canvas.
    dataUrl: (opts) ->
        opts = defaults opts,
            type    : 'png'     # 'png' or 'jpeg' or 'webp' (the best)
            quality : undefined # 1 is best quality; 0 is worst; only applies for jpeg or webp
        @renderer.domElement.toDataURL "image/#{opts.type}", opts.quality

    # Grow the scene bounding box (a THREE.BoxHelper) to include obj.
    updateBoundingBox: (obj) ->
        obj.geometry.computeBoundingBox()
        if @_boundingBoxHelper?
            # .copy already copies the values; the extra .clone() was redundant
            @_boundingBoxHelper.copy obj.geometry.boundingBox
        else
            @_boundingBoxHelper = obj.geometry.boundingBox.clone()
        @_boundingBoxHelper.min.add obj.position
        @_boundingBoxHelper.max.add obj.position
        if @boundingBox?
            @boundingBox.geometry.boundingBox.union @_boundingBoxHelper
        else
            @boundingBox = new THREE.BoxHelper()
            @boundingBox.geometry.boundingBox = @_boundingBoxHelper.clone()
        @boundingBox.update @boundingBox

    # Apply the aspect ratio, track the bounding box, and add obj to the scene.
    _finalizeObj: (obj, in_frame) ->
        obj.scale.copy @aspectRatio
        if in_frame
            @updateBoundingBox obj
        @rescale obj.position
        @scene.add obj
        return obj

    # Add 2-d text; unless a rotation is given it billboards toward the camera
    # via a render hook installed on first use.
    addText: (opts) ->
        opts = defaults opts,
            text     : required
            loc      : required
            fontface : undefined # defaults to Text3d's default font
            rotation : undefined # by default will always face the camera
            size     : 1         # should really be specified
            texture  : required
        if not (opts.rotation? or @_text?)
            @_text = []
            up = new THREE.Vector3()
            @renderHooks.push =>
                up.set(0, 1, 0).applyQuaternion @camera.quaternion
                for text in @_text
                    text.up.copy up
                    text.lookAt @camera.position
        opts.depth = 0
        text = @addText3d opts
        if not opts.rotation?
            @_text.push text
        return text

    # Add extruded 3-d text at a fixed location/rotation.
    addText3d: (opts) ->
        opts = defaults opts,
            text     : required
            loc      : [0, 0, 0]
            rotation : [0, 0, 0]
            fontface : "helvetiker"
            size     : 1 # should really be specified
            depth    : 1 # ditto
            texture  : required
        geometry = new THREE.TextGeometry opts.text,
            size   : opts.size
            height : opts.depth
            font   : opts.fontface
        material = new THREE.MeshBasicMaterial
            opacity     : opts.texture.opacity
            transparent : opts.texture.opacity < 1
        material.color.setRGB opts.texture.color...
        text = new THREE.Mesh geometry, material
        text.position.set opts.loc...
        text.rotation.set opts.rotation...
        # we shift the origin of the text to the center so
        # that rotations will be about the center of the text
        # rather than the lower left corner
        geometry.computeBoundingBox()
        shift = (new THREE.Matrix4()).makeTranslation(
            geometry.boundingBox.center().negate().toArray()...)
        geometry.applyMatrix shift
        geometry.boundingBox.applyMatrix4 shift
        @_finalizeObj text, false
        text.scale.copy @squareScale
        return text

    # Add a polyline through the given points.
    addLine: (opts) ->
        opts = defaults opts,
            points     : required
            thickness  : 1
            arrow_head : false  # TODO
            texture    : required
            in_frame   : true
        geometry = new THREE.Geometry()
        for point in opts.points
            geometry.vertices.push new THREE.Vector3(point...)
        line = new THREE.Line geometry, new THREE.LineBasicMaterial(linewidth:opts.thickness)
        line.material.color.setRGB opts.texture.color...
        return @_finalizeObj line, opts.in_frame

    # Add a sphere (Phong-shaded, or unlit when _basic_material is set).
    addSphere: (opts) ->
        opts = defaults opts,
            loc             : required
            radius          : 5
            texture         : required
            in_frame        : true
            segments        : if @opts.renderer is 'webgl' then 64 else 24
            _basic_material : false
        geometry = new THREE.SphereGeometry opts.radius, opts.segments, opts.segments
        if opts._basic_material
            material = new THREE.MeshBasicMaterial
                transparent : opts.texture.opacity < 1
        else
            material = new THREE.MeshPhongMaterial
                transparent : opts.texture.opacity < 1
                side        : THREE.DoubleSide
        # NOTE(review): setRGB on .ambient/.specular assumes the material has
        # those properties and texture provides them -- verify for the
        # MeshBasicMaterial branch against r68.
        material.ambient.setRGB opts.texture.ambient...
        material.specular.setRGB opts.texture.specular...
        material.color.setRGB opts.texture.color...
        material.opacity = opts.texture.opacity
        sphere = new THREE.Mesh geometry, material
        sphere.position.set opts.loc...
        @_finalizeObj sphere, opts.in_frame
        return sphere

    # Add a torus described by its inner and outer radii.
    addTorus: (opts) ->
        opts = defaults opts,
            loc          : required
            inner_radius : .3
            outer_radius : 1
            texture      : required
            in_frame     : true
            segments     : if @opts.renderer is 'webgl' then 64 else 24
        tube_diameter = opts.outer_radius-opts.inner_radius
        doughnut_radius = (opts.outer_radius+opts.inner_radius)/2
        geometry = new THREE.TorusGeometry doughnut_radius, tube_diameter, opts.segments, opts.segments
        material = new THREE.MeshPhongMaterial
            transparent : opts.texture.opacity < 1
            side        : THREE.DoubleSide
        material.ambient.setRGB opts.texture.ambient...
        material.specular.setRGB opts.texture.specular...
        material.color.setRGB opts.texture.color...
        material.opacity = opts.texture.opacity
        torus = new THREE.Mesh geometry, material
        torus.position.set opts.loc...
        return @_finalizeObj torus, opts.in_frame

    # Fast point implementation: batch points of identical size/color/frame
    # into one THREE.PointCloud (square sprites; WebGL only).
    _addCloudPoint: (opts) ->
        if not @_cloud
            @_cloud = {}
        key = opts.size+opts.texture.color+opts.in_frame
        if not (cloud = @_cloud[key])?
            material = new THREE.PointCloudMaterial
                size            : opts.size*2
                sizeAttenuation : false
            material.color.setRGB opts.texture.color...
            cloud = @_cloud[key] = new THREE.PointCloud(
                new THREE.Geometry(), material)
            cloud.scale.copy @aspectRatio
            @scene.add cloud
        cloud.geometry.vertices.push new THREE.Vector3(opts.loc...)
        if opts.in_frame
            @updateBoundingBox cloud
        return cloud

    # One-vertex dummy mesh used to feed single points to updateBoundingBox.
    _initPointHelper: ->
        if not @_pointHelper?
            @_pointHelperVec = new THREE.Vector3()
            @_pointHelper = new THREE.Mesh()
            @_pointHelper.geometry.vertices.push @_pointHelperVec

    # Round point implementation: a small sphere rescaled every frame so its
    # apparent size stays constant as the camera moves.
    _addSpherePoint: (opts) ->
        if not @_points?
            @_points = []
            @renderHooks.push =>
                for point in @_points
                    scale = @camera.position.distanceTo point.position
                    point.scale.set scale, scale, scale
        if opts.in_frame
            @_initPointHelper()
            @_pointHelperVec.set opts.loc...
            @updateBoundingBox @_pointHelper
        opts.radius = opts.size/1200
        opts._basic_material = true
        opts.in_frame = false
        delete opts.size
        point = @addSphere opts
        @_points.push point
        return point

    # Add a point, using the cloud or sphere implementation per opts.fast_points.
    addPoint: (opts) ->
        opts = defaults opts,
            loc      : required
            size     : 5
            texture  : required
            in_frame : true
        if @opts.fast_points
            return @_addCloudPoint opts
        else
            return @_addSpherePoint opts

    # Add a mesh given vertices and (possibly >3-sided, fan-triangulated) faces.
    addIndexFaceSet: (opts) ->
        opts = defaults opts,
            vertices  : required
            faces     : required
            texture   : required
            wireframe : undefined
            in_frame  : true
        geometry = new THREE.Geometry()
        for vector in opts.vertices
            geometry.vertices.push new THREE.Vector3(vector...)
        for vertex in opts.faces
            # triangulate each face as a fan around its first vertex
            a = vertex.shift()
            b = vertex.shift()
            while c = vertex.shift()
                geometry.faces.push new THREE.Face3 a, b, c
                b = c
        geometry.mergeVertices()
        geometry.computeFaceNormals()
        if opts.wireframe
            material = new THREE.MeshBasicMaterial
                wireframe          : true
                wireframeLinewidth : opts.wireframe
                transparent        : opts.texture.opacity < 1
                side               : THREE.DoubleSide
        else
            material = new THREE.MeshPhongMaterial
                transparent : opts.texture.opacity < 1
                side        : THREE.DoubleSide
            material.ambient.setRGB opts.texture.ambient...
            material.specular.setRGB opts.texture.specular...
        material.color.setRGB opts.texture.color...
        material.opacity = opts.texture.opacity
        mesh = new THREE.Mesh geometry, material
        return @_finalizeObj mesh, opts.in_frame

    # Add every sub-object of a group; returns the list of created objects.
    addGroup: (opts) ->
        opts = defaults opts,
            subobjs : required
        ret = for obj in opts.subobjs
            @addObj obj
        return ret

    # Dispatch on opts.type and add the described object to the scene.
    addObj: (opts) ->
        opts = defaults opts, {type: required}, true
        type = opts.type
        delete opts.type
        switch type
            when 'group'
                return @addGroup opts
            when 'text'
                return @addText opts
            when 'text3d'
                return @addText3d opts
            when 'index_face_set'
                return @addIndexFaceSet opts
            when 'line'
                return @addLine opts
            when 'point'
                return @addPoint opts
            when 'sphere'
                return @addSphere opts
            when 'torus'
                return @addTorus opts
            else
                # BUGFIX: was opts.obj.type, but opts.obj does not exist here,
                # so the error path itself threw; report the bad type instead.
                console.log "ERROR: bad object type #{type}"

    # Cache the scene extent (after aspect-ratio rescaling) for later use.
    computeDimensions: ->
        dim = @rescale @boundingBox.geometry.boundingBox.size()
        @maxDim = Math.max dim.x, dim.y, dim.z
        @minDim = Math.min dim.x, dim.y, dim.z
        @center = @rescale @boundingBox.geometry.boundingBox.center()

    setColor: ->
        # setup color stuff
        @background = new THREE.Color @opts.background...
        @element.style.background = @background.getStyle()
        if @frameOpts.color?
            @frameColor = new THREE.Color @frameOpts.color...
        else
            # default frame color: the color-wise negation of the background
            @frameColor = new THREE.Color(
                1-@background.r, 1-@background.g, 1-@background.b)

    # always call this after adding things to the scene to make sure track
    # controls are sorted out, etc.  The client should never have to worry
    # about calling this
    setFrame: ->
        if @frameOpts.thickness isnt 0 and not @_bounded
            @_bounded = true
            if not @boundingBox?
                # no objects in the scene
                return
            # set the color and linewidth of the bounding box
            @boundingBox.material.color = @frameColor
            @boundingBox.material.linewidth = @frameOpts.thickness
            # set the scale for the bounding box
            @boundingBox.scale.copy @aspectRatio
            # the update method of BoxHelper disables matrixAutoUpdate but
            # we still need it for the aspect ratio to be taken into account
            @boundingBox.matrixAutoUpdate = true
            # add the bounding box to the scene
            @scene.add @boundingBox
        if @frameOpts.labels and not @_labeled
            @_labeled = true
            min = @boundingBox.geometry.boundingBox.min
            max = @boundingBox.geometry.boundingBox.max
            avg = @boundingBox.geometry.boundingBox.center()
            offset = @maxDim*0.05
            offsets = (new THREE.Vector3 offset, offset, offset).divide @aspectRatio
            textSize = @minDim/@scaleSize/8
            if textSize is 0
                return
            frameColor = [@frameColor.r, @frameColor.g, @frameColor.b]
            # offsetDir (set below before each group of labels) selects which
            # axis the hash marks/labels are pushed along, and in which sense.
            addHashMark = (loc) =>
                loc2 = new THREE.Vector3 loc...
                if offsetDir[0] is '+'
                    loc2[offsetDir[1]] += offsets[offsetDir[1]]*0.75
                else if offsetDir[0] is '-'
                    loc2[offsetDir[1]] -= offsets[offsetDir[1]]*0.75
                loc2 = [loc2.x, loc2.y, loc2.z]
                @addLine
                    points    : [loc, loc2]
                    thickness : @frameOpts.thickness*5
                    in_frame  : false
                    texture   :
                        color   : frameColor
                        opacity : 1
            addLabel = (loc, text) =>
                addHashMark loc
                text = @addText
                    loc      : loc
                    text     : text
                    size     : textSize
                    fontface : @frameOpts.fontface
                    texture  :
                        color   : frameColor
                        opacity : 1
                # add a bit of extra offset based on the size of the text
                textBox = text.geometry.boundingBox.size().multiply @squareScale
                extraOffset = Math.max(textBox.x, textBox.y, textBox.z)/2
                realOffset = offset + extraOffset
                if offsetDir[0] is '+'
                    text.position[offsetDir[1]] += realOffset
                else if offsetDir[0] is '-'
                    text.position[offsetDir[1]] -= realOffset
            format = (num) ->
                Number(num.toFixed 2).toString()
            offsetDir = ['-','y']
            addLabel [max.x, min.y, min.z], format(min.z)
            addLabel [max.x, min.y, avg.z], "z = #{format avg.z}"
            addLabel [max.x, min.y, max.z], format(max.z)
            offsetDir = ['+','x']
            addLabel [max.x, min.y, min.z], format(min.y)
            addLabel [max.x, avg.y, min.z], "y = #{format avg.y}"
            addLabel [max.x, max.y, min.z], format(max.y)
            offsetDir = ['+','y']
            addLabel [max.x, max.y, min.z], format(max.x)
            addLabel [avg.x, max.y, min.z], "x = #{format avg.x}"
            addLabel [min.x, max.y, min.z], format(min.x)

    # Ambient light plus a point light that follows the camera via a render hook.
    setLight: ->
        ambientLight = new THREE.AmbientLight()
        ambientLight.color.setRGB @lightOpts.color...
        @scene.add ambientLight
        cameraLight = new THREE.PointLight 0, @lightOpts.intensity
        cameraLight.color.setRGB @lightOpts.color...
        @scene.add cameraLight
        @renderHooks.push =>
            cameraLight.position.copy @camera.position

    # Perspective camera positioned relative to the scene's size and center.
    setCamera: ->
        view_angle = 45
        aspect = @opts.width/@opts.height
        near = @minDim/4
        far = @maxDim*16
        @camera = new THREE.PerspectiveCamera view_angle, aspect, near, far
        @camera.up = new THREE.Vector3 0, 0, 1
        @camera.lookAt @center
        @camera.position.set 1.5, 1.5, 0.75
        @camera.position.multiplyScalar(@maxDim).add @center
        @camera.updateProjectionMatrix()
        @scene.add @camera

    setOrbitControls: ->
        # set up camera controls
        @controls = new OrbitControls @camera, @element
        @controls.target = @center
        @controls.damping = 2
        @controls.noKeys = true
        #@controls.zoomSpeed = 0.6
        if @opts.spin
            if typeof @opts.spin is "boolean"
                @controls.autoRotateSpeed = 2.0
            else
                @controls.autoRotateSpeed = @opts.spin
            @controls.autoRotate = true
        @controls.addEventListener 'change', => @renderScene()

    # on mouseover, we switch the renderer out to use the dynamic renderer
    setOnMouseOver: ->
        @element.addEventListener 'mouseover', (=> @setDynamicRenderer()), false
        @element.addEventListener 'mouseleave', (=> @setStaticRenderer()), false

    # Start (or stop) the animation loop; at most one loop runs at a time.
    animate: (opts = {}) ->
        opts = defaults opts,
            fps       : undefined
            stop      : false
            mouseover : undefined # ignored now
            render    : true
        if @_animate_started and not opts.stop
            return
        @_animate_started = true
        @_animate opts

    _animate: (opts) ->
        if @rendererType is 'static'
            # will try again when we switch to dynamic renderer
            @_animate_started = false
            return
        # BUGFIX: the second test checked offsetWidth twice; the intent is
        # "element has no visible extent", i.e. width AND height are zero.
        if @element.offsetWidth <= 0 and @element.offsetHeight <= 0
            if @opts.stop_when_gone? and not document.contains(@opts.stop_when_gone)
                @_animate_started = false
            else if not document.contains(@element)
                # detached from the DOM -- poll slowly in case it comes back
                setTimeout (=> @_animate opts), 5000
            else
                # attached but hidden -- poll a bit faster
                setTimeout (=> @_animate opts), 1000
            return
        if opts.stop
            @_stop_animating = true
            # so next time around will start
            return
        if @_stop_animating
            @_stop_animating = false
            @_animate_started = false
            return
        @renderScene opts.render
        delete opts.render  # only force a render on the first frame
        f = =>
            requestAnimationFrame (=> @_animate opts)
        if opts.fps? and opts.fps
            setTimeout f, 1000/opts.fps
        else
            f()

    # Render the scene with the shared dynamic renderer.  Unless force is
    # true, skip the render when the camera has not moved appreciably.
    renderScene: (force = false) ->
        if @_rendering? and @_rendering
            # already in the process of rendering
            return
        if @rendererType isnt 'dynamic'
            # scenes are only rendered when they are dynamic
            return
        if not @camera?
            return # nothing to do yet
        @_rendering = true
        if not force
            position = @camera.position
            if not @_lastPosition?
                force = true
                @_lastPosition = position.clone()
                #@_renderDistance = @minDim*@minDim/10000
            else if @_lastPosition.distanceToSquared(position) > 0.05
                force = true
                @_lastPosition.copy position
        if force
            @controls.update()
            for hook in @renderHooks
                hook()
            @renderer.render @scene, @camera
        @_rendering = false
# Render a 3-d scene into opts.element.  The scene may be given directly as
# an object, or as a URL from which a JSON description is downloaded via ajax.
# All failure paths report through opts.callback(undefined, error), matching
# the documented callback(scene object, error) signature.
@math3d = (opts) ->
    opts = defaults opts,
        scene    : required  # {opts:?, obj:?} or url from which to download (via ajax) a JSON string that parses to {opts:?,obj:?}
        element  : required  # DOM element to attach to
        timeout  : 30000     # milliseconds for timing out fetches
        callback : undefined # callback(scene object, error)
    # Build a Math3dThreeJS viewer for the given {opts:?, obj:?} description.
    create_scene = (scene) ->
        scene.opts ?= {}
        scene.opts.parent = opts.element
        scene.opts.callback = (sceneobj, error) ->
            if not error
                if scene.obj?
                    sceneobj.addObj scene.obj
                sceneobj.finalize()
            opts.callback? sceneobj, error
        new Math3dThreeJS scene.opts
    switch typeof opts.scene
        when 'object'
            create_scene opts.scene
        when 'string'
            xhr = new XMLHttpRequest()
            xhr.timeout = opts.timeout
            xhr.onload = ->
                if @status is 200 # success
                    try
                        create_scene JSON.parse @responseText
                    catch error
                        # BUGFIX: errors belong in the second argument slot
                        opts.callback? undefined, error
                else
                    opts.callback? undefined, "errno #{@status} when trying to download #{opts.scene}"
            xhr.onerror = ->
                opts.callback? undefined, "error when trying to download #{opts.scene}"
            xhr.onabort = ->
                opts.callback? undefined, "downloading #{opts.scene} aborted"
            xhr.ontimeout = ->
                opts.callback? undefined, "downloading #{opts.scene} timed out"
            xhr.open 'get', opts.scene
            xhr.send()
        else
            opts.callback? undefined, "bad scene type #{typeof opts.scene}"
# Copy the helpers (threejsSource, fontSources, loadThreejs, ...) from the
# internal math3d namespace onto the public @math3d function defined above.
for key, value of math3d
    @math3d[key] = value
###############################################################################
# Copyright (c) 2013, 2014, <NAME> and <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###############################################################################
if not document?
document = @document
math3d = {}
# Shorten str to at most max_length characters (default 1024), replacing the
# removed tail with "...".  Anything without a length (e.g. undefined or a
# number) is passed through unchanged.
trunc = (str, max_length = 1024) ->
    return str if not str?.length?
    if str.length <= max_length then str else str[0...max_length-3] + "..."
# Returns a new object with properties determined by those of opts and
# base. The properties in opts *must* all also appear in base. If a
# base property has value "defaults.required", then it must appear in
# opts. For each property prop of base not specified in opts, the
# corresponding value opts[prop] is set (all in a new copy of opts) to
# be base[prop].
defaults = (opts, base, allow_extra = false) ->
if not opts?
opts = {}
args = ->
try
"(opts=#{trunc JSON.stringify opts}, base=#{trunc JSON.stringify base})"
catch
""
if typeof opts isnt 'object'
# We put explicit traces before the errors in this function,
# since otherwise they can be very hard to debug.
console.trace()
throw "defaults -- TypeError: function takes inputs as an object #{args()}"
optsHasProp = (prop) -> opts.hasOwnProperty and opts[prop]?
res = {}
for prop, val of base
if val? and not optsHasProp(prop) # only record not undefined properties
if val is defaults.required
console.trace()
throw "defaults -- TypeError: property '#{prop}' must be specified #{args()}"
else
res[prop] = val
for prop, val of opts
if not (allow_extra or base.hasOwnProperty(prop))
console.trace()
throw "defaults -- TypeError: got an unexpected argument '#{prop}' #{args()}"
if val? # only record not undefined properties
res[prop] = val
return res
# WARNING -- don't accidentally use this as a default:
required = defaults.required = "__!!!!!!this is a required property!!!!!!__"
# Detach element from the DOM; a no-op when element is missing or unattached.
removeElement = (element) ->
    parent = element?.parentElement
    parent?.removeChild element
loadScript = (script_src, callback) ->
runCallback = true
script = document.createElement 'script'
script.onload = ->
removeElement script
if runCallback
runCallback = false
callback()
script.onerror = ->
removeElement script
if runCallback
runCallback = false
callback "error loading script #{script.src}"
script.type = 'text/javascript'
script.charset = 'utf-8'
script.async = true
script.src = script_src
document.head.appendChild script
_loadingThreejsCallbacks = []
_orbitControlsSetup = false
math3d.threejsSource = "//cdnjs.cloudflare.com/ajax/libs/three.js/r68/three.min.js"
math3d.fontSources = []
math3d.loadThreejs = (callback) ->
if THREE? and _orbitControlsSetup and not math3d.fontSources.length
return callback()
_loadingThreejsCallbacks.push callback
if _loadingThreejsCallbacks.length > 1
return
runCallbacks = (error) ->
while callback = _loadingThreejsCallbacks.shift()
callback error
setupOrbitControls = (callback) ->
if not _orbitControlsSetup
OrbitControls.prototype = Object.create THREE.EventDispatcher.prototype
_orbitControlsSetup = true
callback()
setupFonts = (callback) ->
load_helvetiker_regular?()
if math3d.fontSources.length
loadScript math3d.fontSources.shift(), (error) ->
if error
runCallbacks error
else
setupFonts callback
else
callback()
setupThreejs = (callback) ->
if THREE?
callback()
else
loadScript math3d.threejsSource, (error) ->
if error
runCallbacks error
else
callback()
setupThreejs (-> setupFonts (-> setupOrbitControls runCallbacks))
_sceneUsingRenderer = undefined
_renderer = {}
# get the best-possible THREE.js renderer (once and for all)
# based on Detector.js's webgl detection
try
if @WebGLRenderingContext
canvas = document.createElement 'canvas'
if canvas.getContext('webgl') or canvas.getContext('experimental-webgl')
_defaultRendererType = 'webgl'
if not _defaultRendererType?
_defaultRendererType = 'canvas'
# Hand the single shared dynamic renderer of the given type ('webgl' or
# 'canvas') to scene, first evicting whichever scene held it (that scene is
# switched to its static snapshot).  The renderer is created lazily and cached.
getRenderer = (scene, type) ->
    # if there is a scene currently using this renderer, tell it to switch to
    # the static renderer.
    if _sceneUsingRenderer? and _sceneUsingRenderer isnt scene
        _sceneUsingRenderer.setStaticRenderer()
    # now scene takes over using this renderer
    _sceneUsingRenderer = scene
    if not _renderer[type]?
        switch type
            when 'webgl'
                _renderer[type] = new THREE.WebGLRenderer
                    antialias             : true
                    alpha                 : true
                    preserveDrawingBuffer : true
            when 'canvas'
                _renderer[type] = new THREE.CanvasRenderer
                    antialias : true
                    alpha     : true
            else
                # BUGFIX: error message typo "unkown" -> "unknown"
                throw "bug -- unknown dynamic renderer type = #{type}"
        _renderer[type].domElement.className = 'math-3d-dynamic-renderer'
    _renderer[type]
class Math3dThreeJS
constructor: (opts) ->
@opts = defaults opts,
parent : required
width : undefined
height : undefined
renderer : _defaultRendererType # 'webgl' or 'canvas' (defaults to a best guess)
background : [1, 1, 1]
spin : false # if true, image spins by itself when mouse is over it.
aspect_ratio : [1, 1, 1] # a triple [x,y,z] of length three, which scales the x,y,z coordinates of everything by the given values.
stop_when_gone : undefined # if given, animation, etc., stops when this html element (not jquery!) is no longer in the DOM
fast_points : false # if true will use a faster point implementation, but they will be square and not work without WebGL
frame : undefined # frame options
light : undefined # light options
callback : undefined # opts.callback(this object, error)
@frameOpts = defaults @opts.frame,
color : undefined # defaults to the color-wise negation of the background
thickness : .4 # zero thickness disables the frame
labels : true # whether or not to enable labels on the axes
fontface : undefined # defaults to default fontface
@lightOpts = defaults @opts.light,
color : [1, 1, 1]
intensity : 0.75
math3d.loadThreejs (error) =>
if error
return @opts.callback? undefined, error
if @_init
return @opts.callback? @
@_init = true
# IMPORTANT: There is a major bug in three.js -- if you make the width below more than .5 of the window
# width, then after 8 3d renders, things get foobared in WebGL mode. This happens even with the simplest
# demo using the basic cube example from their site with R68. It even sometimes happens with this workaround, but
# at least retrying a few times can fix it.
@opts.width ?= document.documentElement.clientWidth/2
@opts.height ?= @opts.width*2/3
# setup aspect ratio stuff
aspectRatio = @aspectRatio = new THREE.Vector3 @opts.aspect_ratio...
@scaleSize = @aspectRatio.length()
@squareScale = (new THREE.Vector3 1, 1, 1).normalize()
@squareScale.multiplyScalar @scaleSize
@rescale = (vector) -> vector.multiply aspectRatio
# initialize the scene
@scene = new THREE.Scene()
# functions in render hooks will be run when before rendering
@renderHooks = []
@opts.callback? @
# client code should call this when done adding objects to the scene
finalize: ->
@computeDimensions()
@element = document.createElement 'span'
@element.className = 'math-3d-viewer'
@element.style.display = 'inline-block'
if @opts.renderer is 'canvas'
# possibly show the canvas warning.
@element.title = 'WARNING: using slow non-WebGL canvas renderer'
@setColor()
@setFrame()
@setCamera()
@setOrbitControls()
@setLight()
# borrow the renderer to do the initial render
owner = _sceneUsingRenderer
wasDynamic = owner? and owner.rendererType is 'dynamic'
@setDynamicRenderer() # used to do the first render
@setStaticRenderer()
# then give it back to whoever had it (if they were using it)
if wasDynamic
owner.setDynamicRenderer()
@setOnMouseOver()
@opts.parent.appendChild @element
setDynamicRenderer: ->
if @rendererType is 'dynamic'
# already have it
return
@renderer = getRenderer @, @opts.renderer
@rendererType = 'dynamic'
@renderer.setClearColor @background, 1
@renderer.setSize @opts.width, @opts.height
@controls.enabled = true
if @opts.spin
@animate render: false
@renderScene true
# remove the current renderer (if it exists)
removeElement @element.lastChild
# place renderer in correct place in the DOM
@element.appendChild @renderer.domElement
setStaticRenderer: ->
if @rendererType is 'static'
# already have it
return
@controls.enabled = false
if not @staticImage?
@staticImage = document.createElement 'img'
@staticImage.className = 'math-3d-static-renderer'
@staticImage.style.width = @opts.width
@staticImage.style.height = @opts.height
@staticImage.src = @dataUrl()
@rendererType = 'static'
# remove the current renderer (if it exists)
removeElement @element.lastChild
# place renderer in correct place in the DOM
@element.appendChild @staticImage
dataUrl: (opts) ->
opts = defaults opts,
type : 'png' # 'png' or 'jpeg' or 'webp' (the best)
quality : undefined # 1 is best quality; 0 is worst; only applies for jpeg or webp
@renderer.domElement.toDataURL "image/#{opts.type}", opts.quality
updateBoundingBox: (obj) ->
obj.geometry.computeBoundingBox()
if @_boundingBoxHelper?
@_boundingBoxHelper.copy obj.geometry.boundingBox.clone()
else
@_boundingBoxHelper = obj.geometry.boundingBox.clone()
@_boundingBoxHelper.min.add obj.position
@_boundingBoxHelper.max.add obj.position
if @boundingBox?
@boundingBox.geometry.boundingBox.union @_boundingBoxHelper
else
@boundingBox = new THREE.BoxHelper()
@boundingBox.geometry.boundingBox = @_boundingBoxHelper.clone()
@boundingBox.update @boundingBox
_finalizeObj: (obj, in_frame) ->
obj.scale.copy @aspectRatio
if in_frame
@updateBoundingBox obj
@rescale obj.position
@scene.add obj
return obj
addText: (opts) ->
opts = defaults opts,
text : required
loc : required
fontface : undefined # defaults to Text3d's default font
rotation : undefined # by default will always face the camera
size : 1 # should really be specified
texture : required
if not (opts.rotation? or @_text?)
@_text = []
up = new THREE.Vector3()
@renderHooks.push =>
up.set(0, 1, 0).applyQuaternion @camera.quaternion
for text in @_text
text.up.copy up
text.lookAt @camera.position
opts.depth = 0
text = @addText3d opts
if not opts.rotation?
@_text.push text
return text
addText3d: (opts) ->
opts = defaults opts,
text : required
loc : [0, 0, 0]
rotation : [0, 0, 0]
fontface : "helvetiker"
size : 1 # should really be specified
depth : 1 # ditto
texture : required
geometry = new THREE.TextGeometry opts.text,
size : opts.size
height : opts.depth
font : opts.fontface
material = new THREE.MeshBasicMaterial
opacity : opts.texture.opacity
transparent : opts.texture.opacity < 1
material.color.setRGB opts.texture.color...
text = new THREE.Mesh geometry, material
text.position.set opts.loc...
text.rotation.set opts.rotation...
# we shift the origin of the text to the center so
# that rotations will be about the center of the text
# rather than the lower left corner
geometry.computeBoundingBox()
shift = (new THREE.Matrix4()).makeTranslation(
geometry.boundingBox.center().negate().toArray()...)
geometry.applyMatrix shift
geometry.boundingBox.applyMatrix4 shift
@_finalizeObj text, false
text.scale.copy @squareScale
return text
addLine: (opts) ->
opts = defaults opts,
points : required
thickness : 1
arrow_head : false # TODO
texture : required
in_frame : true
geometry = new THREE.Geometry()
for point in opts.points
geometry.vertices.push new THREE.Vector3(point...)
line = new THREE.Line geometry, new THREE.LineBasicMaterial(linewidth:opts.thickness)
line.material.color.setRGB opts.texture.color...
return @_finalizeObj line, opts.in_frame
addSphere: (opts) ->
opts = defaults opts,
loc : required
radius : 5
texture : required
in_frame : true
segments : if @opts.renderer is 'webgl' then 64 else 24
_basic_material : false
geometry = new THREE.SphereGeometry opts.radius, opts.segments, opts.segments
if opts._basic_material
material = new THREE.MeshBasicMaterial
transparent : opts.texture.opacity < 1
else
material = new THREE.MeshPhongMaterial
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
material.ambient.setRGB opts.texture.ambient...
material.specular.setRGB opts.texture.specular...
material.color.setRGB opts.texture.color...
material.opacity = opts.texture.opacity
sphere = new THREE.Mesh geometry, material
sphere.position.set opts.loc...
@_finalizeObj sphere, opts.in_frame
return sphere
addTorus: (opts) ->
opts = defaults opts,
loc : required
inner_radius : .3
outer_radius : 1
texture : required
in_frame : true
segments : if @opts.renderer is 'webgl' then 64 else 24
tube_diameter = opts.outer_radius-opts.inner_radius
doughnut_radius = (opts.outer_radius+opts.inner_radius)/2
geometry = new THREE.TorusGeometry doughnut_radius, tube_diameter, opts.segments, opts.segments
material = new THREE.MeshPhongMaterial
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
material.ambient.setRGB opts.texture.ambient...
material.specular.setRGB opts.texture.specular...
material.color.setRGB opts.texture.color...
material.opacity = opts.texture.opacity
torus = new THREE.Mesh geometry, material
torus.position.set opts.loc...
return @_finalizeObj torus, opts.in_frame
_addCloudPoint: (opts) ->
if not @_cloud
@_cloud = {}
key = opts.size+opts.texture.color+opts.in_frame
if not (cloud = @_cloud[key])?
material = new THREE.PointCloudMaterial
size : opts.size*2
sizeAttenuation : false
material.color.setRGB opts.texture.color...
cloud = @_cloud[key] = new THREE.PointCloud(
new THREE.Geometry(), material)
cloud.scale.copy @aspectRatio
@scene.add cloud
cloud.geometry.vertices.push new THREE.Vector3(opts.loc...)
if opts.in_frame
@updateBoundingBox cloud
return cloud
_initPointHelper: ->
if not @_pointHelper?
@_pointHelperVec = new THREE.Vector3()
@_pointHelper = new THREE.Mesh()
@_pointHelper.geometry.vertices.push @_pointHelperVec
_addSpherePoint: (opts) ->
if not @_points?
@_points = []
@renderHooks.push =>
for point in @_points
scale = @camera.position.distanceTo point.position
point.scale.set scale, scale, scale
if opts.in_frame
@_initPointHelper()
@_pointHelperVec.set opts.loc...
@updateBoundingBox @_pointHelper
opts.radius = opts.size/1200
opts._basic_material = true
opts.in_frame = false
delete opts.size
point = @addSphere opts
@_points.push point
return point
addPoint: (opts) ->
opts = defaults opts,
loc : required
size : 5
texture : required
in_frame : true
if @opts.fast_points
return @_addCloudPoint opts
else
return @_addSpherePoint opts
addIndexFaceSet: (opts) ->
opts = defaults opts,
vertices : required
faces : required
texture : required
wireframe : undefined
in_frame : true
geometry = new THREE.Geometry()
for vector in opts.vertices
geometry.vertices.push new THREE.Vector3(vector...)
for vertex in opts.faces
a = vertex.shift()
b = vertex.shift()
while c = vertex.shift()
geometry.faces.push new THREE.Face3 a, b, c
b = c
geometry.mergeVertices()
geometry.computeFaceNormals()
if opts.wireframe
material = new THREE.MeshBasicMaterial
wireframe : true
wireframeLinewidth : opts.wireframe
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
else
material = new THREE.MeshPhongMaterial
transparent : opts.texture.opacity < 1
side : THREE.DoubleSide
material.ambient.setRGB opts.texture.ambient...
material.specular.setRGB opts.texture.specular...
material.color.setRGB opts.texture.color...
material.opacity = opts.texture.opacity
mesh = new THREE.Mesh geometry, material
return @_finalizeObj mesh, opts.in_frame
addGroup: (opts) ->
opts = defaults opts,
subobjs : required
ret = for obj in opts.subobjs
@addObj obj
return ret
addObj: (opts) ->
opts = defaults opts, {type: required}, true
type = opts.type
delete opts.type
switch type
when 'group'
return @addGroup opts
when 'text'
return @addText opts
when 'text3d'
return @addText3d opts
when 'index_face_set'
return @addIndexFaceSet opts
when 'line'
return @addLine opts
when 'point'
return @addPoint opts
when 'sphere'
return @addSphere opts
when 'torus'
return @addTorus opts
else
console.log "ERROR: bad object type #{opts.obj.type}"
computeDimensions: ->
dim = @rescale @boundingBox.geometry.boundingBox.size()
@maxDim = Math.max dim.x, dim.y, dim.z
@minDim = Math.min dim.x, dim.y, dim.z
@center = @rescale @boundingBox.geometry.boundingBox.center()
setColor: ->
# setup color stuff
@background = new THREE.Color @opts.background...
@element.style.background = @background.getStyle()
if @frameOpts.color?
@frameColor = new THREE.Color @frameOpts.color...
else
@frameColor = new THREE.Color(
1-@background.r, 1-@background.g, 1-@background.b)
# always call this after adding things to the scene to make sure track
# controls are sorted out, etc. The client should never have to worry
# about calling this
setFrame: ->
###
if Math.abs(x1 - x0) < eps
x1 += 1
x0 -= 1
if Math.abs(y1 - y0) < eps
y1 += 1
y0 -= 1
if Math.abs(z1 - z0) < eps
z1 += 1
z0 -= 1
###
if @frameOpts.thickness isnt 0 and not @_bounded
@_bounded = true
if not @boundingBox?
# no objects in the scene
return
# set the color and linewidth of the bounding box
@boundingBox.material.color = @frameColor
@boundingBox.material.linewidth = @frameOpts.thickness
# set the scale for the bounding box
@boundingBox.scale.copy @aspectRatio
# the update method of BoxHelper disables matrixAutoUpdate but
# we still need it for the aspect ratio to be taken into account
@boundingBox.matrixAutoUpdate = true
# add the bounding box to the scene
@scene.add @boundingBox
if @frameOpts.labels and not @_labeled
@_labeled = true
min = @boundingBox.geometry.boundingBox.min
max = @boundingBox.geometry.boundingBox.max
avg = @boundingBox.geometry.boundingBox.center()
offset = @maxDim*0.05
offsets = (new THREE.Vector3 offset, offset, offset).divide @aspectRatio
textSize = @minDim/@scaleSize/8
if textSize is 0
return
frameColor = [@frameColor.r, @frameColor.g, @frameColor.b]
addHashMark = (loc) =>
loc2 = new THREE.Vector3 loc...
if offsetDir[0] is '+'
loc2[offsetDir[1]] += offsets[offsetDir[1]]*0.75
else if offsetDir[0] is '-'
loc2[offsetDir[1]] -= offsets[offsetDir[1]]*0.75
loc2 = [loc2.x, loc2.y, loc2.z]
@addLine
points : [loc, loc2]
thickness : @frameOpts.thickness*5
in_frame : false
texture :
color : frameColor
opacity : 1
addLabel = (loc, text) =>
addHashMark loc
text = @addText
loc : loc
text : text
size : textSize
fontface : @frameOpts.fontface
texture :
color : frameColor
opacity : 1
# add a bit of extra offset based on the size of the text
textBox = text.geometry.boundingBox.size().multiply @squareScale
extraOffset = Math.max(textBox.x, textBox.y, textBox.z)/2
realOffset = offset + extraOffset
if offsetDir[0] is '+'
text.position[offsetDir[1]] += realOffset
else if offsetDir[0] is '-'
text.position[offsetDir[1]] -= realOffset
format = (num) ->
Number(num.toFixed 2).toString()
offsetDir = ['-','y']
addLabel [max.x, min.y, min.z], format(min.z)
addLabel [max.x, min.y, avg.z], "z = #{format avg.z}"
addLabel [max.x, min.y, max.z], format(max.z)
offsetDir = ['+','x']
addLabel [max.x, min.y, min.z], format(min.y)
addLabel [max.x, avg.y, min.z], "y = #{format avg.y}"
addLabel [max.x, max.y, min.z], format(max.y)
offsetDir = ['+','y']
addLabel [max.x, max.y, min.z], format(max.x)
addLabel [avg.x, max.y, min.z], "x = #{format avg.x}"
addLabel [min.x, max.y, min.z], format(min.x)
setLight: ->
ambientLight = new THREE.AmbientLight()
ambientLight.color.setRGB @lightOpts.color...
@scene.add ambientLight
cameraLight = new THREE.PointLight 0, @lightOpts.intensity
cameraLight.color.setRGB @lightOpts.color...
@scene.add cameraLight
@renderHooks.push =>
cameraLight.position.copy @camera.position
setCamera: ->
view_angle = 45
aspect = @opts.width/@opts.height
near = @minDim/4
far = @maxDim*16
@camera = new THREE.PerspectiveCamera view_angle, aspect, near, far
@camera.up = new THREE.Vector3 0, 0, 1
@camera.lookAt @center
@camera.position.set 1.5, 1.5, 0.75
@camera.position.multiplyScalar(@maxDim).add @center
@camera.updateProjectionMatrix()
@scene.add @camera
setOrbitControls: ->
# set up camera controls
@controls = new OrbitControls @camera, @element
@controls.target = @center
@controls.damping = 2
@controls.noKeys = true
#@controls.zoomSpeed = 0.6
if @opts.spin
if typeof @opts.spin is "boolean"
@controls.autoRotateSpeed = 2.0
else
@controls.autoRotateSpeed = @opts.spin
@controls.autoRotate = true
@controls.addEventListener 'change', => @renderScene()
# on mouseover, we switch the renderer out to use the dynamic renderer
setOnMouseOver: ->
@element.addEventListener 'mouseover', (=> @setDynamicRenderer()), false
@element.addEventListener 'mouseleave', (=> @setStaticRenderer()), false
animate: (opts = {}) ->
opts = defaults opts,
fps : undefined
stop : false
mouseover : undefined # ignored now
render : true
if @_animate_started and not opts.stop
return
@_animate_started = true
@_animate opts
_animate: (opts) ->
if @rendererType is 'static'
# will try again when we switch to dynamic renderer
@_animate_started = false
return
if @element.offsetWidth <= 0 and @element.offsetWidth <= 0
if @opts.stop_when_gone? and not document.contains(@opts.stop_when_gone)
@_animate_started = false
else if not document.contains(@element)
setTimeout (=> @_animate opts), 5000
else
setTimeout (=> @_animate opts), 1000
return
if opts.stop
@_stop_animating = true
# so next time around will start
return
if @_stop_animating
@_stop_animating = false
@_animate_started = false
return
@renderScene opts.render
delete opts.render
f = =>
requestAnimationFrame (=> @_animate opts)
if opts.fps? and opts.fps
setTimeout f, 1000/opts.fps
else
f()
renderScene: (force = false) ->
if @_rendering? and @_rendering
# already in the process of rendering
return
if @rendererType isnt 'dynamic'
# scene's are only rendered when they are dynamic
return
if not @camera?
return # nothing to do yet
@_rendering = true
if not force
position = @camera.position
if not @_lastPosition?
force = true
@_lastPosition = position.clone()
#@_renderDistance = @minDim*@minDim/10000
else if @_lastPosition.distanceToSquared(position) > 0.05
force = true
@_lastPosition.copy position
if force
@controls.update()
for hook in @renderHooks
hook()
@renderer.render @scene, @camera
@_rendering = false
@math3d = (opts) ->
opts = defaults opts,
scene : required # {opts:?, obj:?} or url from which to download (via ajax) a JSON string that parses to {opts:?,obj:?}
element : required # DOM element to attach to
timeout : 30000 # milleseconds for timing out fetchs
callback : undefined # callback(scene object, error)
# Render a 3-d scene
create_scene = (scene) ->
scene.opts ?= {}
scene.opts.parent = opts.element
scene.opts.callback = (sceneobj, error) ->
if not error
if scene.obj?
sceneobj.addObj scene.obj
sceneobj.finalize()
opts.callback? sceneobj, error
new Math3dThreeJS scene.opts
switch typeof opts.scene
when 'object'
create_scene opts.scene
when 'string'
xhr = new XMLHttpRequest()
xhr.timeout = opts.timeout
xhr.onload = ->
if @status is 200 # success
try
create_scene JSON.parse @responseText
catch error
opts.callback? error
else
opts.callback? "errno #{@status} when trying to download #{opts.scene}"
xhr.onerror = ->
opts.callback? "error when trying to download #{opts.scene}"
xhr.onabort = ->
opts.callback? "downloading #{opts.scene} aborted"
xhr.ontimeout = ->
opts.callback? "downloading #{opts.scene} timed out"
xhr.open 'get', opts.scene
xhr.send()
else
opts.callback? "bad scene type #{typeof opts.scene}"
for key, value of math3d
@math3d[key] = value
|
[
{
"context": "###\n *\n * jQuery truncateLines by Gary Hepting - https://github.com/ghepting/jquery-truncate-lin",
"end": 47,
"score": 0.9999009370803833,
"start": 35,
"tag": "NAME",
"value": "Gary Hepting"
},
{
"context": "runcateLines by Gary Hepting - https://github.com/ghepting/... | src/coffee/plugins/jquery-truncateLines.coffee | srinivasbellarichenna/groundwork | 1 | ###
*
* jQuery truncateLines by Gary Hepting - https://github.com/ghepting/jquery-truncate-lines
*
* Open source under the MIT License.
*
* Copyright © 2013 Gary Hepting. All rights reserved.
*
###
delayedAdjustTruncation = []
truncateIndex = 0
class TruncateLines
constructor: (el) ->
@el = el
@index = truncateIndex++
@text = $(@el).text()
$(@el).attr('data-text',@text)
@words = @text.trim().split(" ") # store words in array
@lines = parseInt($(@el).attr('data-truncate')) # store maximum number of lines
@truncate()
@adjustOnResize()
truncate: ->
@measure()
@setContent()
reset: ->
$(@el).text(@text)
.css('max-height', 'none')
.attr('data-truncated', 'false')
measure: ->
@reset() # reset element state
$(@el).html(".") # set element to have single line
@singleLineHeight = $(@el).outerHeight()
i = 1
while i++ < @lines # set element to have the maximum number of lines
$(@el).append("<br>.")
@maxLinesHeight = $(@el).outerHeight() # store the height of the element when it is at the max number of lines
empty: ->
$(@el).html("") # clear the element
setContent: ->
@reset() # reset element state
truncated = false # reset truncated state
@addWords(@words.length)
if @tooBig()
# binary build-up the string -- Thanks @BananaNeil :]
@addNumberWordsThatFit()
$(@el).css('max-height', @maxLinesHeight + 'px') # set the max height
$(@el).attr('data-truncated', true) # set element truncation state
addNumberWordsThatFit: ->
cant = @words.length
can = 0
mid = Math.floor(@words.length/2)
while can+1 != cant
@addWords(can + mid)
if @tooBig()
cant = can + mid
else
can = can + mid
mid = Math.floor(mid/2) || 1
@addWords(can)
$(@el).html( @trimTrailingPunctuation( $(@el).html() ) ) # trim trailing punctuation
addWords: (num) ->
$(@el).html(@words.slice(0,num).join(" "))
tooBig: ->
$(@el).outerHeight() > @maxLinesHeight
adjustOnResize: ->
$(window).on 'resize', =>
clearTimeout(delayedAdjustTruncation[@index])
delayedAdjustTruncation[@index] = setTimeout(=>
@truncate()
, 20)
trimTrailingPunctuation: (str) ->
str.replace(/(,$)|(\.$)|(\:$)|(\;$)|(\?$)|(\!$)/g, "")
(($) ->
truncateInitialized = false
truncatedLineElements = []
$.fn.truncateLines = ->
unless truncateInitialized
# add CSS for the ellipsis (just so there are no additional file dependencies)
$('head').append('<style type="text/css"> [data-truncated="true"] { overflow: hidden; } [data-truncated="true"]:after { content: "..."; position: absolute; } </style>')
@each ->
truncatedLineElements.push( new TruncateLines(@) )
) jQuery
$(window).load ->
$("[data-truncate]").truncateLines()
# $('[data-truncated="true"]').on 'mouseenter', ->
# $(this).html($(this).attr('data-text')).attr('data-truncated', 'false')
# stopScrolling($(this))
# $(this).animate(scrollTop: $(this)[0].scrollHeight, ($(this)[0].scrollHeight * 120))
# $('[data-truncated="true"]').on 'mouseleave', ->
# $(this).stop().animate(scrollTop: 0, ($(this)[0].scrollHeight * 5), ->
# $(this).truncateLines()
# )
# $('[data-truncated="true"]').on 'mousedown', ->
# stopScrolling($(this))
# $('[data-truncated="true"]').on 'mousewheel', ->
# stopScrolling($(this))
# stopScrolling = ($el) ->
# $el.stop().css('overflow','auto')
| 120484 | ###
*
* jQuery truncateLines by <NAME> - https://github.com/ghepting/jquery-truncate-lines
*
* Open source under the MIT License.
*
* Copyright © 2013 <NAME>. All rights reserved.
*
###
delayedAdjustTruncation = []
truncateIndex = 0
class TruncateLines
constructor: (el) ->
@el = el
@index = truncateIndex++
@text = $(@el).text()
$(@el).attr('data-text',@text)
@words = @text.trim().split(" ") # store words in array
@lines = parseInt($(@el).attr('data-truncate')) # store maximum number of lines
@truncate()
@adjustOnResize()
truncate: ->
@measure()
@setContent()
reset: ->
$(@el).text(@text)
.css('max-height', 'none')
.attr('data-truncated', 'false')
measure: ->
@reset() # reset element state
$(@el).html(".") # set element to have single line
@singleLineHeight = $(@el).outerHeight()
i = 1
while i++ < @lines # set element to have the maximum number of lines
$(@el).append("<br>.")
@maxLinesHeight = $(@el).outerHeight() # store the height of the element when it is at the max number of lines
empty: ->
$(@el).html("") # clear the element
setContent: ->
@reset() # reset element state
truncated = false # reset truncated state
@addWords(@words.length)
if @tooBig()
# binary build-up the string -- Thanks @BananaNeil :]
@addNumberWordsThatFit()
$(@el).css('max-height', @maxLinesHeight + 'px') # set the max height
$(@el).attr('data-truncated', true) # set element truncation state
addNumberWordsThatFit: ->
cant = @words.length
can = 0
mid = Math.floor(@words.length/2)
while can+1 != cant
@addWords(can + mid)
if @tooBig()
cant = can + mid
else
can = can + mid
mid = Math.floor(mid/2) || 1
@addWords(can)
$(@el).html( @trimTrailingPunctuation( $(@el).html() ) ) # trim trailing punctuation
addWords: (num) ->
$(@el).html(@words.slice(0,num).join(" "))
tooBig: ->
$(@el).outerHeight() > @maxLinesHeight
adjustOnResize: ->
$(window).on 'resize', =>
clearTimeout(delayedAdjustTruncation[@index])
delayedAdjustTruncation[@index] = setTimeout(=>
@truncate()
, 20)
trimTrailingPunctuation: (str) ->
str.replace(/(,$)|(\.$)|(\:$)|(\;$)|(\?$)|(\!$)/g, "")
(($) ->
truncateInitialized = false
truncatedLineElements = []
$.fn.truncateLines = ->
unless truncateInitialized
# add CSS for the ellipsis (just so there are no additional file dependencies)
$('head').append('<style type="text/css"> [data-truncated="true"] { overflow: hidden; } [data-truncated="true"]:after { content: "..."; position: absolute; } </style>')
@each ->
truncatedLineElements.push( new TruncateLines(@) )
) jQuery
$(window).load ->
$("[data-truncate]").truncateLines()
# $('[data-truncated="true"]').on 'mouseenter', ->
# $(this).html($(this).attr('data-text')).attr('data-truncated', 'false')
# stopScrolling($(this))
# $(this).animate(scrollTop: $(this)[0].scrollHeight, ($(this)[0].scrollHeight * 120))
# $('[data-truncated="true"]').on 'mouseleave', ->
# $(this).stop().animate(scrollTop: 0, ($(this)[0].scrollHeight * 5), ->
# $(this).truncateLines()
# )
# $('[data-truncated="true"]').on 'mousedown', ->
# stopScrolling($(this))
# $('[data-truncated="true"]').on 'mousewheel', ->
# stopScrolling($(this))
# stopScrolling = ($el) ->
# $el.stop().css('overflow','auto')
| true | ###
*
* jQuery truncateLines by PI:NAME:<NAME>END_PI - https://github.com/ghepting/jquery-truncate-lines
*
* Open source under the MIT License.
*
* Copyright © 2013 PI:NAME:<NAME>END_PI. All rights reserved.
*
###
delayedAdjustTruncation = []
truncateIndex = 0
class TruncateLines
constructor: (el) ->
@el = el
@index = truncateIndex++
@text = $(@el).text()
$(@el).attr('data-text',@text)
@words = @text.trim().split(" ") # store words in array
@lines = parseInt($(@el).attr('data-truncate')) # store maximum number of lines
@truncate()
@adjustOnResize()
truncate: ->
@measure()
@setContent()
reset: ->
$(@el).text(@text)
.css('max-height', 'none')
.attr('data-truncated', 'false')
measure: ->
@reset() # reset element state
$(@el).html(".") # set element to have single line
@singleLineHeight = $(@el).outerHeight()
i = 1
while i++ < @lines # set element to have the maximum number of lines
$(@el).append("<br>.")
@maxLinesHeight = $(@el).outerHeight() # store the height of the element when it is at the max number of lines
empty: ->
$(@el).html("") # clear the element
setContent: ->
@reset() # reset element state
truncated = false # reset truncated state
@addWords(@words.length)
if @tooBig()
# binary build-up the string -- Thanks @BananaNeil :]
@addNumberWordsThatFit()
$(@el).css('max-height', @maxLinesHeight + 'px') # set the max height
$(@el).attr('data-truncated', true) # set element truncation state
addNumberWordsThatFit: ->
cant = @words.length
can = 0
mid = Math.floor(@words.length/2)
while can+1 != cant
@addWords(can + mid)
if @tooBig()
cant = can + mid
else
can = can + mid
mid = Math.floor(mid/2) || 1
@addWords(can)
$(@el).html( @trimTrailingPunctuation( $(@el).html() ) ) # trim trailing punctuation
addWords: (num) ->
$(@el).html(@words.slice(0,num).join(" "))
tooBig: ->
$(@el).outerHeight() > @maxLinesHeight
adjustOnResize: ->
$(window).on 'resize', =>
clearTimeout(delayedAdjustTruncation[@index])
delayedAdjustTruncation[@index] = setTimeout(=>
@truncate()
, 20)
trimTrailingPunctuation: (str) ->
str.replace(/(,$)|(\.$)|(\:$)|(\;$)|(\?$)|(\!$)/g, "")
(($) ->
truncateInitialized = false
truncatedLineElements = []
$.fn.truncateLines = ->
unless truncateInitialized
# add CSS for the ellipsis (just so there are no additional file dependencies)
$('head').append('<style type="text/css"> [data-truncated="true"] { overflow: hidden; } [data-truncated="true"]:after { content: "..."; position: absolute; } </style>')
@each ->
truncatedLineElements.push( new TruncateLines(@) )
) jQuery
$(window).load ->
$("[data-truncate]").truncateLines()
# $('[data-truncated="true"]').on 'mouseenter', ->
# $(this).html($(this).attr('data-text')).attr('data-truncated', 'false')
# stopScrolling($(this))
# $(this).animate(scrollTop: $(this)[0].scrollHeight, ($(this)[0].scrollHeight * 120))
# $('[data-truncated="true"]').on 'mouseleave', ->
# $(this).stop().animate(scrollTop: 0, ($(this)[0].scrollHeight * 5), ->
# $(this).truncateLines()
# )
# $('[data-truncated="true"]').on 'mousedown', ->
# stopScrolling($(this))
# $('[data-truncated="true"]').on 'mousewheel', ->
# stopScrolling($(this))
# stopScrolling = ($el) ->
# $el.stop().css('overflow','auto')
|
[
{
"context": "# event.js\n# Copyright (c) 2015 David Tai\n#\n#Permission to use, copy, modify, and/or distri",
"end": 41,
"score": 0.9998132586479187,
"start": 32,
"tag": "NAME",
"value": "David Tai"
}
] | src/event.coffee | davidtai/domino | 0 | # event.js
# Copyright (c) 2015 David Tai
#
#Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
module.exports = class EventEmitter
events: null
constructor: ->
@events = {}
# add a namespaced listener
on: (name, listener, namespace = '') ->
# add listener to event map, create missing hashes for namespace and event name
if !(namespacedEvents = @events[namespace])?
namespacedEvents = @events[namespace] = {}
if !(namedEvents = namespacedEvents[name])?
namedEvents = namespacedEvents[name] = []
namedEvents.push listener
# add a namespaced listener that only executes once
once: (name, listener, namespace) ->
# modify the listener function to off itself
l2 = () =>
listener.apply @, arguments
@off.call @, name, l2, namespace
@on name, l2, namespace
trigger: (name, data, namespace = '') ->
# execute event in namespace, abort if namespae or event name does not exist
namespacedEvents = @events[namespace]
if !namespacedEvents?
return
namedEvents = namespacedEvents[name]
if !namedEvents?
return
for listener in namedEvents
listener.call(@, data)
# removed a namespaced listener
off: (name, listener, namespace = '') ->
# abort off if namespace or event name does not exist
namespacedEvents = @events[namespace]
if !namespacedEvents?
return
namedEvents = namespacedEvents[name]
if !namedEvents?
return
# return the listener removed if true, otherwise return null
for i, v of namedEvents
if listener == v
namedEvents.splice i, 1
return v
return null
# various other ways of removing listeners
offAll: () ->
#delete everything
for k, v of @events
delete @events[k]
offNamespace: (namespace) ->
# delete a whole namespace
delete @events[namespace]
offEvents: (name) ->
# loop over namespaces and delete everything associated with event
for namespacedEvents of @events
delete namespacedEvents[name]
offNamespacedEvents: (name, namespace = '') ->
# delete everything associated with event in a specific namespace
if !(namespacedEvents = @events[namespace])?
namespacedEvents = @events[namespace] = {}
delete namespacedEvents[name]
# diagnostic
isEmpty: ()->
for k, v of @events
return false
return true
| 193176 | # event.js
# Copyright (c) 2015 <NAME>
#
#Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
module.exports = class EventEmitter
events: null
constructor: ->
@events = {}
# add a namespaced listener
on: (name, listener, namespace = '') ->
# add listener to event map, create missing hashes for namespace and event name
if !(namespacedEvents = @events[namespace])?
namespacedEvents = @events[namespace] = {}
if !(namedEvents = namespacedEvents[name])?
namedEvents = namespacedEvents[name] = []
namedEvents.push listener
# add a namespaced listener that only executes once
once: (name, listener, namespace) ->
# modify the listener function to off itself
l2 = () =>
listener.apply @, arguments
@off.call @, name, l2, namespace
@on name, l2, namespace
trigger: (name, data, namespace = '') ->
# execute event in namespace, abort if namespae or event name does not exist
namespacedEvents = @events[namespace]
if !namespacedEvents?
return
namedEvents = namespacedEvents[name]
if !namedEvents?
return
for listener in namedEvents
listener.call(@, data)
# removed a namespaced listener
off: (name, listener, namespace = '') ->
# abort off if namespace or event name does not exist
namespacedEvents = @events[namespace]
if !namespacedEvents?
return
namedEvents = namespacedEvents[name]
if !namedEvents?
return
# return the listener removed if true, otherwise return null
for i, v of namedEvents
if listener == v
namedEvents.splice i, 1
return v
return null
# various other ways of removing listeners
offAll: () ->
#delete everything
for k, v of @events
delete @events[k]
offNamespace: (namespace) ->
# delete a whole namespace
delete @events[namespace]
offEvents: (name) ->
# loop over namespaces and delete everything associated with event
for namespacedEvents of @events
delete namespacedEvents[name]
offNamespacedEvents: (name, namespace = '') ->
# delete everything associated with event in a specific namespace
if !(namespacedEvents = @events[namespace])?
namespacedEvents = @events[namespace] = {}
delete namespacedEvents[name]
# diagnostic
isEmpty: ()->
for k, v of @events
return false
return true
| true | # event.js
# Copyright (c) 2015 PI:NAME:<NAME>END_PI
#
#Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
module.exports = class EventEmitter
events: null
constructor: ->
@events = {}
# add a namespaced listener
on: (name, listener, namespace = '') ->
# add listener to event map, create missing hashes for namespace and event name
if !(namespacedEvents = @events[namespace])?
namespacedEvents = @events[namespace] = {}
if !(namedEvents = namespacedEvents[name])?
namedEvents = namespacedEvents[name] = []
namedEvents.push listener
# add a namespaced listener that only executes once
once: (name, listener, namespace) ->
# modify the listener function to off itself
l2 = () =>
listener.apply @, arguments
@off.call @, name, l2, namespace
@on name, l2, namespace
trigger: (name, data, namespace = '') ->
# execute event in namespace, abort if namespae or event name does not exist
namespacedEvents = @events[namespace]
if !namespacedEvents?
return
namedEvents = namespacedEvents[name]
if !namedEvents?
return
for listener in namedEvents
listener.call(@, data)
# removed a namespaced listener
off: (name, listener, namespace = '') ->
# abort off if namespace or event name does not exist
namespacedEvents = @events[namespace]
if !namespacedEvents?
return
namedEvents = namespacedEvents[name]
if !namedEvents?
return
# return the listener removed if true, otherwise return null
for i, v of namedEvents
if listener == v
namedEvents.splice i, 1
return v
return null
# various other ways of removing listeners
offAll: () ->
#delete everything
for k, v of @events
delete @events[k]
offNamespace: (namespace) ->
# delete a whole namespace
delete @events[namespace]
offEvents: (name) ->
# loop over namespaces and delete everything associated with event
for namespacedEvents of @events
delete namespacedEvents[name]
offNamespacedEvents: (name, namespace = '') ->
# delete everything associated with event in a specific namespace
if !(namespacedEvents = @events[namespace])?
namespacedEvents = @events[namespace] = {}
delete namespacedEvents[name]
# diagnostic
isEmpty: ()->
for k, v of @events
return false
return true
|
[
{
"context": "# Copyright © 2014–6 Brad Ackerman.\n#\n# Licensed under the Apache License, Version 2",
"end": 34,
"score": 0.9998317956924438,
"start": 21,
"tag": "NAME",
"value": "Brad Ackerman"
}
] | spec/stationPickerDirective.coffee | backerman/eveindy | 2 | # Copyright © 2014–6 Brad Ackerman.
#
# Licensed under the Apache License, Version 2.0 the "License";
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
angular.module 'eveindy'
.config ($sceProvider) ->
$sceProvider.enabled false
describe 'Directive: stationPicker', () ->
compile = null
timeout = null
element = null
searchedTerm = null
selectedLocation = null
scope = null
# Copying this one from uib tests.
changeInputValueTo = null
beforeEach () ->
module 'eveindy'
module 'controller_test'
module 'directives_test'
inject ($compile, $rootScope, $sniffer, $timeout) ->
compile = $compile
timeout = $timeout
scope = $rootScope.$new()
scope.ctrl =
getStations: (searchTerm) ->
searchedTerm = searchTerm
results = fixture.load('stationPicker-autocomplete.json')
return JSON.parse JSON.stringify results
location: null
locationSelected: (station) ->
selectedLocation = station
changeInputValueTo = (element, value) ->
inputEl = element.find 'input'
inputEl.val value
inputEl.trigger if $sniffer.hasEvent 'input' then 'input' else 'change'
scope.$digest()
timeout.flush()
element = compile(
"""<div>
<station-picker search="ctrl.getStations(prefix)"
location="ctrl.location"
selected="ctrl.locationSelected(station)">
</station-picker></div>
""") scope
scope.$digest()
it 'should pass the search term to the parent', () ->
searchedTerm = null
changeInputValueTo element, 'xyz'
expect(searchedTerm).toEqual 'xyz'
it 'should get an autocomplete list', () ->
dropdown = element.find 'ul.dropdown-menu'
expect(dropdown.text().trim()).toBe("")
changeInputValueTo element, 'poi'
dropdown = element.find 'ul.dropdown-menu'
matches = dropdown.find('li')
expect(matches.length).toBe(10)
| 177768 | # Copyright © 2014–6 <NAME>.
#
# Licensed under the Apache License, Version 2.0 the "License";
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
angular.module 'eveindy'
.config ($sceProvider) ->
$sceProvider.enabled false
describe 'Directive: stationPicker', () ->
compile = null
timeout = null
element = null
searchedTerm = null
selectedLocation = null
scope = null
# Copying this one from uib tests.
changeInputValueTo = null
beforeEach () ->
module 'eveindy'
module 'controller_test'
module 'directives_test'
inject ($compile, $rootScope, $sniffer, $timeout) ->
compile = $compile
timeout = $timeout
scope = $rootScope.$new()
scope.ctrl =
getStations: (searchTerm) ->
searchedTerm = searchTerm
results = fixture.load('stationPicker-autocomplete.json')
return JSON.parse JSON.stringify results
location: null
locationSelected: (station) ->
selectedLocation = station
changeInputValueTo = (element, value) ->
inputEl = element.find 'input'
inputEl.val value
inputEl.trigger if $sniffer.hasEvent 'input' then 'input' else 'change'
scope.$digest()
timeout.flush()
element = compile(
"""<div>
<station-picker search="ctrl.getStations(prefix)"
location="ctrl.location"
selected="ctrl.locationSelected(station)">
</station-picker></div>
""") scope
scope.$digest()
it 'should pass the search term to the parent', () ->
searchedTerm = null
changeInputValueTo element, 'xyz'
expect(searchedTerm).toEqual 'xyz'
it 'should get an autocomplete list', () ->
dropdown = element.find 'ul.dropdown-menu'
expect(dropdown.text().trim()).toBe("")
changeInputValueTo element, 'poi'
dropdown = element.find 'ul.dropdown-menu'
matches = dropdown.find('li')
expect(matches.length).toBe(10)
| true | # Copyright © 2014–6 PI:NAME:<NAME>END_PI.
#
# Licensed under the Apache License, Version 2.0 the "License";
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
angular.module 'eveindy'
.config ($sceProvider) ->
$sceProvider.enabled false
describe 'Directive: stationPicker', () ->
compile = null
timeout = null
element = null
searchedTerm = null
selectedLocation = null
scope = null
# Copying this one from uib tests.
changeInputValueTo = null
beforeEach () ->
module 'eveindy'
module 'controller_test'
module 'directives_test'
inject ($compile, $rootScope, $sniffer, $timeout) ->
compile = $compile
timeout = $timeout
scope = $rootScope.$new()
scope.ctrl =
getStations: (searchTerm) ->
searchedTerm = searchTerm
results = fixture.load('stationPicker-autocomplete.json')
return JSON.parse JSON.stringify results
location: null
locationSelected: (station) ->
selectedLocation = station
changeInputValueTo = (element, value) ->
inputEl = element.find 'input'
inputEl.val value
inputEl.trigger if $sniffer.hasEvent 'input' then 'input' else 'change'
scope.$digest()
timeout.flush()
element = compile(
"""<div>
<station-picker search="ctrl.getStations(prefix)"
location="ctrl.location"
selected="ctrl.locationSelected(station)">
</station-picker></div>
""") scope
scope.$digest()
it 'should pass the search term to the parent', () ->
searchedTerm = null
changeInputValueTo element, 'xyz'
expect(searchedTerm).toEqual 'xyz'
it 'should get an autocomplete list', () ->
dropdown = element.find 'ul.dropdown-menu'
expect(dropdown.text().trim()).toBe("")
changeInputValueTo element, 'poi'
dropdown = element.find 'ul.dropdown-menu'
matches = dropdown.find('li')
expect(matches.length).toBe(10)
|
[
{
"context": "\\\\b\n '''\n },\n {\n 'match': '''(?xi)\n (Achilles|\n Adonis|\n Adriana|\n Aegeon|\n ",
"end": 876,
"score": 0.9806104302406311,
"start": 868,
"tag": "NAME",
"value": "Achilles"
},
{
"context": ",\n {\n 'match': '''(?xi)\n (Ach... | grammars/shakespeare.cson | zmbc/language-shakespeare | 1 | 'name': 'Shakespeare Programming Language'
'scopeName': 'source.shakespeare'
'fileTypes': [
'spl'
]
'patterns': [
{
'name': 'comment.line.shakespeare',
'match': '''(?xi)
\\A([^\\.]+\\.)
'''
},
{
'match': '''(?xi)
\\b
((scene|act)
(?=\\s[MCDXLVI])\\sM{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3}))
:\\s([^\\.]+\\.)
''',
'captures': {
'1': {'name': 'entity.name.function.shakespeare'},
'6': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'entity.name.function.shakespeare',
'match': '''(?xi)
\\b
((scene|act)
(?=\\s[MCDXLVI])\\sM{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3}))
\\b
'''
},
{
'name': 'keyword.control.shakespeare',
'match': '''(?xi)
\\bIf\\s+(so|not)\\b
'''
},
{
'match': '''(?xi)
(Achilles|
Adonis|
Adriana|
Aegeon|
Aemilia|
Agamemnon|
Agrippa|
Ajax|
Alonso|
Andromache|
Angelo|
Antiochus|
Antonio|
Arthur|
Autolycus|
Balthazar|
Banquo|
Beatrice|
Benedick|
Benvolio|
Bianca|
Brabantio|
Brutus|
Capulet|
Cassandra|
Cassius|
Christopher\\s+Sly|
Cicero|
Claudio|
Claudius|
Cleopatra|
Cordelia|
Cornelius|
Cressida|
Cymberline|
Demetrius|
Desdemona|
Dionyza|
Doctor\\s+Caius|
Dogberry|
Don\\s+John|
Don\\s+Pedro|
Donalbain|
Dorcas|
Duncan|
Egeus|
Emilia|
Escalus|
Falstaff|
Fenton|
Ferdinand|
Ford|
Fortinbras|
Francisca|
Friar\\s+John|
Friar\\s+Laurence|
Gertrude|
Goneril|
Hamlet|
Hecate|
Hector|
Helen|
Helena|
Hermia|
Hermonie|
Hippolyta|
Horatio|
Imogen|
Isabella|
John\\s+of\\s+Gaunt|
John\\s+of\\s+Lancaster|
Julia|
Juliet|
Julius\\s+Caesar|
King\\s+Henry|
King\\s+John|
King\\s+Lear|
King\\s+Richard|
Lady\\s+Capulet|
Lady\\s+Macbeth|
Lady\\s+Macduff|
Lady\\s+Montague|
Lennox|
Leonato|
Luciana|
Lucio|
Lychorida|
Lysander|
Macbeth|
Macduff|
Malcolm|
Mariana|
Mark\\s+Antony|
Mercutio|
Miranda|
Mistress\\s+Ford|
Mistress\\s+Overdone|
Mistress\\s+Page|
Montague|
Mopsa|
Oberon|
Octavia|
Octavius\\s+Caesar|
Olivia|
Ophelia|
Orlando|
Orsino|
Othello|
Page|
Pantino|
Paris|
Pericles|
Pinch|
Polonius|
Pompeius|
Portia|
Priam|
Prince\\s+Henry|
Prospero|
Proteus|
Publius|
Puck|
Queen\\s+Elinor|
Regan|
Robin|
Romeo|
Rosalind|
Sebastian|
Shallow|
Shylock|
Slender|
Solinus|
Stephano|
Thaisa|
The\\s+Abbot\\s+of\\s+Westminster|
The\\s+Apothecary|
The\\s+Archbishop\\s+of\\s+Canterbury|
The\\s+Duke\\s+of\\s+Milan|
The\\s+Duke\\s+of\\s+Venice|
The\\s+Ghost|
Theseus|
Thurio|
Timon|
Titania|
Titus|
Troilus|
Tybalt|
Ulysses|
Valentine|
Venus|
Vincentio|
Viola)
\\,([^\\.]+\\.)
''',
'captures': {
'1': {'name': 'variable.other.shakespeare'},
'2': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'constant.language.shakespeare',
'match': '''(?xi)
\\b
(
Hell|
Microsoft|
bastard|
beggar|
blister|
codpiece|
coward|
curse|
death|
devil|
draught|
famine|
flirt-gill|
goat|
hate|
hog|
hound|
leech|
lie|
pig|
plague|
starvation|
toad|
war|
wolf|
animal|
aunt|
brother|
cat|
chihuahua|
cousin|
cow|
daughter|
door|
face|
father|
fellow|
granddaughter|
grandfather|
grandmother|
grandson|
hair|
hamster|
horse|
lamp|
lantern|
mistletoe|
moon|
morning|
mother|
nephew|
niece|
nose|
purse|
road|
roman|
sister|
sky|
son|
squirrel|
stone\\s+wall|
thing|
town|
tree|
uncle|
wind|
Heaven|
King|
Lord|
angel|
flower|
happiness|
joy|
plum|
summer\\'s\\s+day|
hero|
rose|
kingdom|
pony|
nothing|
zero
)
\\b
'''
},
{
'name': 'variable.other.shakespeare',
'match': '''(?xi)
\\b
(
Achilles|
Adonis|
Adriana|
Aegeon|
Aemilia|
Agamemnon|
Agrippa|
Ajax|
Alonso|
Andromache|
Angelo|
Antiochus|
Antonio|
Arthur|
Autolycus|
Balthazar|
Banquo|
Beatrice|
Benedick|
Benvolio|
Bianca|
Brabantio|
Brutus|
Capulet|
Cassandra|
Cassius|
Christopher\\s+Sly|
Cicero|
Claudio|
Claudius|
Cleopatra|
Cordelia|
Cornelius|
Cressida|
Cymberline|
Demetrius|
Desdemona|
Dionyza|
Doctor\\s+Caius|
Dogberry|
Don\\s+John|
Don\\s+Pedro|
Donalbain|
Dorcas|
Duncan|
Egeus|
Emilia|
Escalus|
Falstaff|
Fenton|
Ferdinand|
Ford|
Fortinbras|
Francisca|
Friar\\s+John|
Friar\\s+Laurence|
Gertrude|
Goneril|
Hamlet|
Hecate|
Hector|
Helen|
Helena|
Hermia|
Hermonie|
Hippolyta|
Horatio|
Imogen|
Isabella|
John\\s+of\\s+Gaunt|
John\\s+of\\s+Lancaster|
Julia|
Juliet|
Julius\\s+Caesar|
King\\s+Henry|
King\\s+John|
King\\s+Lear|
King\\s+Richard|
Lady\\s+Capulet|
Lady\\s+Macbeth|
Lady\\s+Macduff|
Lady\\s+Montague|
Lennox|
Leonato|
Luciana|
Lucio|
Lychorida|
Lysander|
Macbeth|
Macduff|
Malcolm|
Mariana|
Mark\\s+Antony|
Mercutio|
Miranda|
Mistress\\s+Ford|
Mistress\\s+Overdone|
Mistress\\s+Page|
Montague|
Mopsa|
Oberon|
Octavia|
Octavius\\s+Caesar|
Olivia|
Ophelia|
Orlando|
Orsino|
Othello|
Page|
Pantino|
Paris|
Pericles|
Pinch|
Polonius|
Pompeius|
Portia|
Priam|
Prince\\s+Henry|
Prospero|
Proteus|
Publius|
Puck|
Queen\\s+Elinor|
Regan|
Robin|
Romeo|
Rosalind|
Sebastian|
Shallow|
Shylock|
Slender|
Solinus|
Stephano|
Thaisa|
The\\s+Abbot\\s+of\\s+Westminster|
The\\s+Apothecary|
The\\s+Archbishop\\s+of\\s+Canterbury|
The\\s+Duke\\s+of\\s+Milan|
The\\s+Duke\\s+of\\s+Venice|
The\\s+Ghost|
Theseus|
Thurio|
Timon|
Titania|
Titus|
Troilus|
Tybalt|
Ulysses|
Valentine|
Venus|
Vincentio|
Viola|
I|
me|
myself|
thee|
thou|
you|
thyself|
yourself
)
\\b
'''
},
{
'name': 'keyword.operator.shakespeare',
'match': '''(?xi)
\\b
(
art|
am|
are|
be|
is
)
\\b
'''
},
{
'name': 'constant.numeric.shakespeare',
'match': '''(?xi)
\\b
(
bad|
cowardly|
cursed|
damned|
dirty|
disgusting|
distasteful|
dusty|
evil|
fat|
fat-kidneyed|
fatherless|
foul|
hairy|
half-witted|
horrible|
horrid|
infected|
lying|
miserable|
misused|
oozing|
rotten|
rotten|
smelly|
snotty|
sorry|
stinking|
stuffed|
stupid|
vile|
villainous|
worried|
big|
black|
blue|
bluest|
bottomless|
furry|
green|
hard|
huge|
large|
little|
normal|
old|
purple|
red|
rural|
small|
tiny|
white|
yellow|
amazing|
beautiful|
blossoming|
bold|
brave|
charming|
clearest|
cunning|
cute|
delicious|
embroidered|
fair|
fine|
gentle|
golden|
good|
handsome|
happy|
healthy|
honest|
lovely|
loving|
mighty|
noble|
peaceful|
pretty|
prompt|
proud|
reddest|
rich|
smooth|
sunny|
sweet|
sweetest|
trustworthy|
warm
)
\\b
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
\\b
(
the\\s+cube\\s+of|
the\\s+factorial\\s+of|
the\\s+square\\s+of|
the\\s+square\\s+root\\s+of|
twice|
the\\s+difference\\s+between|
the\\s+product\\s+of|
the\\s+quotient\\s+between|
the\\s+remainder\\s+of\\s+the\\s+quotient\\s+between|
the\\s+sum\\s+of
)
\\b
'''
},
{
'name': 'keyword.other.shakespeare',
'match': '''(?xi)
\\b
(
enter|
exit|
exeunt|
and
)
\\b
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
(speak|open|listen\\s+to)\\s+
(thine|thy|your)\\s+
(heart|mind)
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
(speak|open|listen\\s+to)\\s+
(thine|thy|your)\\s+
(heart|mind)
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
remember
'''
},
{
'match': '''(?xi)
(recall)\\s+
([^!\\.]*[!\\.])
''',
'captures': {
'1': {'name': 'support.function.shakespeare'},
'2': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'keyword.control.shakespeare',
'match': '''(?xi)
(we\\s+shall|
we\\s+must|
let\\s+us)\\s+
(proceed\\s+to|
return\\s+to)
'''
}
]
| 208892 | 'name': 'Shakespeare Programming Language'
'scopeName': 'source.shakespeare'
'fileTypes': [
'spl'
]
'patterns': [
{
'name': 'comment.line.shakespeare',
'match': '''(?xi)
\\A([^\\.]+\\.)
'''
},
{
'match': '''(?xi)
\\b
((scene|act)
(?=\\s[MCDXLVI])\\sM{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3}))
:\\s([^\\.]+\\.)
''',
'captures': {
'1': {'name': 'entity.name.function.shakespeare'},
'6': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'entity.name.function.shakespeare',
'match': '''(?xi)
\\b
((scene|act)
(?=\\s[MCDXLVI])\\sM{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3}))
\\b
'''
},
{
'name': 'keyword.control.shakespeare',
'match': '''(?xi)
\\bIf\\s+(so|not)\\b
'''
},
{
'match': '''(?xi)
(<NAME>|
<NAME>|
Ad<NAME>ana|
Aegeon|
Aemilia|
Agamemnon|
Agrippa|
Ajax|
Alonso|
Andromache|
Angelo|
Antiochus|
Antonio|
Arthur|
Autolycus|
Balthazar|
Banquo|
<NAME>|
<NAME>|
B<NAME>io|
Bianca|
Brabantio|
Brutus|
Capulet|
<NAME>|
<NAME>|
<NAME>\\s+<NAME>|
<NAME>|
<NAME>|
<NAME>|
<NAME>opatra|
Cordelia|
Cornelius|
Cressida|
Cymberline|
Demetrius|
Desdemona|
Dionyza|
Doctor\\s+Caius|
Dogberry|
Don\\s+John|
Don\\s+Pedro|
Donalbain|
Dorcas|
Duncan|
Egeus|
Emilia|
Escalus|
Falstaff|
Fenton|
Ferdinand|
Ford|
Fortinbras|
Francisca|
Friar\\s+John|
Friar\\s+Laurence|
Gertrude|
Goneril|
Hamlet|
Hecate|
Hector|
<NAME>en|
<NAME>ena|
Hermia|
Hermonie|
Hippolyta|
Horatio|
Imogen|
Isabella|
John\\s+of\\s+Gaunt|
John\\s+of\\s+Lancaster|
<NAME>|
Juliet|
Julius\\s+Caesar|
King\\s+Henry|
King\\s+John|
King\\s+Lear|
King\\s+Richard|
Lady\\s+Capulet|
Lady\\s+Macbeth|
Lady\\s+Macduff|
Lady\\s+Montague|
Lennox|
Leonato|
Luciana|
Lucio|
Lychorida|
Lysander|
Macbeth|
Macduff|
Malcolm|
Mariana|
Mark\\s+Antony|
Mercutio|
Miranda|
Mistress\\s+Ford|
Mistress\\s+Overdone|
Mistress\\s+Page|
Montague|
Mopsa|
Oberon|
Octavia|
Octavius\\s+Caesar|
Olivia|
Ophelia|
Orlando|
Orsino|
Othello|
Page|
Pantino|
Paris|
Pericles|
Pinch|
Polonius|
Pompeius|
Portia|
Priam|
Prince\\s+Henry|
Prospero|
Proteus|
Publius|
Puck|
Queen\\s+Elinor|
Regan|
<NAME>|
Romeo|
Rosalind|
<NAME>|
Shallow|
Shylock|
Slender|
Solinus|
Stephano|
Thaisa|
The\\s+Abbot\\s+of\\s+Westminster|
The\\s+Apothecary|
The\\s+Archbishop\\s+of\\s+Canterbury|
The\\s+Duke\\s+of\\s+Milan|
The\\s+Duke\\s+of\\s+Venice|
The\\s+Ghost|
Theseus|
Thurio|
Timon|
Titania|
Titus|
Troilus|
Tybalt|
Ulysses|
Valentine|
Venus|
Vincentio|
Viola)
\\,([^\\.]+\\.)
''',
'captures': {
'1': {'name': 'variable.other.shakespeare'},
'2': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'constant.language.shakespeare',
'match': '''(?xi)
\\b
(
Hell|
Microsoft|
bastard|
beggar|
blister|
codpiece|
coward|
curse|
death|
devil|
draught|
famine|
flirt-gill|
goat|
hate|
hog|
hound|
leech|
lie|
pig|
plague|
starvation|
toad|
war|
wolf|
animal|
aunt|
brother|
cat|
chihuahua|
cousin|
cow|
daughter|
door|
face|
father|
fellow|
granddaughter|
grandfather|
grandmother|
grandson|
hair|
hamster|
horse|
lamp|
lantern|
mistletoe|
moon|
morning|
mother|
nephew|
niece|
nose|
purse|
road|
roman|
sister|
sky|
son|
squirrel|
stone\\s+wall|
thing|
town|
tree|
uncle|
wind|
Heaven|
King|
Lord|
angel|
flower|
happiness|
joy|
plum|
summer\\'s\\s+day|
hero|
rose|
kingdom|
pony|
nothing|
zero
)
\\b
'''
},
{
'name': 'variable.other.shakespeare',
'match': '''(?xi)
\\b
(
<NAME>|
<NAME>|
<NAME>riana|
Aegeon|
Aemilia|
Agamemnon|
Agrippa|
Ajax|
<NAME>so|
<NAME>ache|
<NAME>|
<NAME>iochus|
<NAME>onio|
<NAME>thur|
Autolycus|
<NAME>|
Banquo|
Be<NAME>rice|
<NAME>|
Benvolio|
Bianca|
Brabantio|
Brutus|
Capulet|
Cassandra|
Cassius|
<NAME>\\s+Sly|
C<NAME>ero|
<NAME>|
<NAME>ius|
Cleopatra|
Cordelia|
Cornelius|
Cressida|
Cymberline|
Demetrius|
Desdemona|
Dionyza|
Doctor\\s+Caius|
Dogberry|
Don\\s+John|
<NAME>\\s+<NAME>|
Donalbain|
Dorcas|
Duncan|
Egeus|
Emilia|
Escalus|
Falstaff|
Fenton|
Ferdinand|
Ford|
Fortinbras|
Francisca|
Friar\\s+John|
<NAME>ar\\s+Laurence|
<NAME>trude|
Goneril|
Hamlet|
Hecate|
Hector|
<NAME>|
<NAME>|
<NAME>|
<NAME>|
Hippolyta|
Horatio|
Imogen|
Isabella|
John\\s+of\\s+Gaunt|
John\\s+of\\s+Lancaster|
<NAME>|
<NAME>|
Julius\\s+Caesar|
King\\s+Henry|
King\\s+John|
King\\s+Lear|
King\\s+Richard|
Lady\\s+Capulet|
Lady\\s+Macbeth|
Lady\\s+Macduff|
Lady\\s+Montague|
Lennox|
Leonato|
<NAME>|
<NAME>|
Lychorida|
Lysander|
Macbeth|
<NAME>duff|
<NAME>|
<NAME>|
<NAME>\\s+<NAME>|
<NAME>|
<NAME>|
Mistress\\s+Ford|
Mistress\\s+Overdone|
Mistress\\s+Page|
Montague|
Mopsa|
Oberon|
Octavia|
Octavius\\s+Caesar|
Olivia|
Ophelia|
Orlando|
Orsino|
Othello|
Page|
Pantino|
Paris|
Pericles|
Pinch|
Polonius|
Pompeius|
Portia|
Priam|
Prince\\s+Henry|
Prospero|
Proteus|
Publius|
Puck|
Queen\\s+Elinor|
Regan|
<NAME>|
Romeo|
Rosalind|
Sebastian|
Shallow|
Shylock|
Slender|
Solinus|
Stephano|
Thaisa|
The\\s+Abbot\\s+of\\s+Westminster|
The\\s+Apothecary|
The\\s+Archbishop\\s+of\\s+Canterbury|
The\\s+Duke\\s+of\\s+Milan|
The\\s+Duke\\s+of\\s+Venice|
The\\s+Ghost|
Theseus|
Thurio|
Timon|
Titania|
Titus|
Troilus|
Tybalt|
Ulysses|
Valentine|
Venus|
Vincentio|
Viola|
I|
me|
myself|
thee|
thou|
you|
thyself|
yourself
)
\\b
'''
},
{
'name': 'keyword.operator.shakespeare',
'match': '''(?xi)
\\b
(
art|
am|
are|
be|
is
)
\\b
'''
},
{
'name': 'constant.numeric.shakespeare',
'match': '''(?xi)
\\b
(
bad|
cowardly|
cursed|
damned|
dirty|
disgusting|
distasteful|
dusty|
evil|
fat|
fat-kidneyed|
fatherless|
foul|
hairy|
half-witted|
horrible|
horrid|
infected|
lying|
miserable|
misused|
oozing|
rotten|
rotten|
smelly|
snotty|
sorry|
stinking|
stuffed|
stupid|
vile|
villainous|
worried|
big|
black|
blue|
bluest|
bottomless|
furry|
green|
hard|
huge|
large|
little|
normal|
old|
purple|
red|
rural|
small|
tiny|
white|
yellow|
amazing|
beautiful|
blossoming|
bold|
brave|
charming|
clearest|
cunning|
cute|
delicious|
embroidered|
fair|
fine|
gentle|
golden|
good|
handsome|
happy|
healthy|
honest|
lovely|
loving|
mighty|
noble|
peaceful|
pretty|
prompt|
proud|
reddest|
rich|
smooth|
sunny|
sweet|
sweetest|
trustworthy|
warm
)
\\b
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
\\b
(
the\\s+cube\\s+of|
the\\s+factorial\\s+of|
the\\s+square\\s+of|
the\\s+square\\s+root\\s+of|
twice|
the\\s+difference\\s+between|
the\\s+product\\s+of|
the\\s+quotient\\s+between|
the\\s+remainder\\s+of\\s+the\\s+quotient\\s+between|
the\\s+sum\\s+of
)
\\b
'''
},
{
'name': 'keyword.other.shakespeare',
'match': '''(?xi)
\\b
(
enter|
exit|
exeunt|
and
)
\\b
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
(speak|open|listen\\s+to)\\s+
(thine|thy|your)\\s+
(heart|mind)
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
(speak|open|listen\\s+to)\\s+
(thine|thy|your)\\s+
(heart|mind)
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
remember
'''
},
{
'match': '''(?xi)
(recall)\\s+
([^!\\.]*[!\\.])
''',
'captures': {
'1': {'name': 'support.function.shakespeare'},
'2': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'keyword.control.shakespeare',
'match': '''(?xi)
(we\\s+shall|
we\\s+must|
let\\s+us)\\s+
(proceed\\s+to|
return\\s+to)
'''
}
]
| true | 'name': 'Shakespeare Programming Language'
'scopeName': 'source.shakespeare'
'fileTypes': [
'spl'
]
'patterns': [
{
'name': 'comment.line.shakespeare',
'match': '''(?xi)
\\A([^\\.]+\\.)
'''
},
{
'match': '''(?xi)
\\b
((scene|act)
(?=\\s[MCDXLVI])\\sM{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3}))
:\\s([^\\.]+\\.)
''',
'captures': {
'1': {'name': 'entity.name.function.shakespeare'},
'6': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'entity.name.function.shakespeare',
'match': '''(?xi)
\\b
((scene|act)
(?=\\s[MCDXLVI])\\sM{0,4}(CM|CD|D?C{0,3})(XC|XL|L?X{0,3})(IX|IV|V?I{0,3}))
\\b
'''
},
{
'name': 'keyword.control.shakespeare',
'match': '''(?xi)
\\bIf\\s+(so|not)\\b
'''
},
{
'match': '''(?xi)
(PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
AdPI:NAME:<NAME>END_PIana|
Aegeon|
Aemilia|
Agamemnon|
Agrippa|
Ajax|
Alonso|
Andromache|
Angelo|
Antiochus|
Antonio|
Arthur|
Autolycus|
Balthazar|
Banquo|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
BPI:NAME:<NAME>END_PIio|
Bianca|
Brabantio|
Brutus|
Capulet|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI\\s+PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PIopatra|
Cordelia|
Cornelius|
Cressida|
Cymberline|
Demetrius|
Desdemona|
Dionyza|
Doctor\\s+Caius|
Dogberry|
Don\\s+John|
Don\\s+Pedro|
Donalbain|
Dorcas|
Duncan|
Egeus|
Emilia|
Escalus|
Falstaff|
Fenton|
Ferdinand|
Ford|
Fortinbras|
Francisca|
Friar\\s+John|
Friar\\s+Laurence|
Gertrude|
Goneril|
Hamlet|
Hecate|
Hector|
PI:NAME:<NAME>END_PIen|
PI:NAME:<NAME>END_PIena|
Hermia|
Hermonie|
Hippolyta|
Horatio|
Imogen|
Isabella|
John\\s+of\\s+Gaunt|
John\\s+of\\s+Lancaster|
PI:NAME:<NAME>END_PI|
Juliet|
Julius\\s+Caesar|
King\\s+Henry|
King\\s+John|
King\\s+Lear|
King\\s+Richard|
Lady\\s+Capulet|
Lady\\s+Macbeth|
Lady\\s+Macduff|
Lady\\s+Montague|
Lennox|
Leonato|
Luciana|
Lucio|
Lychorida|
Lysander|
Macbeth|
Macduff|
Malcolm|
Mariana|
Mark\\s+Antony|
Mercutio|
Miranda|
Mistress\\s+Ford|
Mistress\\s+Overdone|
Mistress\\s+Page|
Montague|
Mopsa|
Oberon|
Octavia|
Octavius\\s+Caesar|
Olivia|
Ophelia|
Orlando|
Orsino|
Othello|
Page|
Pantino|
Paris|
Pericles|
Pinch|
Polonius|
Pompeius|
Portia|
Priam|
Prince\\s+Henry|
Prospero|
Proteus|
Publius|
Puck|
Queen\\s+Elinor|
Regan|
PI:NAME:<NAME>END_PI|
Romeo|
Rosalind|
PI:NAME:<NAME>END_PI|
Shallow|
Shylock|
Slender|
Solinus|
Stephano|
Thaisa|
The\\s+Abbot\\s+of\\s+Westminster|
The\\s+Apothecary|
The\\s+Archbishop\\s+of\\s+Canterbury|
The\\s+Duke\\s+of\\s+Milan|
The\\s+Duke\\s+of\\s+Venice|
The\\s+Ghost|
Theseus|
Thurio|
Timon|
Titania|
Titus|
Troilus|
Tybalt|
Ulysses|
Valentine|
Venus|
Vincentio|
Viola)
\\,([^\\.]+\\.)
''',
'captures': {
'1': {'name': 'variable.other.shakespeare'},
'2': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'constant.language.shakespeare',
'match': '''(?xi)
\\b
(
Hell|
Microsoft|
bastard|
beggar|
blister|
codpiece|
coward|
curse|
death|
devil|
draught|
famine|
flirt-gill|
goat|
hate|
hog|
hound|
leech|
lie|
pig|
plague|
starvation|
toad|
war|
wolf|
animal|
aunt|
brother|
cat|
chihuahua|
cousin|
cow|
daughter|
door|
face|
father|
fellow|
granddaughter|
grandfather|
grandmother|
grandson|
hair|
hamster|
horse|
lamp|
lantern|
mistletoe|
moon|
morning|
mother|
nephew|
niece|
nose|
purse|
road|
roman|
sister|
sky|
son|
squirrel|
stone\\s+wall|
thing|
town|
tree|
uncle|
wind|
Heaven|
King|
Lord|
angel|
flower|
happiness|
joy|
plum|
summer\\'s\\s+day|
hero|
rose|
kingdom|
pony|
nothing|
zero
)
\\b
'''
},
{
'name': 'variable.other.shakespeare',
'match': '''(?xi)
\\b
(
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PIriana|
Aegeon|
Aemilia|
Agamemnon|
Agrippa|
Ajax|
PI:NAME:<NAME>END_PIso|
PI:NAME:<NAME>END_PIache|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PIiochus|
PI:NAME:<NAME>END_PIonio|
PI:NAME:<NAME>END_PIthur|
Autolycus|
PI:NAME:<NAME>END_PI|
Banquo|
BePI:NAME:<NAME>END_PIrice|
PI:NAME:<NAME>END_PI|
Benvolio|
Bianca|
Brabantio|
Brutus|
Capulet|
Cassandra|
Cassius|
PI:NAME:<NAME>END_PI\\s+Sly|
CPI:NAME:<NAME>END_PIero|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PIius|
Cleopatra|
Cordelia|
Cornelius|
Cressida|
Cymberline|
Demetrius|
Desdemona|
Dionyza|
Doctor\\s+Caius|
Dogberry|
Don\\s+John|
PI:NAME:<NAME>END_PI\\s+PI:NAME:<NAME>END_PI|
Donalbain|
Dorcas|
Duncan|
Egeus|
Emilia|
Escalus|
Falstaff|
Fenton|
Ferdinand|
Ford|
Fortinbras|
Francisca|
Friar\\s+John|
PI:NAME:<NAME>END_PIar\\s+Laurence|
PI:NAME:<NAME>END_PItrude|
Goneril|
Hamlet|
Hecate|
Hector|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
Hippolyta|
Horatio|
Imogen|
Isabella|
John\\s+of\\s+Gaunt|
John\\s+of\\s+Lancaster|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
Julius\\s+Caesar|
King\\s+Henry|
King\\s+John|
King\\s+Lear|
King\\s+Richard|
Lady\\s+Capulet|
Lady\\s+Macbeth|
Lady\\s+Macduff|
Lady\\s+Montague|
Lennox|
Leonato|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
Lychorida|
Lysander|
Macbeth|
PI:NAME:<NAME>END_PIduff|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI\\s+PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
PI:NAME:<NAME>END_PI|
Mistress\\s+Ford|
Mistress\\s+Overdone|
Mistress\\s+Page|
Montague|
Mopsa|
Oberon|
Octavia|
Octavius\\s+Caesar|
Olivia|
Ophelia|
Orlando|
Orsino|
Othello|
Page|
Pantino|
Paris|
Pericles|
Pinch|
Polonius|
Pompeius|
Portia|
Priam|
Prince\\s+Henry|
Prospero|
Proteus|
Publius|
Puck|
Queen\\s+Elinor|
Regan|
PI:NAME:<NAME>END_PI|
Romeo|
Rosalind|
Sebastian|
Shallow|
Shylock|
Slender|
Solinus|
Stephano|
Thaisa|
The\\s+Abbot\\s+of\\s+Westminster|
The\\s+Apothecary|
The\\s+Archbishop\\s+of\\s+Canterbury|
The\\s+Duke\\s+of\\s+Milan|
The\\s+Duke\\s+of\\s+Venice|
The\\s+Ghost|
Theseus|
Thurio|
Timon|
Titania|
Titus|
Troilus|
Tybalt|
Ulysses|
Valentine|
Venus|
Vincentio|
Viola|
I|
me|
myself|
thee|
thou|
you|
thyself|
yourself
)
\\b
'''
},
{
'name': 'keyword.operator.shakespeare',
'match': '''(?xi)
\\b
(
art|
am|
are|
be|
is
)
\\b
'''
},
{
'name': 'constant.numeric.shakespeare',
'match': '''(?xi)
\\b
(
bad|
cowardly|
cursed|
damned|
dirty|
disgusting|
distasteful|
dusty|
evil|
fat|
fat-kidneyed|
fatherless|
foul|
hairy|
half-witted|
horrible|
horrid|
infected|
lying|
miserable|
misused|
oozing|
rotten|
rotten|
smelly|
snotty|
sorry|
stinking|
stuffed|
stupid|
vile|
villainous|
worried|
big|
black|
blue|
bluest|
bottomless|
furry|
green|
hard|
huge|
large|
little|
normal|
old|
purple|
red|
rural|
small|
tiny|
white|
yellow|
amazing|
beautiful|
blossoming|
bold|
brave|
charming|
clearest|
cunning|
cute|
delicious|
embroidered|
fair|
fine|
gentle|
golden|
good|
handsome|
happy|
healthy|
honest|
lovely|
loving|
mighty|
noble|
peaceful|
pretty|
prompt|
proud|
reddest|
rich|
smooth|
sunny|
sweet|
sweetest|
trustworthy|
warm
)
\\b
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
\\b
(
the\\s+cube\\s+of|
the\\s+factorial\\s+of|
the\\s+square\\s+of|
the\\s+square\\s+root\\s+of|
twice|
the\\s+difference\\s+between|
the\\s+product\\s+of|
the\\s+quotient\\s+between|
the\\s+remainder\\s+of\\s+the\\s+quotient\\s+between|
the\\s+sum\\s+of
)
\\b
'''
},
{
'name': 'keyword.other.shakespeare',
'match': '''(?xi)
\\b
(
enter|
exit|
exeunt|
and
)
\\b
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
(speak|open|listen\\s+to)\\s+
(thine|thy|your)\\s+
(heart|mind)
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
(speak|open|listen\\s+to)\\s+
(thine|thy|your)\\s+
(heart|mind)
'''
},
{
'name': 'support.function.shakespeare',
'match': '''(?xi)
remember
'''
},
{
'match': '''(?xi)
(recall)\\s+
([^!\\.]*[!\\.])
''',
'captures': {
'1': {'name': 'support.function.shakespeare'},
'2': {'name': 'comment.line.shakespeare'}
}
},
{
'name': 'keyword.control.shakespeare',
'match': '''(?xi)
(we\\s+shall|
we\\s+must|
let\\s+us)\\s+
(proceed\\s+to|
return\\s+to)
'''
}
]
|
[
{
"context": "or('Test code. Not to be executed.')\n\n test = 'U2FsdGVkX18uek1T+johJh6pyZv2ddks8hLbEnUbGwo=' # key:123 (256 bit)\n test = 'U2FsdGVkX183PJoMK",
"end": 1032,
"score": 0.9994922280311584,
"start": 987,
"tag": "KEY",
"value": "U2FsdGVkX18uek1T+johJh6pyZv2ddks8hLbEnUbGwo='"
... | lib/tiny-aes.coffee | AdrienTreuille/atom-tiny-aes | 1 | {PasswordDialogView} = require './tiny-aes-view'
{CompositeDisposable} = require 'atom'
CryptoJS = require 'crypto-js'
####################
# Helper Functions #
####################
TinyAES =
subscriptions: null
encryptView: null
decryptView: null
activate: (state) ->
@encryptView = new PasswordDialogView 'encrypt'
@decryptView = new PasswordDialogView 'decrypt'
# Events subscribed to in atom's system can be easily
# cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable
# Register encryption / decryption commands.
@subscriptions.add atom.commands.add 'atom-text-editor',
'tiny-aes:encrypt': => @encrypt()
@subscriptions.add atom.commands.add 'atom-text-editor',
'tiny-aes:decrypt': => @decrypt()
deactivate: ->
@subscriptions.dispose()
@encryptView.destroy()
@decryptView.destroy()
serialize: ->
testCryptoJS: ->
# throw Error('Test code. Not to be executed.')
test = 'U2FsdGVkX18uek1T+johJh6pyZv2ddks8hLbEnUbGwo=' # key:123 (256 bit)
test = 'U2FsdGVkX183PJoMKqzwQKh9jcXjReUo+MS5HCU0fV4=\n' # key:123 (128 bit)
iv=CryptoJS.enc.Hex.parse("00000000000000000000000000000000")
key = CryptoJS.EvpKDF '123'
# key = CryptoJS.lib.WordArray.random(4);
console.log "128-bit key: #{key.toString(CryptoJS.enc.Hex)}"
console.log key
answer = CryptoJS.AES.decrypt test, key, iv:iv
console.log "Test: #{test}"
console.log "Answer: #{answer}"
console.log answer.toString(CryptoJS.enc.Latin1)
return
encrypt: ->
selection = @getSelectionOrEverything()
@encryptView.requestPassword()
.then (pw) =>
script = "openssl enc -e -aes128 -base64 -pass \"pass:#{pw}\""
@exec script, input: selection.getText()
.then (ciphertext) =>
selection.insertText ciphertext, select:yes
.catch (err) =>
if err.message isnt "Cancelled."
console.log err.stack
atom.notifications.addWarning err.message
decrypt: ->
selection = @getSelectionOrEverything()
@decryptView.requestPassword()
.then (pw) =>
script = "openssl enc -d -aes128 -base64 -pass \"pass:#{pw}\""
@exec script, input: selection.getText()
.catch (err) => Promise.reject Error 'Password incorrect.'
.then (cleartext) =>
selection.insertText cleartext, select:yes
.catch (err) =>
if err.message isnt "Cancelled."
console.log err.stack
atom.notifications.addWarning err.message
# Execute something on the command line, returning a promise.
# cmd: (string) the cmd to execute
# options: list of options
# input: (string) piped into stdin
exec: (cmd, options) ->
new Promise (resolve, reject) ->
childProcess = require 'child_process'
child = childProcess.exec cmd, (err, stdout, stderr) ->
if err then reject err else resolve stdout
child.stdin.write(options?.input ? '')
child.stdin.end()
# The current selection or selects everything.
getSelectionOrEverything: ->
editor = atom.workspace.getActiveTextEditor()
selection = editor.getLastSelection()
selection.selectAll() if selection.isEmpty()
return selection
module.exports =
activate: (state) -> TinyAES.activate state
deactivate: -> TinyAES.deactivate
serialize: -> TinyAES.serialize
| 101063 | {PasswordDialogView} = require './tiny-aes-view'
{CompositeDisposable} = require 'atom'
CryptoJS = require 'crypto-js'
####################
# Helper Functions #
####################
TinyAES =
subscriptions: null
encryptView: null
decryptView: null
activate: (state) ->
@encryptView = new PasswordDialogView 'encrypt'
@decryptView = new PasswordDialogView 'decrypt'
# Events subscribed to in atom's system can be easily
# cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable
# Register encryption / decryption commands.
@subscriptions.add atom.commands.add 'atom-text-editor',
'tiny-aes:encrypt': => @encrypt()
@subscriptions.add atom.commands.add 'atom-text-editor',
'tiny-aes:decrypt': => @decrypt()
deactivate: ->
@subscriptions.dispose()
@encryptView.destroy()
@decryptView.destroy()
serialize: ->
testCryptoJS: ->
# throw Error('Test code. Not to be executed.')
test = '<KEY> # key:123 (256 bit)
test = '<KEY>' # key:123 (128 bit)
iv=CryptoJS.enc.Hex.parse("00000000000000000000000000000000")
key = <KEY> '<KEY>'
# key = CryptoJS.lib.WordArray.random(4);
console.log "128-bit key: #{key.toString(CryptoJS.enc.Hex)}"
console.log key
answer = CryptoJS.AES.decrypt test, key, iv:iv
console.log "Test: #{test}"
console.log "Answer: #{answer}"
console.log answer.toString(CryptoJS.enc.Latin1)
return
encrypt: ->
selection = @getSelectionOrEverything()
@encryptView.requestPassword()
.then (pw) =>
script = "openssl enc -e -aes128 -base64 -pass \"pass:#{pw}\""
@exec script, input: selection.getText()
.then (ciphertext) =>
selection.insertText ciphertext, select:yes
.catch (err) =>
if err.message isnt "Cancelled."
console.log err.stack
atom.notifications.addWarning err.message
decrypt: ->
selection = @getSelectionOrEverything()
@decryptView.requestPassword()
.then (pw) =>
script = "openssl enc -d -aes128 -base64 -pass \"pass:#{pw}\""
@exec script, input: selection.getText()
.catch (err) => Promise.reject Error 'Password incorrect.'
.then (cleartext) =>
selection.insertText cleartext, select:yes
.catch (err) =>
if err.message isnt "Cancelled."
console.log err.stack
atom.notifications.addWarning err.message
# Execute something on the command line, returning a promise.
# cmd: (string) the cmd to execute
# options: list of options
# input: (string) piped into stdin
exec: (cmd, options) ->
new Promise (resolve, reject) ->
childProcess = require 'child_process'
child = childProcess.exec cmd, (err, stdout, stderr) ->
if err then reject err else resolve stdout
child.stdin.write(options?.input ? '')
child.stdin.end()
# The current selection or selects everything.
getSelectionOrEverything: ->
editor = atom.workspace.getActiveTextEditor()
selection = editor.getLastSelection()
selection.selectAll() if selection.isEmpty()
return selection
module.exports =
activate: (state) -> TinyAES.activate state
deactivate: -> TinyAES.deactivate
serialize: -> TinyAES.serialize
| true | {PasswordDialogView} = require './tiny-aes-view'
{CompositeDisposable} = require 'atom'
CryptoJS = require 'crypto-js'
####################
# Helper Functions #
####################
TinyAES =
subscriptions: null
encryptView: null
decryptView: null
activate: (state) ->
@encryptView = new PasswordDialogView 'encrypt'
@decryptView = new PasswordDialogView 'decrypt'
# Events subscribed to in atom's system can be easily
# cleaned up with a CompositeDisposable
@subscriptions = new CompositeDisposable
# Register encryption / decryption commands.
@subscriptions.add atom.commands.add 'atom-text-editor',
'tiny-aes:encrypt': => @encrypt()
@subscriptions.add atom.commands.add 'atom-text-editor',
'tiny-aes:decrypt': => @decrypt()
deactivate: ->
@subscriptions.dispose()
@encryptView.destroy()
@decryptView.destroy()
serialize: ->
testCryptoJS: ->
# throw Error('Test code. Not to be executed.')
test = 'PI:KEY:<KEY>END_PI # key:123 (256 bit)
test = 'PI:KEY:<KEY>END_PI' # key:123 (128 bit)
iv=CryptoJS.enc.Hex.parse("00000000000000000000000000000000")
key = PI:KEY:<KEY>END_PI 'PI:KEY:<KEY>END_PI'
# key = CryptoJS.lib.WordArray.random(4);
console.log "128-bit key: #{key.toString(CryptoJS.enc.Hex)}"
console.log key
answer = CryptoJS.AES.decrypt test, key, iv:iv
console.log "Test: #{test}"
console.log "Answer: #{answer}"
console.log answer.toString(CryptoJS.enc.Latin1)
return
encrypt: ->
selection = @getSelectionOrEverything()
@encryptView.requestPassword()
.then (pw) =>
script = "openssl enc -e -aes128 -base64 -pass \"pass:#{pw}\""
@exec script, input: selection.getText()
.then (ciphertext) =>
selection.insertText ciphertext, select:yes
.catch (err) =>
if err.message isnt "Cancelled."
console.log err.stack
atom.notifications.addWarning err.message
decrypt: ->
selection = @getSelectionOrEverything()
@decryptView.requestPassword()
.then (pw) =>
script = "openssl enc -d -aes128 -base64 -pass \"pass:#{pw}\""
@exec script, input: selection.getText()
.catch (err) => Promise.reject Error 'Password incorrect.'
.then (cleartext) =>
selection.insertText cleartext, select:yes
.catch (err) =>
if err.message isnt "Cancelled."
console.log err.stack
atom.notifications.addWarning err.message
# Execute something on the command line, returning a promise.
# cmd: (string) the cmd to execute
# options: list of options
# input: (string) piped into stdin
exec: (cmd, options) ->
new Promise (resolve, reject) ->
childProcess = require 'child_process'
child = childProcess.exec cmd, (err, stdout, stderr) ->
if err then reject err else resolve stdout
child.stdin.write(options?.input ? '')
child.stdin.end()
# The current selection or selects everything.
getSelectionOrEverything: ->
editor = atom.workspace.getActiveTextEditor()
selection = editor.getLastSelection()
selection.selectAll() if selection.isEmpty()
return selection
module.exports =
activate: (state) -> TinyAES.activate state
deactivate: -> TinyAES.deactivate
serialize: -> TinyAES.serialize
|
[
{
"context": "user: \n data: [\n id: 1\n name: \"Bob\"\n ,\n id: 2\n name: \"Sue\"\n ",
"end": 457,
"score": 0.9998132586479187,
"start": 454,
"tag": "NAME",
"value": "Bob"
},
{
"context": " name: \"Bob\"\n ,\n id: 2\n nam... | test/spec_coffee/model_relationships_order_spec.coffee | kirkbowers/mvcoffee | 0 | MVCoffee = require("../lib/mvcoffee")
theUser = class User extends MVCoffee.Model
theUser.hasMany "activity", order: "position"
theUser.hasMany "activity", as: "reversed", order: "position desc"
theActivity = class Activity extends MVCoffee.Model
theActivity.belongsTo "user"
store = new MVCoffee.ModelStore
user: User
activity: Activity
store.load
mvcoffee_version: "1.0.0"
models:
user:
data: [
id: 1
name: "Bob"
,
id: 2
name: "Sue"
]
activity:
data: [
id: 1
name: "Rake the yard"
position: 2
user_id: 1
owner_id: 1
,
id: 2
name: "Sweep the driveway"
position: 1
user_id: 1
owner_id: 2
,
id: 3
name: "Wash the cat"
position: 1
user_id: 2
owner_id: 1
]
describe "model macro methods for relationships with options", ->
it "should find activities for a user in ascending order by position", ->
user = User.find(1)
acts = user.activities()
expect(acts instanceof Array).toBeTruthy()
expect(acts.length).toBe(2)
expect(acts[0].name).toBe("Sweep the driveway")
expect(acts[1].name).toBe("Rake the yard")
expect(acts[0].position).toBe(1)
expect(acts[1].position).toBe(2)
it "should find activities for a user in descending order by position", ->
user = User.find(1)
acts = user.reversed()
expect(acts instanceof Array).toBeTruthy()
expect(acts.length).toBe(2)
expect(acts[0].name).toBe("Rake the yard")
expect(acts[1].name).toBe("Sweep the driveway")
expect(acts[0].position).toBe(2)
expect(acts[1].position).toBe(1)
| 199928 | MVCoffee = require("../lib/mvcoffee")
theUser = class User extends MVCoffee.Model
theUser.hasMany "activity", order: "position"
theUser.hasMany "activity", as: "reversed", order: "position desc"
theActivity = class Activity extends MVCoffee.Model
theActivity.belongsTo "user"
store = new MVCoffee.ModelStore
user: User
activity: Activity
store.load
mvcoffee_version: "1.0.0"
models:
user:
data: [
id: 1
name: "<NAME>"
,
id: 2
name: "<NAME>"
]
activity:
data: [
id: 1
name: "Rake the yard"
position: 2
user_id: 1
owner_id: 1
,
id: 2
name: "Sweep the driveway"
position: 1
user_id: 1
owner_id: 2
,
id: 3
name: "Wash the cat"
position: 1
user_id: 2
owner_id: 1
]
describe "model macro methods for relationships with options", ->
it "should find activities for a user in ascending order by position", ->
user = User.find(1)
acts = user.activities()
expect(acts instanceof Array).toBeTruthy()
expect(acts.length).toBe(2)
expect(acts[0].name).toBe("Sweep the driveway")
expect(acts[1].name).toBe("Rake the yard")
expect(acts[0].position).toBe(1)
expect(acts[1].position).toBe(2)
it "should find activities for a user in descending order by position", ->
user = User.find(1)
acts = user.reversed()
expect(acts instanceof Array).toBeTruthy()
expect(acts.length).toBe(2)
expect(acts[0].name).toBe("Rake the yard")
expect(acts[1].name).toBe("Sweep the driveway")
expect(acts[0].position).toBe(2)
expect(acts[1].position).toBe(1)
| true | MVCoffee = require("../lib/mvcoffee")
theUser = class User extends MVCoffee.Model
theUser.hasMany "activity", order: "position"
theUser.hasMany "activity", as: "reversed", order: "position desc"
theActivity = class Activity extends MVCoffee.Model
theActivity.belongsTo "user"
store = new MVCoffee.ModelStore
user: User
activity: Activity
store.load
mvcoffee_version: "1.0.0"
models:
user:
data: [
id: 1
name: "PI:NAME:<NAME>END_PI"
,
id: 2
name: "PI:NAME:<NAME>END_PI"
]
activity:
data: [
id: 1
name: "Rake the yard"
position: 2
user_id: 1
owner_id: 1
,
id: 2
name: "Sweep the driveway"
position: 1
user_id: 1
owner_id: 2
,
id: 3
name: "Wash the cat"
position: 1
user_id: 2
owner_id: 1
]
describe "model macro methods for relationships with options", ->
it "should find activities for a user in ascending order by position", ->
user = User.find(1)
acts = user.activities()
expect(acts instanceof Array).toBeTruthy()
expect(acts.length).toBe(2)
expect(acts[0].name).toBe("Sweep the driveway")
expect(acts[1].name).toBe("Rake the yard")
expect(acts[0].position).toBe(1)
expect(acts[1].position).toBe(2)
it "should find activities for a user in descending order by position", ->
user = User.find(1)
acts = user.reversed()
expect(acts instanceof Array).toBeTruthy()
expect(acts.length).toBe(2)
expect(acts[0].name).toBe("Rake the yard")
expect(acts[1].name).toBe("Sweep the driveway")
expect(acts[0].position).toBe(2)
expect(acts[1].position).toBe(1)
|
[
{
"context": " process.env.HUBOT_HIPCHAT_JID\n password: process.env.HUBOT_HIPCHAT_PASSWORD\n token: process.env.HUBO",
"end": 483,
"score": 0.8622715473175049,
"start": 466,
"tag": "PASSWORD",
"value": "process.env.HUBOT"
},
{
"context": "(jid: @options.jid, name: @... | src/hipchat.coffee | agoragames/hubot-hipchat | 0 | Robot = require('hubot').robot()
Adapter = require('hubot').adapter()
HTTPS = require 'https'
Wobot = require('wobot').Bot
class HipChat extends Adapter
send: (user, strings...) ->
for str in strings
@bot.message user.reply_to, str
reply: (user, strings...) ->
for str in strings
@send user, "@#{user.name.replace(' ', '')} #{str}"
run: ->
self = @
@options =
jid: process.env.HUBOT_HIPCHAT_JID
password: process.env.HUBOT_HIPCHAT_PASSWORD
token: process.env.HUBOT_HIPCHAT_TOKEN or null
name: process.env.HUBOT_HIPCHAT_NAME or "#{self.name} Bot"
rooms: process.env.HUBOT_HIPCHAT_ROOMS or "@All"
debug: process.env.HUBOT_HIPCHAT_DEBUG or false
host: process.env.HUBOT_HIPCHAT_HOST or null
console.log "Options:", @options
bot = new Wobot(jid: @options.jid, name: @options.name, password: @options.password, debug: @options.debug == 'true', host: @options.host)
mention = new RegExp("@#{@options.name.replace(' ', '')}\\b", "i")
console.log mention
console.log "Bot:", bot
bot.onConnect =>
console.log "Connected to HipChat"
# Join requested rooms
if @options.rooms is "@All"
bot.getRooms (err, rooms, stanza) ->
if rooms
for room in rooms
console.log "Joining #{room.jid}"
bot.join room.jid
else
console.log "Can't list rooms: #{err}"
else
for room_jid in @options.rooms.split(',')
console.log "Joining #{room_jid}"
bot.join room_jid
# Fetch user info
bot.getRoster (err, users, stanza) ->
if users
for user in users
self.userForId self.userIdFromJid(user.jid), user
else
console.log "Can't list users: #{err}"
bot.onError (message) ->
# If HipChat sends an error, we get the error message from XMPP.
# Otherwise, we get an Error object from the Node connection.
if message.message
console.log "Error talking to HipChat:", message.message
else
console.log "Received error from HipChat:", message
bot.onMessage (channel, from, message) ->
author = (self.userForName from) or {}
author.name = from unless author.name
author.reply_to = channel
author.room = self.roomNameFromJid(channel)
hubot_msg = message.replace(mention, "#{self.robot.name}: ")
self.receive new Robot.TextMessage(author, hubot_msg)
bot.onPrivateMessage (from, message) ->
author = self.userForId(self.userIdFromJid(from))
author.reply_to = from
author.room = self.roomNameFromJid(from)
self.receive new Robot.TextMessage(author, "#{self.robot.name}: #{message}")
# Join rooms automatically when invited
bot.onInvite (room_jid, from_jid, message) =>
console.log "Got invite to #{room_jid} from #{from_jid} - joining"
bot.join room_jid
bot.connect()
@bot = bot
self.emit "connected"
# Convenience HTTP Methods for posting on behalf of the token"d user
get: (path, callback) ->
@request "GET", path, null, callback
post: (path, body, callback) ->
@request "POST", path, body, callback
request: (method, path, body, callback) ->
console.log method, path, body
host = @options.host or "api.hipchat.com"
headers = "Host": host
unless @options.token
callback "No API token provided to Hubot", null
return
options =
"agent" : false
"host" : host
"port" : 443
"path" : path
"method" : method
"headers": headers
if method is "POST"
body.auth_token = @options.token
body = JSON.stringify(body)
headers["Content-Type"] = "application/json"
body = new Buffer(body)
options.headers["Content-Length"] = body.length
else
options.path += "?auth_token=#{@options.token}"
request = HTTPS.request options, (response) ->
data = ""
response.on "data", (chunk) ->
data += chunk
response.on "end", ->
if response.statusCode >= 400
console.log "HipChat API error: #{response.statusCode}"
try
callback null, JSON.parse(data)
catch err
callback null, data or { }
response.on "error", (err) ->
callback err, null
if method is "POST"
request.end(body, 'binary')
else
request.end()
request.on "error", (err) ->
console.log err
console.log err.stack
callback err
userIdFromJid: (jid) ->
try
return jid.match(/^\d+_(\d+)@/)[1]
catch e
console.log "Bad user JID: #{jid}"
return null
roomNameFromJid: (jid) ->
try
return jid.match(/^\d+_([\w_\.-]+)@/)[1]
catch e
console.log "Bad room JID: #{jid}"
return null
exports.use = (robot) ->
new HipChat robot
| 66661 | Robot = require('hubot').robot()
Adapter = require('hubot').adapter()
HTTPS = require 'https'
Wobot = require('wobot').Bot
class HipChat extends Adapter
send: (user, strings...) ->
for str in strings
@bot.message user.reply_to, str
reply: (user, strings...) ->
for str in strings
@send user, "@#{user.name.replace(' ', '')} #{str}"
run: ->
self = @
@options =
jid: process.env.HUBOT_HIPCHAT_JID
password: <PASSWORD>_HIPCHAT_PASSWORD
token: process.env.HUBOT_HIPCHAT_TOKEN or null
name: process.env.HUBOT_HIPCHAT_NAME or "#{self.name} Bot"
rooms: process.env.HUBOT_HIPCHAT_ROOMS or "@All"
debug: process.env.HUBOT_HIPCHAT_DEBUG or false
host: process.env.HUBOT_HIPCHAT_HOST or null
console.log "Options:", @options
bot = new Wobot(jid: @options.jid, name: @options.name, password: <PASSWORD>, debug: @options.debug == 'true', host: @options.host)
mention = new RegExp("@#{@options.name.replace(' ', '')}\\b", "i")
console.log mention
console.log "Bot:", bot
bot.onConnect =>
console.log "Connected to HipChat"
# Join requested rooms
if @options.rooms is "@All"
bot.getRooms (err, rooms, stanza) ->
if rooms
for room in rooms
console.log "Joining #{room.jid}"
bot.join room.jid
else
console.log "Can't list rooms: #{err}"
else
for room_jid in @options.rooms.split(',')
console.log "Joining #{room_jid}"
bot.join room_jid
# Fetch user info
bot.getRoster (err, users, stanza) ->
if users
for user in users
self.userForId self.userIdFromJid(user.jid), user
else
console.log "Can't list users: #{err}"
bot.onError (message) ->
# If HipChat sends an error, we get the error message from XMPP.
# Otherwise, we get an Error object from the Node connection.
if message.message
console.log "Error talking to HipChat:", message.message
else
console.log "Received error from HipChat:", message
bot.onMessage (channel, from, message) ->
author = (self.userForName from) or {}
author.name = from unless author.name
author.reply_to = channel
author.room = self.roomNameFromJid(channel)
hubot_msg = message.replace(mention, "#{self.robot.name}: ")
self.receive new Robot.TextMessage(author, hubot_msg)
bot.onPrivateMessage (from, message) ->
author = self.userForId(self.userIdFromJid(from))
author.reply_to = from
author.room = self.roomNameFromJid(from)
self.receive new Robot.TextMessage(author, "#{self.robot.name}: #{message}")
# Join rooms automatically when invited
bot.onInvite (room_jid, from_jid, message) =>
console.log "Got invite to #{room_jid} from #{from_jid} - joining"
bot.join room_jid
bot.connect()
@bot = bot
self.emit "connected"
# Convenience HTTP Methods for posting on behalf of the token"d user
get: (path, callback) ->
@request "GET", path, null, callback
post: (path, body, callback) ->
@request "POST", path, body, callback
request: (method, path, body, callback) ->
console.log method, path, body
host = @options.host or "api.hipchat.com"
headers = "Host": host
unless @options.token
callback "No API token provided to Hubot", null
return
options =
"agent" : false
"host" : host
"port" : 443
"path" : path
"method" : method
"headers": headers
if method is "POST"
body.auth_token = @options.token
body = JSON.stringify(body)
headers["Content-Type"] = "application/json"
body = new Buffer(body)
options.headers["Content-Length"] = body.length
else
options.path += "?auth_token=#{@options.token}"
request = HTTPS.request options, (response) ->
data = ""
response.on "data", (chunk) ->
data += chunk
response.on "end", ->
if response.statusCode >= 400
console.log "HipChat API error: #{response.statusCode}"
try
callback null, JSON.parse(data)
catch err
callback null, data or { }
response.on "error", (err) ->
callback err, null
if method is "POST"
request.end(body, 'binary')
else
request.end()
request.on "error", (err) ->
console.log err
console.log err.stack
callback err
userIdFromJid: (jid) ->
try
return jid.match(/^\d+_(\d+)@/)[1]
catch e
console.log "Bad user JID: #{jid}"
return null
roomNameFromJid: (jid) ->
try
return jid.match(/^\d+_([\w_\.-]+)@/)[1]
catch e
console.log "Bad room JID: #{jid}"
return null
exports.use = (robot) ->
new HipChat robot
| true | Robot = require('hubot').robot()
Adapter = require('hubot').adapter()
HTTPS = require 'https'
Wobot = require('wobot').Bot
class HipChat extends Adapter
send: (user, strings...) ->
for str in strings
@bot.message user.reply_to, str
reply: (user, strings...) ->
for str in strings
@send user, "@#{user.name.replace(' ', '')} #{str}"
run: ->
self = @
@options =
jid: process.env.HUBOT_HIPCHAT_JID
password: PI:PASSWORD:<PASSWORD>END_PI_HIPCHAT_PASSWORD
token: process.env.HUBOT_HIPCHAT_TOKEN or null
name: process.env.HUBOT_HIPCHAT_NAME or "#{self.name} Bot"
rooms: process.env.HUBOT_HIPCHAT_ROOMS or "@All"
debug: process.env.HUBOT_HIPCHAT_DEBUG or false
host: process.env.HUBOT_HIPCHAT_HOST or null
console.log "Options:", @options
bot = new Wobot(jid: @options.jid, name: @options.name, password: PI:PASSWORD:<PASSWORD>END_PI, debug: @options.debug == 'true', host: @options.host)
mention = new RegExp("@#{@options.name.replace(' ', '')}\\b", "i")
console.log mention
console.log "Bot:", bot
bot.onConnect =>
console.log "Connected to HipChat"
# Join requested rooms
if @options.rooms is "@All"
bot.getRooms (err, rooms, stanza) ->
if rooms
for room in rooms
console.log "Joining #{room.jid}"
bot.join room.jid
else
console.log "Can't list rooms: #{err}"
else
for room_jid in @options.rooms.split(',')
console.log "Joining #{room_jid}"
bot.join room_jid
# Fetch user info
bot.getRoster (err, users, stanza) ->
if users
for user in users
self.userForId self.userIdFromJid(user.jid), user
else
console.log "Can't list users: #{err}"
bot.onError (message) ->
# If HipChat sends an error, we get the error message from XMPP.
# Otherwise, we get an Error object from the Node connection.
if message.message
console.log "Error talking to HipChat:", message.message
else
console.log "Received error from HipChat:", message
bot.onMessage (channel, from, message) ->
author = (self.userForName from) or {}
author.name = from unless author.name
author.reply_to = channel
author.room = self.roomNameFromJid(channel)
hubot_msg = message.replace(mention, "#{self.robot.name}: ")
self.receive new Robot.TextMessage(author, hubot_msg)
bot.onPrivateMessage (from, message) ->
author = self.userForId(self.userIdFromJid(from))
author.reply_to = from
author.room = self.roomNameFromJid(from)
self.receive new Robot.TextMessage(author, "#{self.robot.name}: #{message}")
# Join rooms automatically when invited
bot.onInvite (room_jid, from_jid, message) =>
console.log "Got invite to #{room_jid} from #{from_jid} - joining"
bot.join room_jid
bot.connect()
@bot = bot
self.emit "connected"
# Convenience HTTP Methods for posting on behalf of the token"d user
get: (path, callback) ->
@request "GET", path, null, callback
post: (path, body, callback) ->
@request "POST", path, body, callback
request: (method, path, body, callback) ->
console.log method, path, body
host = @options.host or "api.hipchat.com"
headers = "Host": host
unless @options.token
callback "No API token provided to Hubot", null
return
options =
"agent" : false
"host" : host
"port" : 443
"path" : path
"method" : method
"headers": headers
if method is "POST"
body.auth_token = @options.token
body = JSON.stringify(body)
headers["Content-Type"] = "application/json"
body = new Buffer(body)
options.headers["Content-Length"] = body.length
else
options.path += "?auth_token=#{@options.token}"
request = HTTPS.request options, (response) ->
data = ""
response.on "data", (chunk) ->
data += chunk
response.on "end", ->
if response.statusCode >= 400
console.log "HipChat API error: #{response.statusCode}"
try
callback null, JSON.parse(data)
catch err
callback null, data or { }
response.on "error", (err) ->
callback err, null
if method is "POST"
request.end(body, 'binary')
else
request.end()
request.on "error", (err) ->
console.log err
console.log err.stack
callback err
userIdFromJid: (jid) ->
try
return jid.match(/^\d+_(\d+)@/)[1]
catch e
console.log "Bad user JID: #{jid}"
return null
roomNameFromJid: (jid) ->
try
return jid.match(/^\d+_([\w_\.-]+)@/)[1]
catch e
console.log "Bad room JID: #{jid}"
return null
exports.use = (robot) ->
new HipChat robot
|
[
{
"context": "sponseData =\n\t\t\tprojectId: @projectId\n\t\t\tkeys: ['k1', 'k2']\n\n\tdescribe 'index', ->\n\n\t\tbeforeEach ->\n\t",
"end": 1510,
"score": 0.6910796761512756,
"start": 1509,
"tag": "KEY",
"value": "1"
},
{
"context": "Data =\n\t\t\tprojectId: @projectId\n\t\t\tkeys:... | test/unit/coffee/References/ReferencesHandlerTests.coffee | davidmehren/web-sharelatex | 1 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
expect = require('chai').expect
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/References/ReferencesHandler"
describe 'ReferencesHandler', ->
beforeEach ->
@projectId = '222'
@fakeProject =
_id: @projectId
owner_ref: @fakeOwner =
_id: 'some_owner'
features:
references: false
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
folders: [
{
docs: [{name: 'three.bib', _id: 'ccc'}],
fileRefs: [{name: 'four.bib', _id: 'ghg'}],
folders: []
}
]
]
@docIds = ['aaa', 'ccc']
@handler = SandboxedModule.require modulePath, requires:
'logger-sharelatex': {
log: ->
err: ->
}
'settings-sharelatex': @settings = {
apis:
references: {url: 'http://some.url/references'}
docstore: {url: 'http://some.url/docstore'}
filestore: {url: 'http://some.url/filestore'}
}
'request': @request = {
get: sinon.stub()
post: sinon.stub()
}
'../Project/ProjectGetter': @ProjectGetter = {
getProject: sinon.stub().callsArgWith(2, null, @fakeProject)
}
'../User/UserGetter': @UserGetter = {
getUser: sinon.stub()
}
'../DocumentUpdater/DocumentUpdaterHandler': @DocumentUpdaterHandler = {
flushDocToMongo: sinon.stub().callsArgWith(2, null)
}
@fakeResponseData =
projectId: @projectId
keys: ['k1', 'k2']
describe 'index', ->
beforeEach ->
sinon.stub(@handler, '_findBibDocIds')
sinon.stub(@handler, '_findBibFileIds')
sinon.stub(@handler, '_isFullIndex').callsArgWith(1, null, true)
@request.post.callsArgWith(1, null, {statusCode: 200}, @fakeResponseData)
@call = (callback) =>
@handler.index @projectId, @docIds, callback
describe 'with docIds as an array', ->
beforeEach ->
@docIds = ['aaa', 'ccc']
it 'should not call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 0
done()
it 'should call ProjectGetter.getProject', (done) ->
@call (err, data) =>
@ProjectGetter.getProject.callCount.should.equal 1
@ProjectGetter.getProject.calledWith(@projectId).should.equal true
done()
it 'should not call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 0
done()
it 'should call DocumentUpdaterHandler.flushDocToMongo', (done) ->
@call (err, data) =>
@DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal 2
@docIds.forEach (docId) =>
@DocumentUpdaterHandler.flushDocToMongo.calledWith(@projectId, docId).should.equal true
done()
it 'should make a request to references service', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 1
arg = @request.post.firstCall.args[0]
expect(arg.json).to.have.all.keys 'docUrls', 'fullIndex'
expect(arg.json.docUrls.length).to.equal 2
expect(arg.json.fullIndex).to.equal true
done()
it 'should not produce an error', (done) ->
@call (err, data) =>
expect(err).to.equal null
done()
it 'should return data', (done) ->
@call (err, data) =>
expect(data).to.not.equal null
expect(data).to.not.equal undefined
expect(data).to.equal @fakeResponseData
done()
describe 'when ProjectGetter.getProject produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when _isFullIndex produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when flushDocToMongo produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when request produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, null, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, null)
@request.post.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
describe 'when request responds with error status', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, null, false)
@request.post.callsArgWith(1, null, {statusCode: 500}, null)
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
describe 'indexAll', ->
beforeEach ->
sinon.stub(@handler, '_findBibDocIds').returns(['aaa', 'ccc'])
sinon.stub(@handler, '_findBibFileIds').returns(['fff', 'ggg'])
sinon.stub(@handler, '_isFullIndex').callsArgWith(1, null, true)
@request.post.callsArgWith(1, null, {statusCode: 200}, @fakeResponseData)
@call = (callback) =>
@handler.indexAll @projectId, callback
it 'should call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 1
@handler._findBibDocIds.calledWith(@fakeProject).should.equal true
done()
it 'should call _findBibFileIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 1
@handler._findBibDocIds.calledWith(@fakeProject).should.equal true
done()
it 'should call DocumentUpdaterHandler.flushDocToMongo', (done) ->
@call (err, data) =>
@DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal 2
done()
it 'should make a request to references service', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 1
arg = @request.post.firstCall.args[0]
expect(arg.json).to.have.all.keys 'docUrls', 'fullIndex'
expect(arg.json.docUrls.length).to.equal 4
expect(arg.json.fullIndex).to.equal true
done()
it 'should not produce an error', (done) ->
@call (err, data) =>
expect(err).to.equal null
done()
it 'should return data', (done) ->
@call (err, data) =>
expect(data).to.not.equal null
expect(data).to.not.equal undefined
expect(data).to.equal @fakeResponseData
done()
describe 'when ProjectGetter.getProject produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when _isFullIndex produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when flushDocToMongo produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe '_findBibDocIds', ->
beforeEach ->
@fakeProject =
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
folders: [
{docs: [{name: 'three.bib', _id: 'ccc'}], folders: []}
]
]
@expectedIds = ['aaa', 'ccc']
it 'should select the correct docIds', ->
result = @handler._findBibDocIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
it 'should not error with a non array of folders from dirty data', ->
@fakeProject.rootFolder[0].folders[0].folders = {}
result = @handler._findBibDocIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
describe '_findBibFileIds', ->
beforeEach ->
@fakeProject =
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
fileRefs: [
{name: 'other.bib', _id: 'ddd'}
],
folders: [
{
docs: [{name: 'three.bib', _id: 'ccc'}],
fileRefs: [{name: 'four.bib', _id: 'ghg'}],
folders: []
}
]
]
@expectedIds = ['ddd', 'ghg']
it 'should select the correct docIds', ->
result = @handler._findBibFileIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
describe '_isFullIndex', ->
beforeEach ->
@fakeProject =
owner_ref: @owner_ref = "owner-ref-123"
@owner =
features:
references: false
@UserGetter.getUser = sinon.stub()
@UserGetter.getUser.withArgs(@owner_ref, {features: true}).yields(null, @owner)
@call = (callback) =>
@handler._isFullIndex @fakeProject, callback
describe 'with references feature on', ->
beforeEach ->
@owner.features.references = true
it 'should return true', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal true
describe 'with references feature off', ->
beforeEach ->
@owner.features.references = false
it 'should return false', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal false
describe 'with referencesSearch', ->
beforeEach ->
@owner.features = {referencesSearch: true, references: false}
it 'should return true', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal true
| 77113 | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
expect = require('chai').expect
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/References/ReferencesHandler"
describe 'ReferencesHandler', ->
beforeEach ->
@projectId = '222'
@fakeProject =
_id: @projectId
owner_ref: @fakeOwner =
_id: 'some_owner'
features:
references: false
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
folders: [
{
docs: [{name: 'three.bib', _id: 'ccc'}],
fileRefs: [{name: 'four.bib', _id: 'ghg'}],
folders: []
}
]
]
@docIds = ['aaa', 'ccc']
@handler = SandboxedModule.require modulePath, requires:
'logger-sharelatex': {
log: ->
err: ->
}
'settings-sharelatex': @settings = {
apis:
references: {url: 'http://some.url/references'}
docstore: {url: 'http://some.url/docstore'}
filestore: {url: 'http://some.url/filestore'}
}
'request': @request = {
get: sinon.stub()
post: sinon.stub()
}
'../Project/ProjectGetter': @ProjectGetter = {
getProject: sinon.stub().callsArgWith(2, null, @fakeProject)
}
'../User/UserGetter': @UserGetter = {
getUser: sinon.stub()
}
'../DocumentUpdater/DocumentUpdaterHandler': @DocumentUpdaterHandler = {
flushDocToMongo: sinon.stub().callsArgWith(2, null)
}
@fakeResponseData =
projectId: @projectId
keys: ['k<KEY>', 'k<KEY>']
describe 'index', ->
beforeEach ->
sinon.stub(@handler, '_findBibDocIds')
sinon.stub(@handler, '_findBibFileIds')
sinon.stub(@handler, '_isFullIndex').callsArgWith(1, null, true)
@request.post.callsArgWith(1, null, {statusCode: 200}, @fakeResponseData)
@call = (callback) =>
@handler.index @projectId, @docIds, callback
describe 'with docIds as an array', ->
beforeEach ->
@docIds = ['aaa', 'ccc']
it 'should not call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 0
done()
it 'should call ProjectGetter.getProject', (done) ->
@call (err, data) =>
@ProjectGetter.getProject.callCount.should.equal 1
@ProjectGetter.getProject.calledWith(@projectId).should.equal true
done()
it 'should not call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 0
done()
it 'should call DocumentUpdaterHandler.flushDocToMongo', (done) ->
@call (err, data) =>
@DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal 2
@docIds.forEach (docId) =>
@DocumentUpdaterHandler.flushDocToMongo.calledWith(@projectId, docId).should.equal true
done()
it 'should make a request to references service', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 1
arg = @request.post.firstCall.args[0]
expect(arg.json).to.have.all.keys 'docUrls', 'fullIndex'
expect(arg.json.docUrls.length).to.equal 2
expect(arg.json.fullIndex).to.equal true
done()
it 'should not produce an error', (done) ->
@call (err, data) =>
expect(err).to.equal null
done()
it 'should return data', (done) ->
@call (err, data) =>
expect(data).to.not.equal null
expect(data).to.not.equal undefined
expect(data).to.equal @fakeResponseData
done()
describe 'when ProjectGetter.getProject produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when _isFullIndex produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when flushDocToMongo produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when request produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, null, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, null)
@request.post.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
describe 'when request responds with error status', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, null, false)
@request.post.callsArgWith(1, null, {statusCode: 500}, null)
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
describe 'indexAll', ->
beforeEach ->
sinon.stub(@handler, '_findBibDocIds').returns(['aaa', 'ccc'])
sinon.stub(@handler, '_findBibFileIds').returns(['fff', 'ggg'])
sinon.stub(@handler, '_isFullIndex').callsArgWith(1, null, true)
@request.post.callsArgWith(1, null, {statusCode: 200}, @fakeResponseData)
@call = (callback) =>
@handler.indexAll @projectId, callback
it 'should call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 1
@handler._findBibDocIds.calledWith(@fakeProject).should.equal true
done()
it 'should call _findBibFileIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 1
@handler._findBibDocIds.calledWith(@fakeProject).should.equal true
done()
it 'should call DocumentUpdaterHandler.flushDocToMongo', (done) ->
@call (err, data) =>
@DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal 2
done()
it 'should make a request to references service', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 1
arg = @request.post.firstCall.args[0]
expect(arg.json).to.have.all.keys 'docUrls', 'fullIndex'
expect(arg.json.docUrls.length).to.equal 4
expect(arg.json.fullIndex).to.equal true
done()
it 'should not produce an error', (done) ->
@call (err, data) =>
expect(err).to.equal null
done()
it 'should return data', (done) ->
@call (err, data) =>
expect(data).to.not.equal null
expect(data).to.not.equal undefined
expect(data).to.equal @fakeResponseData
done()
describe 'when ProjectGetter.getProject produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when _isFullIndex produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when flushDocToMongo produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe '_findBibDocIds', ->
beforeEach ->
@fakeProject =
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
folders: [
{docs: [{name: 'three.bib', _id: 'ccc'}], folders: []}
]
]
@expectedIds = ['aaa', 'ccc']
it 'should select the correct docIds', ->
result = @handler._findBibDocIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
it 'should not error with a non array of folders from dirty data', ->
@fakeProject.rootFolder[0].folders[0].folders = {}
result = @handler._findBibDocIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
describe '_findBibFileIds', ->
beforeEach ->
@fakeProject =
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
fileRefs: [
{name: 'other.bib', _id: 'ddd'}
],
folders: [
{
docs: [{name: 'three.bib', _id: 'ccc'}],
fileRefs: [{name: 'four.bib', _id: 'ghg'}],
folders: []
}
]
]
@expectedIds = ['ddd', 'ghg']
it 'should select the correct docIds', ->
result = @handler._findBibFileIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
describe '_isFullIndex', ->
beforeEach ->
@fakeProject =
owner_ref: @owner_ref = "owner-ref-123"
@owner =
features:
references: false
@UserGetter.getUser = sinon.stub()
@UserGetter.getUser.withArgs(@owner_ref, {features: true}).yields(null, @owner)
@call = (callback) =>
@handler._isFullIndex @fakeProject, callback
describe 'with references feature on', ->
beforeEach ->
@owner.features.references = true
it 'should return true', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal true
describe 'with references feature off', ->
beforeEach ->
@owner.features.references = false
it 'should return false', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal false
describe 'with referencesSearch', ->
beforeEach ->
@owner.features = {referencesSearch: true, references: false}
it 'should return true', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal true
| true | SandboxedModule = require('sandboxed-module')
should = require('chai').should()
expect = require('chai').expect
sinon = require 'sinon'
assert = require("chai").assert
modulePath = "../../../../app/js/Features/References/ReferencesHandler"
describe 'ReferencesHandler', ->
beforeEach ->
@projectId = '222'
@fakeProject =
_id: @projectId
owner_ref: @fakeOwner =
_id: 'some_owner'
features:
references: false
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
folders: [
{
docs: [{name: 'three.bib', _id: 'ccc'}],
fileRefs: [{name: 'four.bib', _id: 'ghg'}],
folders: []
}
]
]
@docIds = ['aaa', 'ccc']
@handler = SandboxedModule.require modulePath, requires:
'logger-sharelatex': {
log: ->
err: ->
}
'settings-sharelatex': @settings = {
apis:
references: {url: 'http://some.url/references'}
docstore: {url: 'http://some.url/docstore'}
filestore: {url: 'http://some.url/filestore'}
}
'request': @request = {
get: sinon.stub()
post: sinon.stub()
}
'../Project/ProjectGetter': @ProjectGetter = {
getProject: sinon.stub().callsArgWith(2, null, @fakeProject)
}
'../User/UserGetter': @UserGetter = {
getUser: sinon.stub()
}
'../DocumentUpdater/DocumentUpdaterHandler': @DocumentUpdaterHandler = {
flushDocToMongo: sinon.stub().callsArgWith(2, null)
}
@fakeResponseData =
projectId: @projectId
keys: ['kPI:KEY:<KEY>END_PI', 'kPI:KEY:<KEY>END_PI']
describe 'index', ->
beforeEach ->
sinon.stub(@handler, '_findBibDocIds')
sinon.stub(@handler, '_findBibFileIds')
sinon.stub(@handler, '_isFullIndex').callsArgWith(1, null, true)
@request.post.callsArgWith(1, null, {statusCode: 200}, @fakeResponseData)
@call = (callback) =>
@handler.index @projectId, @docIds, callback
describe 'with docIds as an array', ->
beforeEach ->
@docIds = ['aaa', 'ccc']
it 'should not call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 0
done()
it 'should call ProjectGetter.getProject', (done) ->
@call (err, data) =>
@ProjectGetter.getProject.callCount.should.equal 1
@ProjectGetter.getProject.calledWith(@projectId).should.equal true
done()
it 'should not call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 0
done()
it 'should call DocumentUpdaterHandler.flushDocToMongo', (done) ->
@call (err, data) =>
@DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal 2
@docIds.forEach (docId) =>
@DocumentUpdaterHandler.flushDocToMongo.calledWith(@projectId, docId).should.equal true
done()
it 'should make a request to references service', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 1
arg = @request.post.firstCall.args[0]
expect(arg.json).to.have.all.keys 'docUrls', 'fullIndex'
expect(arg.json.docUrls.length).to.equal 2
expect(arg.json.fullIndex).to.equal true
done()
it 'should not produce an error', (done) ->
@call (err, data) =>
expect(err).to.equal null
done()
it 'should return data', (done) ->
@call (err, data) =>
expect(data).to.not.equal null
expect(data).to.not.equal undefined
expect(data).to.equal @fakeResponseData
done()
describe 'when ProjectGetter.getProject produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when _isFullIndex produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when flushDocToMongo produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when request produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, null, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, null)
@request.post.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
describe 'when request responds with error status', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, null, false)
@request.post.callsArgWith(1, null, {statusCode: 500}, null)
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
describe 'indexAll', ->
beforeEach ->
sinon.stub(@handler, '_findBibDocIds').returns(['aaa', 'ccc'])
sinon.stub(@handler, '_findBibFileIds').returns(['fff', 'ggg'])
sinon.stub(@handler, '_isFullIndex').callsArgWith(1, null, true)
@request.post.callsArgWith(1, null, {statusCode: 200}, @fakeResponseData)
@call = (callback) =>
@handler.indexAll @projectId, callback
it 'should call _findBibDocIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 1
@handler._findBibDocIds.calledWith(@fakeProject).should.equal true
done()
it 'should call _findBibFileIds', (done) ->
@call (err, data) =>
@handler._findBibDocIds.callCount.should.equal 1
@handler._findBibDocIds.calledWith(@fakeProject).should.equal true
done()
it 'should call DocumentUpdaterHandler.flushDocToMongo', (done) ->
@call (err, data) =>
@DocumentUpdaterHandler.flushDocToMongo.callCount.should.equal 2
done()
it 'should make a request to references service', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 1
arg = @request.post.firstCall.args[0]
expect(arg.json).to.have.all.keys 'docUrls', 'fullIndex'
expect(arg.json.docUrls.length).to.equal 4
expect(arg.json.fullIndex).to.equal true
done()
it 'should not produce an error', (done) ->
@call (err, data) =>
expect(err).to.equal null
done()
it 'should return data', (done) ->
@call (err, data) =>
expect(data).to.not.equal null
expect(data).to.not.equal undefined
expect(data).to.equal @fakeResponseData
done()
describe 'when ProjectGetter.getProject produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when _isFullIndex produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe 'when flushDocToMongo produces an error', ->
beforeEach ->
@ProjectGetter.getProject.callsArgWith(2, null, @fakeProject)
@handler._isFullIndex.callsArgWith(1, false)
@DocumentUpdaterHandler.flushDocToMongo.callsArgWith(2, new Error('woops'))
it 'should produce an error', (done) ->
@call (err, data) =>
expect(err).to.not.equal null
expect(err).to.be.instanceof Error
expect(data).to.equal undefined
done()
it 'should not send request', (done) ->
@call (err, data) =>
@request.post.callCount.should.equal 0
done()
describe '_findBibDocIds', ->
beforeEach ->
@fakeProject =
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
folders: [
{docs: [{name: 'three.bib', _id: 'ccc'}], folders: []}
]
]
@expectedIds = ['aaa', 'ccc']
it 'should select the correct docIds', ->
result = @handler._findBibDocIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
it 'should not error with a non array of folders from dirty data', ->
@fakeProject.rootFolder[0].folders[0].folders = {}
result = @handler._findBibDocIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
describe '_findBibFileIds', ->
beforeEach ->
@fakeProject =
rootFolder: [
docs: [
{name: 'one.bib', _id: 'aaa'},
{name: 'two.txt', _id: 'bbb'},
]
fileRefs: [
{name: 'other.bib', _id: 'ddd'}
],
folders: [
{
docs: [{name: 'three.bib', _id: 'ccc'}],
fileRefs: [{name: 'four.bib', _id: 'ghg'}],
folders: []
}
]
]
@expectedIds = ['ddd', 'ghg']
it 'should select the correct docIds', ->
result = @handler._findBibFileIds(@fakeProject)
expect(result).to.deep.equal @expectedIds
describe '_isFullIndex', ->
beforeEach ->
@fakeProject =
owner_ref: @owner_ref = "owner-ref-123"
@owner =
features:
references: false
@UserGetter.getUser = sinon.stub()
@UserGetter.getUser.withArgs(@owner_ref, {features: true}).yields(null, @owner)
@call = (callback) =>
@handler._isFullIndex @fakeProject, callback
describe 'with references feature on', ->
beforeEach ->
@owner.features.references = true
it 'should return true', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal true
describe 'with references feature off', ->
beforeEach ->
@owner.features.references = false
it 'should return false', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal false
describe 'with referencesSearch', ->
beforeEach ->
@owner.features = {referencesSearch: true, references: false}
it 'should return true', ->
@call (err, isFullIndex) =>
expect(err).to.equal null
expect(isFullIndex).to.equal true
|
[
{
"context": "ommon')\n\ntest 'date-test', [\n {\n Name: 'Bob'\n Date: new Date(\"2013-11-27T09:14:15.617Z",
"end": 72,
"score": 0.9998663663864136,
"start": 69,
"tag": "NAME",
"value": "Bob"
},
{
"context": "-11-27T09:14:15.617Z\")\n }\n {\n Name: 'Alic... | test/date_test.coffee | SBeyeMHP/node-xlsx-writestream | 42 | test = require('./common')
test 'date-test', [
{
Name: 'Bob'
Date: new Date("2013-11-27T09:14:15.617Z")
}
{
Name: 'Alice'
Date: new Date("2013-11-27T09:14:15.617Z")
}
]
| 178518 | test = require('./common')
test 'date-test', [
{
Name: '<NAME>'
Date: new Date("2013-11-27T09:14:15.617Z")
}
{
Name: '<NAME>'
Date: new Date("2013-11-27T09:14:15.617Z")
}
]
| true | test = require('./common')
test 'date-test', [
{
Name: 'PI:NAME:<NAME>END_PI'
Date: new Date("2013-11-27T09:14:15.617Z")
}
{
Name: 'PI:NAME:<NAME>END_PI'
Date: new Date("2013-11-27T09:14:15.617Z")
}
]
|
[
{
"context": "d: true\n manufactured: true\n name: \"TestPart\"\n params: \"\"\n quantity: 2\n ",
"end": 3142,
"score": 0.7360716462135315,
"start": 3138,
"tag": "NAME",
"value": "Test"
}
] | src/test/spec/project.spec.coffee | kaosat-dev/CoffeeSCad | 110 | define (require)->
$ = require 'jquery'
_ = require 'underscore'
Project = require "core/projects/project"
checkDeferred=(df,fn) ->
callback = jasmine.createSpy()
errback = jasmine.createSpy()
df.then(callback, errback)
waitsFor -> callback.callCount > 0
runs ->
fn.apply @,callback.mostRecentCall.args if fn
expect(errback).not.toHaveBeenCalled()
describe "Project ", ->
project = null
compiler = null
beforeEach ->
project = new Project
name:"Project"
it 'can make new project files',->
project.addFile
name:"Project.coffee"
content:"testContent"
expect(project.rootFolder.at(0).name).toBe("Project.coffee")
it 'can remove files from itself' , ->
file = project.addFile
name:"testFileName"
content:"testContent"
project.removeFile(file)
expect(project.rootFolder.length).toBe 0
it 'can have only one active file at a time (selectable)', ->
file = project.addFile
name:"Project.coffee"
content:"testContent"
file2 = project.addFile
name:"otherFile.coffee"
content:"testContent"
activeFile = project.makeFileActive({file:file})
expect(activeFile).toEqual file
expect(file.isActive).toBe true
expect(file2.isActive).toBe false
activeFile = project.makeFileActive({fileName:file2.name})
expect(activeFile).toEqual file2
expect(file2.isActive).toBe true
expect(file.isActive).toBe false
activeFile = project.makeFileActive(file.name)
expect(activeFile).toEqual file
expect(file.isActive).toBe true
expect(file2.isActive).toBe false
activeFile = project.makeFileActive(file2)
expect(activeFile).toEqual file2
expect(file2.isActive).toBe true
expect(file.isActive).toBe false
it 'compiles the contents of its files into an assembly of parts', ->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
testPart = new TestPart()
assembly.add(testPart)
"""
checkDeferred $.when(project.compile()), (assembly) =>
expect(project.rootAssembly.children[0].polygons.length).toBe(9)
it 'generates bom data when compiling',->
project.addFile
name:"Project.coffee"
content:"""
class SubPart extends Part
constructor:(options)->
super options
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
@add(new SubPart())
@add(new SubPart())
testPart = new TestPart()
assembly.add(testPart)
"""
###
expBom = new Backbone.Collection()
expPart = new Backbone.Model
included: true
manufactured: true
name: "TestPart"
params: ""
quantity: 2
variant: "Default"
expBom.add expPart###
checkDeferred $.when(project.compile()), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"Default","params":"","quantity":1,"manufactured":true,"included":true},{"name":"SubPart","variant":"Default","params":"","quantity":2,"manufactured":true,"included":true}]')
it 'handles variants (different options) for parts in bom data correctly',->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
defaults = {thickness:5}
{@thickness} = options = merge(defaults, options)
super options
@union(new Cylinder(h:@thickness, r:20,$fn:3))
testPart = new TestPart()
testPartVar2 = new TestPart({thickness:15})
assembly.add(testPart)
assembly.add(testPartVar2)
"""
checkDeferred $.when(project.compile()), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"","params":"{\\"thickness\\":5}","quantity":1,"manufactured":true,"included":true},{"name":"TestPart","variant":"","params":"{\\"thickness\\":15}","quantity":1,"manufactured":true,"included":true}]')
it 'handles variants (different options) for parts in bom data correctly (background processing)',->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
defaults = {thickness:5}
{@thickness} = options = merge(defaults, options)
super options
@union(new Cylinder(h:@thickness, r:20,$fn:3))
testPart = new TestPart()
testPartVar2 = new TestPart({thickness:15})
assembly.add(testPart)
assembly.add(testPartVar2)
"""
checkDeferred $.when(project.compile({backgroundProcessing:true})), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"","params":"{\\"thickness\\":5}","quantity":1,"manufactured":true,"included":true},{"name":"TestPart","variant":"","params":"{\\"thickness\\":15}","quantity":1,"manufactured":true,"included":true}]')
it 'is marked as "dirty" when one of its files gets modified', ->
expect(project.isCompileAdvised).toBe(false)
project.addFile
name:"test_project"
content:"""
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
testPart = new TestPart()
assembly.add(testPart)
"""
project.isCompileAdvised = false
mainFile = project.rootFolder.get("test_project")
mainFile.content= ""
expect(project.isCompileAdvised).toBe(true)
#########################
###
describe "projectFile", ->
project = null
part = null
beforeEach ->
project = new Project
name:"test_project"
part = new ProjectFile
name: "test_part"
ext: "coscad"
content: ""
project.add part
afterEach ->
part.destroy()
localStorage.removeItem("Library-test_project")
localStorage.removeItem("Library-test_project-parts")
it 'flags itself as isCompileAdvised on change' , ->
part.content="DummyContent"
expect(part.isCompileAdvised).toBe true
it 'flags itself as not isCompileAdvised on save' , ->
part.content="DummyContent"
part.save()
expect(part.isCompileAdvised).toBe false
###
| 53312 | define (require)->
$ = require 'jquery'
_ = require 'underscore'
Project = require "core/projects/project"
checkDeferred=(df,fn) ->
callback = jasmine.createSpy()
errback = jasmine.createSpy()
df.then(callback, errback)
waitsFor -> callback.callCount > 0
runs ->
fn.apply @,callback.mostRecentCall.args if fn
expect(errback).not.toHaveBeenCalled()
describe "Project ", ->
project = null
compiler = null
beforeEach ->
project = new Project
name:"Project"
it 'can make new project files',->
project.addFile
name:"Project.coffee"
content:"testContent"
expect(project.rootFolder.at(0).name).toBe("Project.coffee")
it 'can remove files from itself' , ->
file = project.addFile
name:"testFileName"
content:"testContent"
project.removeFile(file)
expect(project.rootFolder.length).toBe 0
it 'can have only one active file at a time (selectable)', ->
file = project.addFile
name:"Project.coffee"
content:"testContent"
file2 = project.addFile
name:"otherFile.coffee"
content:"testContent"
activeFile = project.makeFileActive({file:file})
expect(activeFile).toEqual file
expect(file.isActive).toBe true
expect(file2.isActive).toBe false
activeFile = project.makeFileActive({fileName:file2.name})
expect(activeFile).toEqual file2
expect(file2.isActive).toBe true
expect(file.isActive).toBe false
activeFile = project.makeFileActive(file.name)
expect(activeFile).toEqual file
expect(file.isActive).toBe true
expect(file2.isActive).toBe false
activeFile = project.makeFileActive(file2)
expect(activeFile).toEqual file2
expect(file2.isActive).toBe true
expect(file.isActive).toBe false
it 'compiles the contents of its files into an assembly of parts', ->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
testPart = new TestPart()
assembly.add(testPart)
"""
checkDeferred $.when(project.compile()), (assembly) =>
expect(project.rootAssembly.children[0].polygons.length).toBe(9)
it 'generates bom data when compiling',->
project.addFile
name:"Project.coffee"
content:"""
class SubPart extends Part
constructor:(options)->
super options
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
@add(new SubPart())
@add(new SubPart())
testPart = new TestPart()
assembly.add(testPart)
"""
###
expBom = new Backbone.Collection()
expPart = new Backbone.Model
included: true
manufactured: true
name: "<NAME>Part"
params: ""
quantity: 2
variant: "Default"
expBom.add expPart###
checkDeferred $.when(project.compile()), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"Default","params":"","quantity":1,"manufactured":true,"included":true},{"name":"SubPart","variant":"Default","params":"","quantity":2,"manufactured":true,"included":true}]')
it 'handles variants (different options) for parts in bom data correctly',->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
defaults = {thickness:5}
{@thickness} = options = merge(defaults, options)
super options
@union(new Cylinder(h:@thickness, r:20,$fn:3))
testPart = new TestPart()
testPartVar2 = new TestPart({thickness:15})
assembly.add(testPart)
assembly.add(testPartVar2)
"""
checkDeferred $.when(project.compile()), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"","params":"{\\"thickness\\":5}","quantity":1,"manufactured":true,"included":true},{"name":"TestPart","variant":"","params":"{\\"thickness\\":15}","quantity":1,"manufactured":true,"included":true}]')
it 'handles variants (different options) for parts in bom data correctly (background processing)',->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
defaults = {thickness:5}
{@thickness} = options = merge(defaults, options)
super options
@union(new Cylinder(h:@thickness, r:20,$fn:3))
testPart = new TestPart()
testPartVar2 = new TestPart({thickness:15})
assembly.add(testPart)
assembly.add(testPartVar2)
"""
checkDeferred $.when(project.compile({backgroundProcessing:true})), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"","params":"{\\"thickness\\":5}","quantity":1,"manufactured":true,"included":true},{"name":"TestPart","variant":"","params":"{\\"thickness\\":15}","quantity":1,"manufactured":true,"included":true}]')
it 'is marked as "dirty" when one of its files gets modified', ->
expect(project.isCompileAdvised).toBe(false)
project.addFile
name:"test_project"
content:"""
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
testPart = new TestPart()
assembly.add(testPart)
"""
project.isCompileAdvised = false
mainFile = project.rootFolder.get("test_project")
mainFile.content= ""
expect(project.isCompileAdvised).toBe(true)
#########################
###
describe "projectFile", ->
project = null
part = null
beforeEach ->
project = new Project
name:"test_project"
part = new ProjectFile
name: "test_part"
ext: "coscad"
content: ""
project.add part
afterEach ->
part.destroy()
localStorage.removeItem("Library-test_project")
localStorage.removeItem("Library-test_project-parts")
it 'flags itself as isCompileAdvised on change' , ->
part.content="DummyContent"
expect(part.isCompileAdvised).toBe true
it 'flags itself as not isCompileAdvised on save' , ->
part.content="DummyContent"
part.save()
expect(part.isCompileAdvised).toBe false
###
| true | define (require)->
$ = require 'jquery'
_ = require 'underscore'
Project = require "core/projects/project"
checkDeferred=(df,fn) ->
callback = jasmine.createSpy()
errback = jasmine.createSpy()
df.then(callback, errback)
waitsFor -> callback.callCount > 0
runs ->
fn.apply @,callback.mostRecentCall.args if fn
expect(errback).not.toHaveBeenCalled()
describe "Project ", ->
project = null
compiler = null
beforeEach ->
project = new Project
name:"Project"
it 'can make new project files',->
project.addFile
name:"Project.coffee"
content:"testContent"
expect(project.rootFolder.at(0).name).toBe("Project.coffee")
it 'can remove files from itself' , ->
file = project.addFile
name:"testFileName"
content:"testContent"
project.removeFile(file)
expect(project.rootFolder.length).toBe 0
it 'can have only one active file at a time (selectable)', ->
file = project.addFile
name:"Project.coffee"
content:"testContent"
file2 = project.addFile
name:"otherFile.coffee"
content:"testContent"
activeFile = project.makeFileActive({file:file})
expect(activeFile).toEqual file
expect(file.isActive).toBe true
expect(file2.isActive).toBe false
activeFile = project.makeFileActive({fileName:file2.name})
expect(activeFile).toEqual file2
expect(file2.isActive).toBe true
expect(file.isActive).toBe false
activeFile = project.makeFileActive(file.name)
expect(activeFile).toEqual file
expect(file.isActive).toBe true
expect(file2.isActive).toBe false
activeFile = project.makeFileActive(file2)
expect(activeFile).toEqual file2
expect(file2.isActive).toBe true
expect(file.isActive).toBe false
it 'compiles the contents of its files into an assembly of parts', ->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
testPart = new TestPart()
assembly.add(testPart)
"""
checkDeferred $.when(project.compile()), (assembly) =>
expect(project.rootAssembly.children[0].polygons.length).toBe(9)
it 'generates bom data when compiling',->
project.addFile
name:"Project.coffee"
content:"""
class SubPart extends Part
constructor:(options)->
super options
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
@add(new SubPart())
@add(new SubPart())
testPart = new TestPart()
assembly.add(testPart)
"""
###
expBom = new Backbone.Collection()
expPart = new Backbone.Model
included: true
manufactured: true
name: "PI:NAME:<NAME>END_PIPart"
params: ""
quantity: 2
variant: "Default"
expBom.add expPart###
checkDeferred $.when(project.compile()), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"Default","params":"","quantity":1,"manufactured":true,"included":true},{"name":"SubPart","variant":"Default","params":"","quantity":2,"manufactured":true,"included":true}]')
it 'handles variants (different options) for parts in bom data correctly',->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
defaults = {thickness:5}
{@thickness} = options = merge(defaults, options)
super options
@union(new Cylinder(h:@thickness, r:20,$fn:3))
testPart = new TestPart()
testPartVar2 = new TestPart({thickness:15})
assembly.add(testPart)
assembly.add(testPartVar2)
"""
checkDeferred $.when(project.compile()), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"","params":"{\\"thickness\\":5}","quantity":1,"manufactured":true,"included":true},{"name":"TestPart","variant":"","params":"{\\"thickness\\":15}","quantity":1,"manufactured":true,"included":true}]')
it 'handles variants (different options) for parts in bom data correctly (background processing)',->
project.addFile
name:"Project.coffee"
content:"""
class TestPart extends Part
constructor:(options) ->
defaults = {thickness:5}
{@thickness} = options = merge(defaults, options)
super options
@union(new Cylinder(h:@thickness, r:20,$fn:3))
testPart = new TestPart()
testPartVar2 = new TestPart({thickness:15})
assembly.add(testPart)
assembly.add(testPartVar2)
"""
checkDeferred $.when(project.compile({backgroundProcessing:true})), (assembly) =>
expect(JSON.stringify(project.bom)).toEqual('[{"name":"TestPart","variant":"","params":"{\\"thickness\\":5}","quantity":1,"manufactured":true,"included":true},{"name":"TestPart","variant":"","params":"{\\"thickness\\":15}","quantity":1,"manufactured":true,"included":true}]')
it 'is marked as "dirty" when one of its files gets modified', ->
expect(project.isCompileAdvised).toBe(false)
project.addFile
name:"test_project"
content:"""
class TestPart extends Part
constructor:(options) ->
super options
@union(new Cylinder(h:300, r:20,$fn:3))
testPart = new TestPart()
assembly.add(testPart)
"""
project.isCompileAdvised = false
mainFile = project.rootFolder.get("test_project")
mainFile.content= ""
expect(project.isCompileAdvised).toBe(true)
#########################
###
describe "projectFile", ->
project = null
part = null
beforeEach ->
project = new Project
name:"test_project"
part = new ProjectFile
name: "test_part"
ext: "coscad"
content: ""
project.add part
afterEach ->
part.destroy()
localStorage.removeItem("Library-test_project")
localStorage.removeItem("Library-test_project-parts")
it 'flags itself as isCompileAdvised on change' , ->
part.content="DummyContent"
expect(part.isCompileAdvised).toBe true
it 'flags itself as not isCompileAdvised on save' , ->
part.content="DummyContent"
part.save()
expect(part.isCompileAdvised).toBe false
###
|
[
{
"context": "le Picture\"\n display: [\"show\"]\n\nRory = \n id: \"rory \"\n name: \"Rory Miller\"\n externalLink: \"http://c",
"end": 527,
"score": 0.9901135563850403,
"start": 523,
"tag": "USERNAME",
"value": "rory"
},
{
"context": "display: [\"show\"]\n\nRory = \n id: \"ro... | tests/dummy/app/models/instructor.coffee | annotating/VioDy | 0 | `import DS from 'ember-data'`
Instructor = DS.Model.extend
name: DS.attr "string",
label: "Instructor Name"
display: ["show", "index"]
description: DS.attr "string",
label: "About Me"
display: ["show"]
description: "The markdown-flavor text describing this instructor"
externalLink: DS.attr "string",
label: "Website"
description: "Link to this instructor's website"
display: ["show"]
mugShot: DS.attr "string",
label: "Profile Picture"
display: ["show"]
Rory =
id: "rory "
name: "Rory Miller"
externalLink: "http://chirontraining.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-rory-miller.jpg"
description: """
“Force is a form of communication. It is the most emphatic possible way of saying ‘no’. For years my job was to say no, sometimes very emphatically, to violent people.
“I have been a Corrections Officer, a Sergeant, a Tactical Team member and a Tactical Team Leader; I have taught corrections and enforcement personnel skills from first aid to physical defense to crisis communication and mental health. I’ve done this from my west coast home to Baghdad. So far, my life has been a blast.
“I’m a bit scarred up, but generally happy.”
"""
Marc =
id: "marc "
name: "Marc MacYoung"
externalLink: "http://nononsenseselfdefense.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-marc-macyoung.jpg"
description: """
The gang-infested streets of Los Angeles not only gave Marc MacYoung his street name “Animal,” but also firsthand experience about what does and does not work for self-defense. What he teaches is based on experience and proven reliability for surviving violence. If it didn’t work, he wouldn’t be alive to talk about it.
He is considered by many to be one of the most analytical thinkers on the subject of surviving violence and personal safety today. He has taught police, military, martial artists and civilians around the world. His message is always the same: Hand-to-hand combat is a last ditch effort when other, more effective, preventive measures have failed.
"""
Kathy =
id: "kathy"
name: "Kathy Jackson"
externalLink: "http://corneredcat.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-kathy-jackson-150x150.jpg"
description: """
“I’m a firearms instructor, a homeschool mom, a small business owner, a former magazine editor, and a freelance writer. I guess you could say I wear a lot of hats. My husband and I have been married more than 25 years, and we have five sons.
“I’m a frequent contributor to Women & Guns Magazine, and my work has appeared in SWAT Magazine. I co-authored [Lessons from Armed America](http://www.amazon.com/Lessons-Armed-America-Kathy-Jackson/dp/1453685553), a book which presents several real-life accounts of people protecting themselves from criminals and the lessons we can learn from those situations. My most recent book is: [The Cornered Cat: A Woman’s Guide to Concealed Carry](http://www.amazon.com/Cornered-Cat-Womans-Guide-Concealed/dp/0982248792/ref=sr_1_1?ie=UTF8&qid=1314026571&sr=8-1).”
"""
Terry =
id: "terry"
name: "Terry Trahan"
externalLink: "http://weaselcraft.blogspot.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-terry-trahan.jpg"
description: """
Having spent many years dealing with violence, various subcultures, and street life gives Terry Trahan a unique view on life and the dynamics of violence.
Having a strong interest in efficient answers to violence, de-escalation, urban survival, and escape, Terry’s focus is more on the civilian end of dealing with violence, covering armed, unarmed, improvised weapons, threat assessment, awareness, and unconventional strategies.
Terry has years of training in SouthEast Asian Martial Arts, heads the Kapatiran Suntukan Martial Arts organization, and is the lead instructor for [WeaselCraft](http://weaselcraft.blogspot.com/), his non-traditional approach to personal security, and specializes in all aspects of knives, from use, to design and function.
"""
Kaesey =
id: "kaese"
name: "Kasey Keckeisen"
externalLink: "http://practicalbudo.blogspot.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-kasey-keckeisen.jpg"
description: """
Kasey Keckeisen is an experienced Police Officer, SWAT team leader, and SWAT training coordinator. Kasey Keckeisen is the United States Midwest Regional Director for the Edo Machi-Kata Taiho Jutsu organization, and the Minnesota State Director for One-On-One Control Tactics. Keckeisen Sensei Holds a 5th degree black belt in Jujutsu and is recognized as a Shihan by the International Shin Budo Association. Keckeisen Sensei also has an extensive training background in other Budo holding a 3rd degree black belt and teaching certificate from the International Yoshinkan Aikido Federation, a 1st degree black belt in Nippon Kan Aikido, and a 3rd degree black belt in Traditional Kodokan Judo.
"""
Instructor.reopenClass
FIXTURES: [Rory, Marc, Kathy, Terry, Kaesey]
`export default Instructor` | 48812 | `import DS from 'ember-data'`
Instructor = DS.Model.extend
name: DS.attr "string",
label: "Instructor Name"
display: ["show", "index"]
description: DS.attr "string",
label: "About Me"
display: ["show"]
description: "The markdown-flavor text describing this instructor"
externalLink: DS.attr "string",
label: "Website"
description: "Link to this instructor's website"
display: ["show"]
mugShot: DS.attr "string",
label: "Profile Picture"
display: ["show"]
Rory =
id: "rory "
name: "<NAME>"
externalLink: "http://chirontraining.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-rory-miller.jpg"
description: """
“Force is a form of communication. It is the most emphatic possible way of saying ‘no’. For years my job was to say no, sometimes very emphatically, to violent people.
“I have been a Corrections Officer, a Sergeant, a Tactical Team member and a Tactical Team Leader; I have taught corrections and enforcement personnel skills from first aid to physical defense to crisis communication and mental health. I’ve done this from my west coast home to Baghdad. So far, my life has been a blast.
“I’m a bit scarred up, but generally happy.”
"""
Marc =
id: "marc "
name: "<NAME>"
externalLink: "http://nononsenseselfdefense.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-marc-macyoung.jpg"
description: """
The gang-infested streets of Los Angeles not only gave Marc MacYoung his street name “Animal,” but also firsthand experience about what does and does not work for self-defense. What he teaches is based on experience and proven reliability for surviving violence. If it didn’t work, he wouldn’t be alive to talk about it.
He is considered by many to be one of the most analytical thinkers on the subject of surviving violence and personal safety today. He has taught police, military, martial artists and civilians around the world. His message is always the same: Hand-to-hand combat is a last ditch effort when other, more effective, preventive measures have failed.
"""
Kathy =
id: "kathy"
name: "<NAME>"
externalLink: "http://corneredcat.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-kathy-jackson-150x150.jpg"
description: """
“I’m a firearms instructor, a homeschool mom, a small business owner, a former magazine editor, and a freelance writer. I guess you could say I wear a lot of hats. My husband and I have been married more than 25 years, and we have five sons.
“I’m a frequent contributor to Women & Guns Magazine, and my work has appeared in SWAT Magazine. I co-authored [Lessons from Armed America](http://www.amazon.com/Lessons-Armed-America-Kathy-Jackson/dp/1453685553), a book which presents several real-life accounts of people protecting themselves from criminals and the lessons we can learn from those situations. My most recent book is: [The Cornered Cat: A Woman’s Guide to Concealed Carry](http://www.amazon.com/Cornered-Cat-Womans-Guide-Concealed/dp/0982248792/ref=sr_1_1?ie=UTF8&qid=1314026571&sr=8-1).”
"""
Terry =
id: "terry"
name: "<NAME>"
externalLink: "http://weaselcraft.blogspot.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-terry-trahan.jpg"
description: """
Having spent many years dealing with violence, various subcultures, and street life gives <NAME> a unique view on life and the dynamics of violence.
Having a strong interest in efficient answers to violence, de-escalation, urban survival, and escape, Terry’s focus is more on the civilian end of dealing with violence, covering armed, unarmed, improvised weapons, threat assessment, awareness, and unconventional strategies.
Terry has years of training in SouthEast Asian Martial Arts, heads the Kapatiran Suntukan Martial Arts organization, and is the lead instructor for [WeaselCraft](http://weaselcraft.blogspot.com/), his non-traditional approach to personal security, and specializes in all aspects of knives, from use, to design and function.
"""
Kaesey =
id: "kaese"
name: "<NAME>"
externalLink: "http://practicalbudo.blogspot.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-kasey-keckeisen.jpg"
description: """
<NAME> is an experienced Police Officer, SWAT team leader, and SWAT training coordinator. <NAME> is the United States Midwest Regional Director for the Edo Machi-Kata Taiho Jutsu organization, and the Minnesota State Director for One-On-One Control Tactics. Keckeisen Sensei Holds a 5th degree black belt in Jujutsu and is recognized as a Shihan by the International Shin Budo Association. Keckeisen Sensei also has an extensive training background in other Budo holding a 3rd degree black belt and teaching certificate from the International Yoshinkan Aikido Federation, a 1st degree black belt in Nippon K<NAME>, and a 3rd degree black belt in Traditional Kodokan Judo.
"""
Instructor.reopenClass
FIXTURES: [<NAME>, <NAME>, <NAME>, <NAME>, <NAME>aesey]
`export default Instructor` | true | `import DS from 'ember-data'`
Instructor = DS.Model.extend
name: DS.attr "string",
label: "Instructor Name"
display: ["show", "index"]
description: DS.attr "string",
label: "About Me"
display: ["show"]
description: "The markdown-flavor text describing this instructor"
externalLink: DS.attr "string",
label: "Website"
description: "Link to this instructor's website"
display: ["show"]
mugShot: DS.attr "string",
label: "Profile Picture"
display: ["show"]
Rory =
id: "rory "
name: "PI:NAME:<NAME>END_PI"
externalLink: "http://chirontraining.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-rory-miller.jpg"
description: """
“Force is a form of communication. It is the most emphatic possible way of saying ‘no’. For years my job was to say no, sometimes very emphatically, to violent people.
“I have been a Corrections Officer, a Sergeant, a Tactical Team member and a Tactical Team Leader; I have taught corrections and enforcement personnel skills from first aid to physical defense to crisis communication and mental health. I’ve done this from my west coast home to Baghdad. So far, my life has been a blast.
“I’m a bit scarred up, but generally happy.”
"""
Marc =
id: "marc "
name: "PI:NAME:<NAME>END_PI"
externalLink: "http://nononsenseselfdefense.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-marc-macyoung.jpg"
description: """
The gang-infested streets of Los Angeles not only gave Marc MacYoung his street name “Animal,” but also firsthand experience about what does and does not work for self-defense. What he teaches is based on experience and proven reliability for surviving violence. If it didn’t work, he wouldn’t be alive to talk about it.
He is considered by many to be one of the most analytical thinkers on the subject of surviving violence and personal safety today. He has taught police, military, martial artists and civilians around the world. His message is always the same: Hand-to-hand combat is a last ditch effort when other, more effective, preventive measures have failed.
"""
Kathy =
id: "kathy"
name: "PI:NAME:<NAME>END_PI"
externalLink: "http://corneredcat.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-kathy-jackson-150x150.jpg"
description: """
“I’m a firearms instructor, a homeschool mom, a small business owner, a former magazine editor, and a freelance writer. I guess you could say I wear a lot of hats. My husband and I have been married more than 25 years, and we have five sons.
“I’m a frequent contributor to Women & Guns Magazine, and my work has appeared in SWAT Magazine. I co-authored [Lessons from Armed America](http://www.amazon.com/Lessons-Armed-America-Kathy-Jackson/dp/1453685553), a book which presents several real-life accounts of people protecting themselves from criminals and the lessons we can learn from those situations. My most recent book is: [The Cornered Cat: A Woman’s Guide to Concealed Carry](http://www.amazon.com/Cornered-Cat-Womans-Guide-Concealed/dp/0982248792/ref=sr_1_1?ie=UTF8&qid=1314026571&sr=8-1).”
"""
Terry =
id: "terry"
name: "PI:NAME:<NAME>END_PI"
externalLink: "http://weaselcraft.blogspot.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-terry-trahan.jpg"
description: """
Having spent many years dealing with violence, various subcultures, and street life gives PI:NAME:<NAME>END_PI a unique view on life and the dynamics of violence.
Having a strong interest in efficient answers to violence, de-escalation, urban survival, and escape, Terry’s focus is more on the civilian end of dealing with violence, covering armed, unarmed, improvised weapons, threat assessment, awareness, and unconventional strategies.
Terry has years of training in SouthEast Asian Martial Arts, heads the Kapatiran Suntukan Martial Arts organization, and is the lead instructor for [WeaselCraft](http://weaselcraft.blogspot.com/), his non-traditional approach to personal security, and specializes in all aspects of knives, from use, to design and function.
"""
Kaesey =
id: "kaese"
name: "PI:NAME:<NAME>END_PI"
externalLink: "http://practicalbudo.blogspot.com/"
mugShot: "http://violencedynamics.com/wp-content/uploads/2015/12/instructor-headshot-kasey-keckeisen.jpg"
description: """
PI:NAME:<NAME>END_PI is an experienced Police Officer, SWAT team leader, and SWAT training coordinator. PI:NAME:<NAME>END_PI is the United States Midwest Regional Director for the Edo Machi-Kata Taiho Jutsu organization, and the Minnesota State Director for One-On-One Control Tactics. Keckeisen Sensei Holds a 5th degree black belt in Jujutsu and is recognized as a Shihan by the International Shin Budo Association. Keckeisen Sensei also has an extensive training background in other Budo holding a 3rd degree black belt and teaching certificate from the International Yoshinkan Aikido Federation, a 1st degree black belt in Nippon KPI:NAME:<NAME>END_PI, and a 3rd degree black belt in Traditional Kodokan Judo.
"""
Instructor.reopenClass
FIXTURES: [PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PI, PI:NAME:<NAME>END_PIaesey]
`export default Instructor` |
[
{
"context": ">\n @them = null\n runs( =>\n kb.getUser('dakota', 'usernames', (err, them) =>\n @them = the",
"end": 200,
"score": 0.9905455708503723,
"start": 194,
"tag": "USERNAME",
"value": "dakota"
},
{
"context": "m = null\n runs( =>\n kb.getUser('dakota... | packages/client-app/internal_packages/keybase/spec/keybase-spec.coffee | cnheider/nylas-mail | 24,369 | kb = require '../lib/keybase'
xdescribe "keybase lib", ->
# TODO stub keybase calls?
it "should be able to fetch an account by username", ->
@them = null
runs( =>
kb.getUser('dakota', 'usernames', (err, them) =>
@them = them
)
)
waitsFor((=> @them != null), 2000)
runs( =>
expect(@them?[0].components.username.val).toEqual("dakota")
)
it "should be able to fetch an account by key fingerprint", ->
@them = null
runs( =>
kb.getUser('7FA5A43BBF2BAD1845C8D0E8145FCCD989968E3B', 'key_fingerprint', (err, them) =>
@them = them
)
)
waitsFor((=> @them != null), 2000)
runs( =>
expect(@them?[0].components.username.val).toEqual("dakota")
)
it "should be able to fetch a user's key", ->
@key = null
runs( =>
kb.getKey('dakota', (error, key) =>
@key = key
)
)
waitsFor((=> @key != null), 2000)
runs( =>
expect(@key?.startsWith('-----BEGIN PGP PUBLIC KEY BLOCK-----'))
)
it "should be able to return an autocomplete query", ->
@completions = null
runs( =>
kb.autocomplete('dakota', (error, completions) =>
@completions = completions
)
)
waitsFor((=> @completions != null), 2000)
runs( =>
expect(@completions[0].components.username.val).toEqual("dakota")
)
| 42150 | kb = require '../lib/keybase'
xdescribe "keybase lib", ->
# TODO stub keybase calls?
it "should be able to fetch an account by username", ->
@them = null
runs( =>
kb.getUser('dakota', 'usernames', (err, them) =>
@them = them
)
)
waitsFor((=> @them != null), 2000)
runs( =>
expect(@them?[0].components.username.val).toEqual("dakota")
)
it "should be able to fetch an account by key fingerprint", ->
@them = null
runs( =>
kb.getUser('<KEY>', 'key_fingerprint', (err, them) =>
@them = them
)
)
waitsFor((=> @them != null), 2000)
runs( =>
expect(@them?[0].components.username.val).toEqual("dakota")
)
it "should be able to fetch a user's key", ->
@key = null
runs( =>
kb.getKey('dakota', (error, key) =>
@key = key
)
)
waitsFor((=> @key != null), 2000)
runs( =>
expect(@key?.startsWith('-----BEGIN PGP PUBLIC KEY BLOCK-----'))
)
it "should be able to return an autocomplete query", ->
@completions = null
runs( =>
kb.autocomplete('dakota', (error, completions) =>
@completions = completions
)
)
waitsFor((=> @completions != null), 2000)
runs( =>
expect(@completions[0].components.username.val).toEqual("dakota")
)
| true | kb = require '../lib/keybase'
xdescribe "keybase lib", ->
# TODO stub keybase calls?
it "should be able to fetch an account by username", ->
@them = null
runs( =>
kb.getUser('dakota', 'usernames', (err, them) =>
@them = them
)
)
waitsFor((=> @them != null), 2000)
runs( =>
expect(@them?[0].components.username.val).toEqual("dakota")
)
it "should be able to fetch an account by key fingerprint", ->
@them = null
runs( =>
kb.getUser('PI:KEY:<KEY>END_PI', 'key_fingerprint', (err, them) =>
@them = them
)
)
waitsFor((=> @them != null), 2000)
runs( =>
expect(@them?[0].components.username.val).toEqual("dakota")
)
it "should be able to fetch a user's key", ->
@key = null
runs( =>
kb.getKey('dakota', (error, key) =>
@key = key
)
)
waitsFor((=> @key != null), 2000)
runs( =>
expect(@key?.startsWith('-----BEGIN PGP PUBLIC KEY BLOCK-----'))
)
it "should be able to return an autocomplete query", ->
@completions = null
runs( =>
kb.autocomplete('dakota', (error, completions) =>
@completions = completions
)
)
waitsFor((=> @completions != null), 2000)
runs( =>
expect(@completions[0].components.username.val).toEqual("dakota")
)
|
[
{
"context": "# Copyright Joyent, Inc. and other Node contributors.\n#\n# Permission",
"end": 18,
"score": 0.9992351531982422,
"start": 12,
"tag": "NAME",
"value": "Joyent"
}
] | test/simple/test-fs-write-stream-change-open.coffee | lxe/io.coffee | 0 | # Copyright Joyent, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
file = path.join(common.tmpDir, "write.txt")
stream = fs.WriteStream(file)
_fs_close = fs.close
_fs_open = fs.open
# change the fs.open with an identical function after the WriteStream
# has pushed it onto its internal action queue, but before it's
# returned. This simulates AOP-style extension of the fs lib.
fs.open = ->
_fs_open.apply fs, arguments
fs.close = (fd) ->
assert.ok fd, "fs.close must not be called with an undefined fd."
fs.close = _fs_close
fs.open = _fs_open
return
stream.write "foo"
stream.end()
process.on "exit", ->
assert.equal fs.open, _fs_open
return
| 170254 | # Copyright <NAME>, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
file = path.join(common.tmpDir, "write.txt")
stream = fs.WriteStream(file)
_fs_close = fs.close
_fs_open = fs.open
# change the fs.open with an identical function after the WriteStream
# has pushed it onto its internal action queue, but before it's
# returned. This simulates AOP-style extension of the fs lib.
fs.open = ->
_fs_open.apply fs, arguments
fs.close = (fd) ->
assert.ok fd, "fs.close must not be called with an undefined fd."
fs.close = _fs_close
fs.open = _fs_open
return
stream.write "foo"
stream.end()
process.on "exit", ->
assert.equal fs.open, _fs_open
return
| true | # Copyright PI:NAME:<NAME>END_PI, Inc. and other Node contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
common = require("../common")
assert = require("assert")
path = require("path")
fs = require("fs")
file = path.join(common.tmpDir, "write.txt")
stream = fs.WriteStream(file)
_fs_close = fs.close
_fs_open = fs.open
# change the fs.open with an identical function after the WriteStream
# has pushed it onto its internal action queue, but before it's
# returned. This simulates AOP-style extension of the fs lib.
fs.open = ->
_fs_open.apply fs, arguments
fs.close = (fd) ->
assert.ok fd, "fs.close must not be called with an undefined fd."
fs.close = _fs_close
fs.open = _fs_open
return
stream.write "foo"
stream.end()
process.on "exit", ->
assert.equal fs.open, _fs_open
return
|
[
{
"context": "name: 'Gleam'\nscopeName: 'source.gleam'\ntype: 'tree-sitter",
"end": 8,
"score": 0.8312268853187561,
"start": 7,
"tag": "NAME",
"value": "G"
},
{
"context": "name: 'Gleam'\nscopeName: 'source.gleam'\ntype: 'tree-sitter'\npa",
"end": 12,
"score": 0.7344307899475... | grammars/tree-sitter-gleam.cson | itsgreggreg/language-gleam | 5 | name: 'Gleam'
scopeName: 'source.gleam'
type: 'tree-sitter'
parser: 'tree-sitter-gleam'
fileTypes: [
'gleam',
]
comments:
start: '// '
scopes:
'comment': 'comment.block'
'"as"' : 'keyword.control.as'
'"assert"' : 'keyword.control.assert'
'"case"' : 'keyword.control.case'
'"const"' : 'keyword.control.const'
'"external"' : 'keyword.control.pub'
'"fn"' : 'keyword.control.function'
'"if"' : 'keyword.control.if'
'"import"' : 'keyword.control.import'
'"let"' : 'keyword.control.let'
'"opaque"' : 'keyword.control.opaque'
'"pub"' : 'keyword.control.pub'
'"todo"' : 'keyword.control.todo'
'"try"' : 'keyword.control.try'
'"tuple"' : 'keyword.control.tuple'
'"type"' : 'keyword.control.type'
'module_identifier' : 'other'
'type_identifier' : 'support.storage.type'
'discard_identifier' : 'comment'
'type_variable' : 'support.variable.type'
'identifier' : 'support.variable.identifier'
'string_literal' : 'string.quoted.double'
'integer_literal' : 'constant.numeric.integer'
'float_literal' : 'constant.numeric.decimal'
'type_identifier > named_argument > identifier:nth-child(0)' : 'entity.name.function'
'"["': 'punctuation.definition.begin.bracket.square'
'"]"': 'punctuation.definition.end.bracket.square'
'","': 'punctuation.separator.delimiter'
'"{"': 'punctuation.section.block.begin.bracket.curly'
'"}"': 'punctuation.section.block.end.bracket.curly'
'"("': 'punctuation.section.parens.begin.bracket.round'
'")"': 'punctuation.section.parens.end.bracket.round'
'''
binary_expression > "+",
binary_expression > "-",
binary_expression > "/",
binary_expression > "*",
binary_expression > "+.",
binary_expression > "-.",
binary_expression > "/.",
binary_expression > "*.",
binary_expression > "%",
''' : 'keyword.operator.math'
'''
compound_assignment_expr > "!=",
compound_assignment_expr > "==",
''' : 'keyword.operator.cmp'
'''
binary_expression > "&&",
binary_expression > "||",
''' : 'keyword.operator.logic.boolean'
'''
spread > "..",
''' : 'keyword.operator.spread'
'''
case_branch > "|",
''' : 'keyword.operator.alt_match'
'''
"->",
''' : 'keyword.operator.misc'
folds: [
{
start: {index: 0, type: '{'}
end: {index: -1, type: '}'}
}
{
start: {index: 0, type: '['}
end: {index: -1, type: ']'}
}
{
start: {index: 0, type: '('}
end: {index: -1, type: ')'}
}
]
| 97796 | name: '<NAME>leam'
scopeName: 'source.gleam'
type: 'tree-sitter'
parser: 'tree-sitter-gleam'
fileTypes: [
'gleam',
]
comments:
start: '// '
scopes:
'comment': 'comment.block'
'"as"' : 'keyword.control.as'
'"assert"' : 'keyword.control.assert'
'"case"' : 'keyword.control.case'
'"const"' : 'keyword.control.const'
'"external"' : 'keyword.control.pub'
'"fn"' : 'keyword.control.function'
'"if"' : 'keyword.control.if'
'"import"' : 'keyword.control.import'
'"let"' : 'keyword.control.let'
'"opaque"' : 'keyword.control.opaque'
'"pub"' : 'keyword.control.pub'
'"todo"' : 'keyword.control.todo'
'"try"' : 'keyword.control.try'
'"tuple"' : 'keyword.control.tuple'
'"type"' : 'keyword.control.type'
'module_identifier' : 'other'
'type_identifier' : 'support.storage.type'
'discard_identifier' : 'comment'
'type_variable' : 'support.variable.type'
'identifier' : 'support.variable.identifier'
'string_literal' : 'string.quoted.double'
'integer_literal' : 'constant.numeric.integer'
'float_literal' : 'constant.numeric.decimal'
'type_identifier > named_argument > identifier:nth-child(0)' : 'entity.name.function'
'"["': 'punctuation.definition.begin.bracket.square'
'"]"': 'punctuation.definition.end.bracket.square'
'","': 'punctuation.separator.delimiter'
'"{"': 'punctuation.section.block.begin.bracket.curly'
'"}"': 'punctuation.section.block.end.bracket.curly'
'"("': 'punctuation.section.parens.begin.bracket.round'
'")"': 'punctuation.section.parens.end.bracket.round'
'''
binary_expression > "+",
binary_expression > "-",
binary_expression > "/",
binary_expression > "*",
binary_expression > "+.",
binary_expression > "-.",
binary_expression > "/.",
binary_expression > "*.",
binary_expression > "%",
''' : 'keyword.operator.math'
'''
compound_assignment_expr > "!=",
compound_assignment_expr > "==",
''' : 'keyword.operator.cmp'
'''
binary_expression > "&&",
binary_expression > "||",
''' : 'keyword.operator.logic.boolean'
'''
spread > "..",
''' : 'keyword.operator.spread'
'''
case_branch > "|",
''' : 'keyword.operator.alt_match'
'''
"->",
''' : 'keyword.operator.misc'
folds: [
{
start: {index: 0, type: '{'}
end: {index: -1, type: '}'}
}
{
start: {index: 0, type: '['}
end: {index: -1, type: ']'}
}
{
start: {index: 0, type: '('}
end: {index: -1, type: ')'}
}
]
| true | name: 'PI:NAME:<NAME>END_PIleam'
scopeName: 'source.gleam'
type: 'tree-sitter'
parser: 'tree-sitter-gleam'
fileTypes: [
'gleam',
]
comments:
start: '// '
scopes:
'comment': 'comment.block'
'"as"' : 'keyword.control.as'
'"assert"' : 'keyword.control.assert'
'"case"' : 'keyword.control.case'
'"const"' : 'keyword.control.const'
'"external"' : 'keyword.control.pub'
'"fn"' : 'keyword.control.function'
'"if"' : 'keyword.control.if'
'"import"' : 'keyword.control.import'
'"let"' : 'keyword.control.let'
'"opaque"' : 'keyword.control.opaque'
'"pub"' : 'keyword.control.pub'
'"todo"' : 'keyword.control.todo'
'"try"' : 'keyword.control.try'
'"tuple"' : 'keyword.control.tuple'
'"type"' : 'keyword.control.type'
'module_identifier' : 'other'
'type_identifier' : 'support.storage.type'
'discard_identifier' : 'comment'
'type_variable' : 'support.variable.type'
'identifier' : 'support.variable.identifier'
'string_literal' : 'string.quoted.double'
'integer_literal' : 'constant.numeric.integer'
'float_literal' : 'constant.numeric.decimal'
'type_identifier > named_argument > identifier:nth-child(0)' : 'entity.name.function'
'"["': 'punctuation.definition.begin.bracket.square'
'"]"': 'punctuation.definition.end.bracket.square'
'","': 'punctuation.separator.delimiter'
'"{"': 'punctuation.section.block.begin.bracket.curly'
'"}"': 'punctuation.section.block.end.bracket.curly'
'"("': 'punctuation.section.parens.begin.bracket.round'
'")"': 'punctuation.section.parens.end.bracket.round'
'''
binary_expression > "+",
binary_expression > "-",
binary_expression > "/",
binary_expression > "*",
binary_expression > "+.",
binary_expression > "-.",
binary_expression > "/.",
binary_expression > "*.",
binary_expression > "%",
''' : 'keyword.operator.math'
'''
compound_assignment_expr > "!=",
compound_assignment_expr > "==",
''' : 'keyword.operator.cmp'
'''
binary_expression > "&&",
binary_expression > "||",
''' : 'keyword.operator.logic.boolean'
'''
spread > "..",
''' : 'keyword.operator.spread'
'''
case_branch > "|",
''' : 'keyword.operator.alt_match'
'''
"->",
''' : 'keyword.operator.misc'
folds: [
{
start: {index: 0, type: '{'}
end: {index: -1, type: '}'}
}
{
start: {index: 0, type: '['}
end: {index: -1, type: ']'}
}
{
start: {index: 0, type: '('}
end: {index: -1, type: ')'}
}
]
|
[
{
"context": " to be unique.\n # [node-hat]: https://github.com/substack/node-hat\n #\n # This method is synchronous, beca",
"end": 13701,
"score": 0.9908843040466309,
"start": 13693,
"tag": "USERNAME",
"value": "substack"
},
{
"context": " session while it is closed. Unless you'r... | node_modules/share/node_modules/browserchannel/lib/server.coffee | LaPingvino/rizzoma | 88 | # # A BrowserChannel server.
#
# - Its still pretty young, so there's probably bugs lurking around and the API
# will still change quickly.
# - Its missing integration tests
#
# It works in all the browsers I've tried.
#
# I've written this using the literate programming style to try it out. So, thats why
# there's a million comments everywhere.
#
# The server is implemented as connect middleware. Its intended to be used like this:
#
# ```
# server = connect(
# browserChannel (client) -> client.send 'hi'
# )
# ```
# ## Dependancies, helper methods and constant data
# `parse` helps us decode URLs in requests
{parse} = require 'url'
# `querystring` will help decode the URL-encoded forward channel data
querystring = require 'querystring'
# `fs` is used to read & serve the client library
fs = require 'fs'
# Client sessions are `EventEmitters`
{EventEmitter} = require 'events'
# Client session Ids are generated using `node-hat`
hat = require('hat').rack(40, 36)
# `randomInt(n)` generates and returns a random int smaller than n (0 <= k < n)
randomInt = (n) -> Math.floor(Math.random() * n)
# `randomArrayElement(array)` Selects and returns a random element from *array*
randomArrayElement = (array) -> array[randomInt(array.length)]
# For testing we'll override `setInterval`, etc with special testing stub versions (so
# we don't have to actually wait for actual *time*. To do that, we need local variable
# versions (I don't want to edit the global versions). ... and they'll just point to the
# normal versions anyway.
{setInterval, clearInterval, setTimeout, clearTimeout, Date} = global
# The module is configurable
defaultOptions =
# An optional array of host prefixes. Each browserchannel client will randomly pick
# from the list of host prefixes when it connects. This reduces the impact of per-host
# connection limits.
#
# All host prefixes should point to the same server. Ie, if your server's hostname
# is *example.com* and your hostPrefixes contains ['a', 'b', 'c'],
# a.example.com, b.example.com and c.example.com should all point to the same host
# as example.com.
hostPrefixes: null
# You can specify the base URL which browserchannel connects to. Change this if you want
# to scope browserchannel in part of your app, or if you want /channel to mean something
# else, or whatever.
base: '/channel'
# We'll send keepalives every so often to make sure the http connection isn't closed by
# eagar clients. The standard timeout is 30 seconds, so we'll default to sending them
# every 20 seconds or so.
keepAliveInterval: 20 * 1000
# After awhile (30 seconds or so) of not having a backchannel connected, we'll evict the
# session completely. This will happen whenever a user closes their browser.
sessionTimeoutInterval: 30 * 1000
# All server responses set some standard HTTP headers.
# To be honest, I don't know how many of these are necessary. I just copied
# them from google.
#
# The nocache headers in particular seem unnecessary since each client
# request includes a randomized `zx=junk` query parameter.
standardHeaders =
'Content-Type': 'text/plain'
'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate'
'Pragma': 'no-cache'
'Expires': 'Fri, 01 Jan 1990 00:00:00 GMT'
'X-Content-Type-Options': 'nosniff'
# Gmail also sends this, though I'm not really sure what it does...
# 'X-Xss-Protection': '1; mode=block'
# The one exception to that is requests destined for iframes. They need to
# have content-type: text/html set for IE to process the juicy JS inside.
ieHeaders = Object.create standardHeaders
ieHeaders['Content-Type'] = 'text/html'
# Google's browserchannel server adds some junk after the first message data is sent. I
# assume this stops some whole-page buffering in IE. I assume the data used is noise so it
# doesn't compress.
#
# I don't really know why google does this. I'm assuming there's a good reason to it though.
ieJunk = "7cca69475363026330a0d99468e88d23ce95e222591126443015f5f462d9a177186c8701fb45a6ffe
e0daf1a178fc0f58cd309308fba7e6f011ac38c9cdd4580760f1d4560a84d5ca0355ecbbed2ab715a3350fe0c47
9050640bd0e77acec90c58c4d3dd0f5cf8d4510e68c8b12e087bd88cad349aafd2ab16b07b0b1b8276091217a44
a9fe92fedacffff48092ee693af\n"
# If the user is using IE, instead of using XHR backchannel loaded using
# a forever iframe. When data is sent, it is wrapped in <script></script> tags
# which call functions in the browserchannel library.
#
# This method wraps the normal `.writeHead()`, `.write()` and `.end()` methods by
# special versions which produce output based on the request's type.
#
# This **is not used** for:
#
# - The first channel test
# - The first *bind* connection a client makes. The server sends arrays there, but the
# connection is a POST and it returns immediately. So that request happens using XHR/Trident
# like regular forward channel requests.
messagingMethods = (query, res) ->
type = query.TYPE
if type == 'html'
junkSent = false
methods =
writeHead: ->
res.writeHead 200, 'OK', ieHeaders
res.write '<html><body>'
domain = query.DOMAIN
# If the iframe is making the request using a secondary domain, I think we need
# to set the `domain` to the original domain so that we can call the response methods.
if domain and domain != ''
# Make sure the domain doesn't contain anything by naughty by `JSON.stringify()`-ing
# it before passing it to the client. There are XSS vulnerabilities otherwise.
res.write "<script>try{document.domain=#{JSON.stringify domain};}catch(e){}</script>\n"
write: (data) ->
# The data is passed to `m()`, which is bound to *onTridentRpcMessage_* in the client.
res.write "<script>try {parent.m(#{JSON.stringify data})} catch(e) {}</script>\n"
unless junkSent
res.write ieJunk
junkSent = true
end: ->
# Once the data has been received, the client needs to call `d()`, which is bound to
# *onTridentDone_* with success=*true*.
# The weird spacing of this is copied from browserchannel. Its really not necessary.
res.end "<script>try {parent.d(); }catch (e){}</script>\n"
# This is a helper method for signalling an error in the request back to the client.
writeError: (statusCode, message) ->
# The HTML (iframe) handler has no way to discover that the embedded script tag
# didn't complete successfully. To signal errors, we return **200 OK** and call an
# exposed rpcClose() method on the page.
methods.writeHead()
res.end "<script>try {parent.rpcClose(#{JSON.stringify message})} catch(e){}</script>\n"
# For some reason, sending data during the second test (111112) works slightly differently for
# XHR, but its identical for html encoding. We'll use a writeRaw() method in that case, which
# is copied in the case of html.
methods.writeRaw = methods.write
methods
else
# For normal XHR requests, we send data normally.
writeHead: -> res.writeHead 200, 'OK', standardHeaders
write: (data) -> res.write "#{data.length}\n#{data}"
writeRaw: (data) -> res.write data
end: -> res.end()
writeError: (statusCode, message) ->
res.writeHead statusCode, standardHeaders
res.end message
# For telling the client its done bad.
#
# It turns out google's server isn't particularly fussy about signalling errors using the proper
# html RPC stuff, so this is useful for html connections too.
sendError = (res, statusCode, message) ->
res.writeHead statusCode, message
res.end "<html><body><h1>#{message}</h1></body></html>"
return
# ## Parsing client maps from the forward channel
#
# The client sends data in a series of url-encoded maps. The data is encoded like this:
#
# ```
# count=2&ofs=0&req0_x=3&req0_y=10&req1_abc=def
# ```
#
# First, we need to buffer up the request response and query string decode it.
bufferPostData = (req, callback) ->
data = []
req.on 'data', (chunk) ->
data.push chunk.toString 'utf8'
req.on 'end', ->
data = data.join ''
callback data
# Next, we'll need to decode the incoming client data into an array of objects.
#
# The data could be in two different forms:
#
# - Classical browserchannel format, which is a bunch of string->string url-encoded maps
# - A JSON object
#
# We can tell what format the data is in by inspecting the content-type header
#
# ## URL Encoded data
#
# Essentially, url encoded the data looks like this:
#
# ```
# { count: '2',
# ofs: '0',
# req0_x: '3',
# req0_y: '10',
# req1_abc: 'def'
# }
# ```
#
# ... and we will return an object in the form of `[{x:'3', y:'10'}, {abc: 'def'}, ...]`
#
# ## JSON Encoded data
#
# JSON encoded the data looks like:
#
# ```
# { ofs: 0
# , data: [null, {...}, 1000.4, 'hi', ...]
# }
# ```
#
# or `null` if there's no data.
#
# This function returns null if there's no data or {ofs, json:[...]} or {ofs, maps:[...]}
decodeData = (req, data) ->
if req.headers['content-type'] == 'application/json'
data = JSON.parse data
return null if data is null # There's no data. This is a valid response.
# We'll restructure it slightly to mark the data as JSON rather than maps.
{ofs, data} = data
{ofs, json:data}
else
# Maps. Ugh.
data = querystring.parse data
count = parseInt data.count
return null if count is 0
# ofs will be missing if count is zero
ofs = parseInt data.ofs
throw new Error 'invalid map data' if isNaN count or isNaN ofs
throw new Error 'Invalid maps' unless count == 0 or (count > 0 and data.ofs?)
maps = new Array count
# Scan through all the keys in the data. Every key of the form:
# `req123_xxx` will be used to populate its map.
regex = /^req(\d+)_(.+)$/
for key, val of data
match = regex.exec key
if match
id = match[1]
mapKey = match[2]
map = (maps[id] ||= {})
# The client uses `mapX_type=_badmap` to signify an error encoding a map.
continue if id == 'type' and mapKey == '_badmap'
map[mapKey] = val
{ofs, maps}
# This is a helper method to order the handling of messages / requests / whatever.
#
# Use it like this:
# inOrder = order 0
#
# inOrder 1, -> console.log 'second'
# inOrder 0, -> console.log 'first'
#
# Start is the ID of the first element we expect to receive. If we get data for earlier
# elements, we'll play them anyway if playOld is truthy.
order = (start, playOld) ->
# Base is the ID of the (missing) element at the start of the queue
base = start
# The queue will start with about 10 elements. Elements of the queue are undefined
# if we don't have data for that queue element.
queue = new Array 10
(seq, callback) ->
# Its important that all the cells of the array are truthy if we have data. We'll use an
# empty function instead of null.
callback or= ->
# Ignore old messages, or play them back immediately if playOld=true
if seq < base
callback() if playOld
else
queue[seq - base] = callback
while queue[0]
callback = queue.shift()
base++
callback()
# We need access to the client's sourcecode. I'm going to get it using a synchronous file call
# (it'll be fast anyway, and only happen once).
#
# I'm also going to set an etag on the client data so the browser client will be cached. I'm kind of
# uncomfortable about adding complexity here because its not like this code hasn't been written
# before, but.. I think a lot of people will use this API.
#
# I should probably look into hosting the client code as a javascript module using that client-side
# npm thing.
clientFile = "#{__dirname}/../dist/bcsocket.js"
clientStats = fs.statSync clientFile
try
clientCode = fs.readFileSync clientFile, 'utf8'
catch e
console.error 'Could not load the client javascript. Run `cake client` to generate it.'
throw e
# This is mostly to help development, but if the client is recompiled, I'll pull in a new version.
# This isn't tested by the unit tests - but its not a big deal.
fs.watchFile clientFile, persistent: false, (curr, prev) ->
if curr.mtime.getTime() != prev.mtime.getTime()
# Putting a synchronous file call here will stop the whole server while the client is reloaded.
# Again, this will only happen during development so its not a big deal.
console.log "Reloading client JS"
clientCode = fs.readFileSync clientFile, 'utf8'
clientStats = curr
# ---
#
# # The server middleware
#
# The server module returns a function, which you can call with your configuration
# options. It returns your configured connect middleware, which is actually another function.
module.exports = browserChannel = (options, onConnect) ->
if typeof onConnect == 'undefined'
onConnect = options
options = {}
options ||= {}
options[option] ?= value for option, value of defaultOptions
# Strip off a trailing slash in base.
base = options.base
base = base[... base.length - 1] if base.match /\/$/
# Add a leading slash back on base
base = "/#{base}" unless base.match /^\//
# map from sessionId -> session
sessions = {}
# Host prefixes provide a way to skirt around connection limits. They're only
# really important for old browsers.
getHostPrefix = ->
if options.hostPrefixes
randomArrayElement options.hostPrefixes
else
null
# # Create a new client session.
#
# This method will start a new client session.
#
# Session ids are generated by [node-hat]. They are guaranteed to be unique.
# [node-hat]: https://github.com/substack/node-hat
#
# This method is synchronous, because a database will never be involved in browserchannel
# session management. Browserchannel sessions only last as long as the user's browser
# is open. If there's any connection turbulence, the client will reconnect and get
# a new session id.
#
# Sometimes a client will specify an old session ID and old array ID. In this case, the client
# is reconnecting and we should evict the named session (if it exists).
createSession = (address, query, headers) ->
{RID:initialRid, CVER:appVersion, OSID:oldSessionId, OAID:oldArrayId} = query
if oldSessionId? and (oldSession = sessions[oldSessionId])
oldSession._acknowledgeArrays oldArrayId
oldSession.close 'Reconnected'
# Create a new session. Sessions extend node's [EventEmitter][] so they have access to
# goodies like `session.on(event, handler)`, `session.emit('paarty')`, etc.
# [EventEmitter]: http://nodejs.org/docs/v0.4.12/api/events.html
session = new EventEmitter
# The session's unique ID for this connection
session.id = hat()
# The client stores its IP address and headers from when it first opened the session. The
# handler can use this information for authentication or something.
session.address = address
session.headers = headers
# The session is a little state machine. It has the following states:
#
# - **init**: The session has been created and its sessionId hasn't been sent yet.
# The session moves to the **ok** state when the first data chunk is sent to the
# client.
#
# - **ok**: The session is sitting pretty and ready to send and receive data.
# The session will spend most of its time in this state.
#
# - **closed**: The session has been removed from the session list. It can no longer
# be used for any reason.
#
# It is invalid to send arrays to a session while it is closed. Unless you're
# Bruce Willis...
session.state = 'init'
# The state is modified through this method. It emits events when the state changes.
# (yay)
changeState = (newState) ->
oldState = session.state
session.state = newState
session.emit 'state changed', session.state, oldState
# The server sends messages to the client via a hanging GET request. Of course,
# the client has to be the one to open that request.
#
# This is a handle to null, or {res, methods, chunk}
#
# - **res** is the http response object
# - **methods** is a map of send(), etc methods for communicating properly with the backchannel -
# this will be different if the request comes from IE or not.
# - **chunk** specifies whether or not we're going to keep the connection open across multiple
# messages. If there's a buffering proxy in the way of the connection, we can't respond a bit at
# a time, so we close the backchannel after each data chunk. The client decides this during
# testing and passes a CI= parameter to the server when the backchannel connection is established.
# - **bytesSent** specifies how many bytes of data have been sent through the backchannel. We periodically
# close the backchannel and let the client reopen it, so things like the chrome web inspector stay
# usable.
backChannel = null
# The server sends data to the client by sending *arrays*. It seems a bit silly that
# client->server messages are maps and server->client messages are arrays, but there it is.
#
# Each entry in this array is of the form [id, data].
outgoingArrays = []
# `lastArrayId` is the array ID of the last queued array
lastArrayId = -1
# Every request from the client has an *AID* parameter which tells the server the ID
# of the last request the client has received. We won't remove arrays from the outgoingArrays
# list until the client has confirmed its received them.
#
# In `lastSentArrayId` we store the ID of the last array which we actually sent.
lastSentArrayId = -1
# I would like this method to be private or something, but it needs to be accessed from
# the HTTP request code below. The _ at the start will hopefully make people think twice
# before using it.
# I would like this method to be private or something, but it needs to be accessed from
# the HTTP request code below. The _ at the start will hopefully make people think twice
# before using it.
#
# Installs `res` as the session's (one and only) backchannel, replacing any previous
# one, then flushes whatever arrays are buffered.
session._setBackChannel = (res, query) ->
  # A session has at most one backchannel - evict the old one first.
  clearBackChannel()
  backChannel =
    res: res
    methods: messagingMethods query, res
    # CI=0 means the client found its connection unbuffered during testing, so the
    # backchannel can stay open and stream chunks.
    chunk: query.CI == '0'
    bytesSent: 0
  res.connection.once 'close', -> clearBackChannel(res)
  # We'll start the heartbeat interval and clear out the session timeout.
  # The session timeout will be started again if the backchannel connection closes for
  # any reason.
  refreshHeartbeat()
  clearTimeout sessionTimeout
  # When a new backchannel is created, its possible that the old backchannel is dead.
  # In this case, its possible that previously sent arrays haven't been received.
  # By resetting lastSentArrayId, we're effectively rolling back the status of sent arrays
  # to only those arrays which have been acknowledged.
  lastSentArrayId = outgoingArrays[0].id - 1 if outgoingArrays.length > 0
  # Send any arrays we've buffered now that we have a backchannel
  @flush()
# If we haven't sent anything for `keepAliveInterval` ms (20 seconds by default), we'll
# send a little `['noop']` to the client so it knows we haven't forgotten it. (And to
# make sure the backchannel connection doesn't time out.)
heartbeat = null
# This method removes the back channel and any state associated with it. It'll get called
# when the backchannel closes naturally, is replaced or when the connection closes.
clearBackChannel = (res) ->
  # clearBackChannel doesn't do anything if we call it repeatedly.
  return unless backChannel
  # Its important that we only delete the backchannel if the closed connection is actually
  # the backchannel we're currently using.
  return if res? and res != backChannel.res
  # The heartbeat is created with setInterval (see refreshHeartbeat), so cancel it with
  # clearInterval - the previous clearTimeout only worked because node's timeouts and
  # intervals happen to share handles. Conveniently it has no effect on null.
  clearInterval heartbeat
  backChannel.methods.end()
  backChannel = null
  # Whenever we don't have a backchannel, we run the session timeout timer.
  refreshSessionTimeout()
# This method sets / resets the keepalive heartbeat to the full keepAliveInterval
# (20 seconds by default).
refreshHeartbeat = ->
  # The heartbeat is an interval, so it must be cancelled with clearInterval
  # (clearTimeout only worked here by accident of node's timer implementation).
  clearInterval heartbeat
  heartbeat = setInterval (-> session.send ['noop']), options.keepAliveInterval
# The session will close if there's been no backchannel for awhile.
sessionTimeout = null
# Restart the no-backchannel countdown (sessionTimeoutInterval ms, 30s by default).
refreshSessionTimeout = ->
  clearTimeout sessionTimeout
  sessionTimeout = setTimeout (-> session.close 'Timed out'), options.sessionTimeoutInterval
# Since the session doesn't start with a backchannel, we'll kick off the timeout timer as soon as its
# created.
refreshSessionTimeout()
# The arrays get removed once they've been acknowledged
# The client acknowledges array `id` and everything before it: drop those entries
# from the front of the queue and fire their confirm callbacks.
session._acknowledgeArrays = (id) ->
  # AIDs arrive as strings in the query string - normalise to a number first.
  id = parseInt id if typeof id is 'string'
  loop
    entry = outgoingArrays[0]
    break unless entry? and entry.id <= id
    outgoingArrays.shift()
    # I've got no idea what to do if we get an exception thrown here. The session will end up
    # in an inconsistant state...
    entry.confirmcallback?()
  return
# Queue an array to be sent. The optional callbacks notifies a caller when the array has been
# sent, and then received by the client.
#
# queueArray returns the ID of the queued data chunk.
queueArray = (data, sendcallback, confirmcallback) ->
  if session.state is 'closed'
    throw new Error "Cannot queue array when the session is already closed"
  # Ids are allocated sequentially; the queue entry keeps both callbacks for later.
  lastArrayId += 1
  outgoingArrays.push {id: lastArrayId, data, sendcallback, confirmcallback}
  lastArrayId
# The session has just been created. The first thing it needs to tell the client
# is its session id and host prefix and stuff.
#
# It would be pretty easy to add a callback here setting the client status to 'ok' or
# something, but its not really necessary. The client has already connected once the first
# POST /bind has been received.
# The 'c' handshake array carries [session id, host prefix, protocol version (8)].
queueArray ['c', session.id, getHostPrefix(), 8]
# Send the array data through the backchannel. This takes an optional callback which
# will be called with no arguments when the client acknowledges the array, or called with an
# error object if the client disconnects before the array is sent.
#
# queueArray can also take a callback argument which is called when the session sends the message
# in the first place. I'm not sure if I should expose this through send - I can't tell if its
# useful beyond the server code.
session.send = (arr, callback) ->
  # Queue first, then schedule an async flush. The returned id names the queued array.
  queuedId = queueArray arr, null, callback
  @flush()
  queuedId
# ### Maps
#
# The client sends maps to the server using POST requests. Its possible for the requests
# to come in out of order, so sometimes we need to buffer up incoming maps and reorder them
# before emitting them to the user.
#
# Each map has an ID (which starts at 0 when the session is first created).
# We'll emit received data to the user immediately if they're in order, but if they're out of order
# we'll use the little order helper above to order them. The order helper is instructed to not
# emit any old messages twice.
#
# There's a potential DOS attack here whereby a client could just spam the server with
# out-of-order maps until it runs out of memory. We should dump a session if there are
# too many entries in this dictionary.
# Reorders incoming client maps strictly by map id; already-seen ids are never replayed.
mapBuffer = order 0, false
# This method is called whenever we get maps from the client. Offset is the ID of the first
# map. The data could either be maps or JSON data. If its maps, data contains {maps} and if its
# JSON data, maps contains {JSON}.
#
# Browserchannel has 2 different mechanisms for consistantly ordering messages in the forward channel:
#
# - Each forward channel request contains a request ID (RID=X), which start at a random value
# (set with the first session create packet). These increment by 1 with each request.
#
# If a request fails, it might be retried with the same RID as the previous message, and with extra
# maps tacked on the end. We need to handle the maps in this case.
#
# - Each map has an ID, counting from 0. ofs= in the POST data tells the server the ID of the first
# map in a request.
#
# As far as I can tell, the RID stuff can mostly be ignored. The one place it is important is in
# handling disconnect messages. The session should only be disconnected by a disconnect message when
# the preceeding messages have been received.
# All requests are handled in order too, though if not for disconnecting I don't think it would matter.
# Because of the funky retry-has-extra-maps logic, we'll allow processing requests twice.
# Orders whole forward-channel requests by RID. A retried request may reuse an old RID
# (with extra maps tacked on), so old RIDs are replayed rather than dropped (playOld=true).
ridBuffer = order initialRid, true
# Handle decoded forward-channel data. `rid` orders whole requests; `data.ofs` orders
# individual maps / JSON messages within and across requests.
session._receivedData = (rid, data) ->
  ridBuffer rid, ->
    # A request can legitimately carry no data at all.
    return if data is null
    throw new Error 'Invalid data' unless data.maps? or data.json?
    # (A redundant `ridBuffer rid` call used to live here; by the time this callback
    # runs the order helper has already advanced past rid, so it was a no-op.)
    id = data.ofs
    # First, classic browserchannel maps.
    if data.maps
      # If an exception is thrown during this loop, I'm not really sure what the behaviour should be.
      for map in data.maps
        # `do (map)` captures the loop variable so each queued callback sees its own map.
        mapBuffer id++, do (map) -> ->
          return if session.state is 'closed'
          session.emit 'map', map
          # If you specify the key as JSON, the server will try to decode JSON data from the map and emit
          # 'message'. This is a much nicer way to message the server.
          if map.JSON?
            # Malformed JSON from the client is deliberately ignored (best effort).
            try
              message = JSON.parse map.JSON
              session.emit 'message', message
    else
      # We have data.json. We'll just emit it directly.
      for message in data.json
        # Bugfix: this previously read `do (map)`, so every queued callback closed over
        # the *shared* loop variable `message` and emitted only the last message in the
        # batch. Capturing `message` delivers each JSON message correctly.
        mapBuffer id++, do (message) -> ->
          return if session.state is 'closed'
          session.emit 'message', message
# Close the session, but only once every forward-channel request before `rid` has
# been processed.
session._disconnectAt = (rid) ->
  ridBuffer rid, ->
    session.close 'Disconnected'
# When we receive forwardchannel data, we reply with a special little 3-variable array to tell the
# client if it should reopen the backchannel.
#
# This method returns what the forward channel should reply with.
session._backChannelStatus = ->
  unsentCount = lastArrayId - lastSentArrayId
  # Arrays which went over the wire but haven't been acknowledged yet.
  sentUnacked = outgoingArrays.slice 0, outgoingArrays.length - unsentCount
  if sentUnacked.length is 0
    outstandingBytes = 0
  else
    # Only the payloads count - ids and callbacks are server-side bookkeeping. The
    # client just feeds this number into a rough reopen-the-backchannel heuristic.
    payloads = (entry.data for entry in sentUnacked)
    outstandingBytes = JSON.stringify(payloads).length
  [
    (if backChannel then 1 else 0)
    lastSentArrayId
    outstandingBytes
  ]
# ## Encoding server arrays for the back channel
#
# The server sends data to the client in **chunks**. Each chunk is a *JSON* array prefixed
# by its length in bytes.
#
# The array looks like this:
#
# ```
# [
# [100, ['message', 'one']],
# [101, ['message', 'two']],
# [102, ['message', 'three']]
# ]
# ```
#
# Each individial message is prefixed by its *array id*, which is a counter starting at 0
# when the session is first created and incremented with each array.
# This will actually send the arrays to the backchannel on the next tick if the backchannel
# is alive.
# Flushing is asynchronous, so several send() calls within one tick get batched into
# a single backchannel write.
session.flush = ->
  process.nextTick ->
    # The backchannel may have disappeared between the flush request and this tick.
    if backChannel
      numUnsentArrays = lastArrayId - lastSentArrayId
      if numUnsentArrays > 0
        arrays = outgoingArrays[outgoingArrays.length - numUnsentArrays ...]
        # I've abused outgoingArrays to also contain some callbacks. We only send [id, data] to
        # the client.
        data = ([id, data] for {id, data} in arrays)
        bytes = JSON.stringify(data) + "\n"
        # **Away!**
        backChannel.methods.write bytes
        backChannel.bytesSent += bytes.length
        lastSentArrayId = lastArrayId
        # Fire any send callbacks on the messages. These callbacks should only be called once,
        # which is why each one is deleted after it fires.
        # Again, not sure what to do if there are exceptions here.
        for a in arrays
          if a.sendcallback?
            a.sendcallback?()
          delete a.sendcallback
      # Non-chunking (buffered-proxy) connections are closed after every write; chunking
      # connections are recycled after ~10kb so tools like the web inspector stay usable.
      if !backChannel.chunk or backChannel.bytesSent > 10 * 1024
        clearBackChannel()
      # The first backchannel is the client's initial connection. Once we've sent the first
      # data chunk to the client, we've officially opened the connection.
      changeState 'ok' if session.state == 'init'
# The client's reported application version, or null. This is sent when the
# connection is first requested, so you can use it to make your application die / stay
# compatible with people who don't close their browsers.
# Normalises a missing/empty CVER query parameter to null.
session.appVersion = appVersion or null
# Signal to a client that it should stop trying to connect. This has no other effect
# on the server session.
#
# `stop` takes a callback which will be called once the message has been *sent* by the server.
# Typically, you should call it like this:
#
# ```
# session.stop ->
# session.close()
# ```
#
# I considered making this automatically close the connection after you've called it, or after
# you've sent the stop message or something, but if I did that it wouldn't be obvious that you
# can still receive messages after stop() has been called. (Because you can!). That would never
# come up when you're testing locally, but it *would* come up in production. This is more obvious.
session.stop = (callback) ->
  unless @state is 'closed'
    # `callback` goes in the sendcallback slot: it fires once the stop array is written.
    queueArray ['stop'], callback, null
    @flush()
  return
# This closes a session and makes the server forget about it.
#
# The client might try and reconnect if you only call `close()`. It'll get a new session if it does so.
#
# close takes an optional message argument, which is passed to the send event handlers.
session.close = (message) ->
  # You can't double-close.
  return if @state == 'closed'
  changeState 'closed'
  # `message` reaches the 'close' listeners and every unconfirmed array's callback.
  @emit 'close', message
  clearBackChannel()
  clearTimeout sessionTimeout
  # Error out queued-but-unconfirmed arrays. (This parses as new Error(message || 'closed').)
  for {confirmcallback} in outgoingArrays
    confirmcallback?(new Error message || 'closed')
  # Forget the session, so a reconnecting client gets a fresh one.
  delete sessions[@id]
  #console.log "closed #{@id}"
# Register the session so incoming requests can find it by SID, and return it.
sessions[session.id] = session
session
# This is the returned middleware. Connect middleware is a function which
# takes in an http request, an http response and a next method.
#
# The middleware can do one of two things:
#
# - Handle the request, sending data back to the server via the response
# - Call `next()`, which allows the next middleware in the stack a chance to
# handle the request.
middleware = (req, res, next) ->
  {query, pathname} = parse req.url, true
  #console.warn req.method, req.url
  # If base is /foo, we don't match /foobar. (Currently no unit tests for this)
  return next() if pathname.substring(0, base.length + 1) != "#{base}/"
  {writeHead, write, writeRaw, end, writeError} = messagingMethods query, res
  # # Serving the client
  #
  # The browserchannel server hosts a usable web client library at /CHANNEL/bcsocket.js.
  # This library wraps the google closure library client implementation.
  if pathname is "#{base}/bcsocket.js"
    etag = "\"#{clientStats.size}-#{clientStats.mtime.getTime()}\""
    res.writeHead 200, 'OK',
      'Content-Type': 'application/javascript',
      'ETag': etag,
      'Content-Length': clientCode.length
    # This code is manually tested because it looks like its impossible to send HEAD requests
    # using nodejs's HTTP library at time of writing (0.4.12). (Yeah, I know, rite?)
    if req.method is 'HEAD'
      res.end()
    else
      res.end clientCode
  # # Connection testing
  #
  # Before the browserchannel client connects, it tests the connection to make
  # sure its working, and to look for buffering proxies.
  #
  # The server-side code for connection testing is completely stateless.
  else if pathname is "#{base}/test"
    # This server only supports browserchannel protocol version **8**.
    return sendError res, 400, 'Version 8 required' unless query.VER is '8'
    #### Phase 1: Server info
    # The client requests host prefixes. The server responds with an array of
    # ['hostprefix' or null, 'blockedprefix' or null]. Blocked prefixes (a network
    # admin blacklisting mechanism) aren't supported by node-browserchannel.
    if query.MODE == 'init' and req.method == 'GET'
      hostPrefix = getHostPrefix()
      blockedPrefix = null # Blocked prefixes aren't supported.
      # We add an extra special header to tell the client that this server likes
      # json-encoded forward channel data over form urlencoded channel data.
      headers = Object.create standardHeaders
      headers['X-Accept'] = 'application/json; application/x-www-form-urlencoded'
      # This is a straight-up normal HTTP request like the forward channel requests.
      # We don't use the funny iframe write methods.
      res.writeHead 200, 'OK', headers
      res.end(JSON.stringify [hostPrefix, blockedPrefix])
    else
      #### Phase 2: Buffering proxy detection
      # We reply with '11111', then 2 seconds later '2'. The client should get the data
      # in 2 chunks - but it won't if a misbehaving proxy is buffering the connection.
      writeHead()
      writeRaw '11111'
      setTimeout (-> writeRaw '2'; end()), 2000
  # # BrowserChannel connection
  #
  # - The **forward channel** (client -> server) is a POST request per message batch.
  # - The **back channel** (server -> client) is a hanging GET request, closed after
  #   each server message if chunking is disallowed.
  else if pathname == "#{base}/bind"
    # I'm copying the behaviour of unknown SIDs below. I don't know how the client
    # is supposed to detect this error, but, eh.
    return sendError res, 400, 'Version 8 required' unless query.VER is '8'
    # All browserchannel connections have an associated session object. A session
    # is created immediately if the connection is new.
    if query.SID
      session = sessions[query.SID]
      # This is a special error code for the client. It tells the client to abandon its
      # connection request and reconnect.
      return sendError res, 400, 'Unknown SID' unless session
    session._acknowledgeArrays query.AID if query.AID? and session
    # ### Forward Channel
    if req.method == 'POST'
      if session == undefined
        # The session is new! Make them a new session object and let the
        # application know.
        session = createSession req.connection.remoteAddress, query, req.headers
        onConnect? session
      bufferPostData req, (data) ->
        try
          data = decodeData req, data
          session._receivedData query.RID, data
        catch e
          console.warn 'Error parsing forward channel', e.stack
          return sendError res, 400, 'Bad data'
        if session.state is 'init'
          # The initial forward channel request is also used as a backchannel for the server's
          # initial data (session id, etc). This connection is a little bit special - it is always
          # encoded using length-prefixed json encoding and it is closed as soon as the first
          # chunk is sent.
          res.writeHead 200, 'OK', standardHeaders
          session._setBackChannel res, CI:1, TYPE:'xmlhttp', RID:'rpc'
          session.flush()
        else if session.state is 'closed'
          # If the onConnect handler called close() immediately, session.state can be already closed
          # at this point. I'll assume there was an authentication problem and treat this as a
          # forbidden connection attempt.
          sendError res, 403, 'Forbidden'
        else
          # On normal forward channels, we reply to the request by telling the session
          # if our backchannel is still live and telling it how many unconfirmed
          # arrays we have.
          response = JSON.stringify session._backChannelStatus()
          res.writeHead 200, 'OK', standardHeaders
          res.end "#{response.length}\n#{response}"
    else if req.method is 'GET'
      # ### Back channel
      #
      # GET messages are usually backchannel requests (server->client). Backchannel messages
      # are handled by the session object.
      if query.TYPE in ['xmlhttp', 'html']
        # Reject a missing or implausibly short session id up front. (This check previously
        # used `&&`, which could never fire: a non-string SID crashed on `.length` and a
        # short string failed the typeof clause. With `or`, both malformed cases get a
        # clean 400 and `session` is guaranteed to be set below.)
        return sendError res, 400, 'Invalid SID' if typeof query.SID isnt 'string' or query.SID.length < 5
        return sendError res, 400, 'Expected RPC' unless query.RID is 'rpc'
        writeHead()
        session._setBackChannel res, query
      # The client can manually disconnect by making a GET request with TYPE='terminate'
      else if query.TYPE is 'terminate'
        # We don't send any data in the response to the disconnect message.
        #
        # The client implements this using an img= appended to the page.
        session?._disconnectAt query.RID
        res.writeHead 200, 'OK', standardHeaders
        res.end()
    else
      res.writeHead 405, 'Method Not Allowed', standardHeaders
      res.end "Method not allowed"
  else
    # We'll 404 the user instead of letting another handler take care of it.
    # Users shouldn't be using the specified URL prefix for anything else.
    res.writeHead 404, 'Not Found', standardHeaders
    res.end "Not found"
# Close every open session - usable as a server shutdown hook.
middleware.close = -> session.close() for id, session of sessions
# This is an undocumented, untested treat - if you pass the HTTP server / connect server to
# browserchannel through the options object, it can attach a close listener for you automatically.
options.server?.on 'close', middleware.close
# The configured middleware function is browserChannel()'s return value.
middleware
# This will override the timer methods (`setInterval`, etc) with the testing stub versions,
# which are way faster. The replacements are assigned into the module-level locals that
# shadow the globals, so all session timers pick them up.
browserChannel._setTimerMethods = (methods) ->
  {setInterval, clearInterval, setTimeout, clearTimeout, Date} = methods
| 123374 | # # A BrowserChannel server.
#
# - Its still pretty young, so there's probably bugs lurking around and the API
# will still change quickly.
# - Its missing integration tests
#
# It works in all the browsers I've tried.
#
# I've written this using the literate programming style to try it out. So, thats why
# there's a million comments everywhere.
#
# The server is implemented as connect middleware. Its intended to be used like this:
#
# ```
# server = connect(
# browserChannel (client) -> client.send 'hi'
# )
# ```
# ## Dependancies, helper methods and constant data
# `parse` helps us decode URLs in requests
{parse} = require 'url'
# `querystring` will help decode the URL-encoded forward channel data
querystring = require 'querystring'
# `fs` is used to read & serve the client library
fs = require 'fs'
# Client sessions are `EventEmitters`
{EventEmitter} = require 'events'
# Client session Ids are generated using `node-hat`
hat = require('hat').rack(40, 36)
# `randomInt(n)` returns a uniformly random integer k with 0 <= k < n
randomInt = (n) -> Math.floor n * Math.random()
# `randomArrayElement(array)` picks a uniformly random element out of *array*
randomArrayElement = (array) -> array[randomInt array.length]
# For testing we'll override `setInterval`, etc with special testing stub versions (so
# we don't have to actually wait for actual *time*. To do that, we need local variable
# versions (I don't want to edit the global versions). ... and they'll just point to the
# normal versions anyway.
# Local, swappable aliases for the global timer functions (see _setTimerMethods below).
{setInterval, clearInterval, setTimeout, clearTimeout, Date} = global
# The module is configurable
defaultOptions =
  # An optional array of host prefixes. Each browserchannel client will randomly pick
  # from the list of host prefixes when it connects. This reduces the impact of per-host
  # connection limits.
  #
  # All host prefixes should point to the same server. Ie, if your server's hostname
  # is *example.com* and your hostPrefixes contains ['a', 'b', 'c'],
  # a.example.com, b.example.com and c.example.com should all point to the same host
  # as example.com.
  hostPrefixes: null
  # You can specify the base URL which browserchannel connects to. Change this if you want
  # to scope browserchannel in part of your app, or if you want /channel to mean something
  # else, or whatever.
  base: '/channel'
  # We'll send keepalives every so often to make sure the http connection isn't closed by
  # eagar clients. The standard timeout is 30 seconds, so we'll default to sending them
  # every 20 seconds or so. (All intervals are in milliseconds.)
  keepAliveInterval: 20 * 1000
  # After awhile (30 seconds or so) of not having a backchannel connected, we'll evict the
  # session completely. This will happen whenever a user closes their browser.
  sessionTimeoutInterval: 30 * 1000
# All server responses set some standard HTTP headers.
# To be honest, I don't know how many of these are necessary. I just copied
# them from google.
#
# The nocache headers in particular seem unnecessary since each client
# request includes a randomized `zx=junk` query parameter.
standardHeaders =
  'Content-Type': 'text/plain'
  'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate'
  'Pragma': 'no-cache'
  'Expires': 'Fri, 01 Jan 1990 00:00:00 GMT'
  'X-Content-Type-Options': 'nosniff'
  # Gmail also sends this, though I'm not really sure what it does...
  # 'X-Xss-Protection': '1; mode=block'
# The one exception to that is requests destined for iframes. They need to
# have content-type: text/html set for IE to process the juicy JS inside.
# (Object.create keeps all the standard headers reachable via the prototype chain.)
ieHeaders = Object.create standardHeaders
ieHeaders['Content-Type'] = 'text/html'
# Google's browserchannel server adds some junk after the first message data is sent. I
# assume this stops some whole-page buffering in IE. I assume the data used is noise so it
# doesn't compress.
#
# I don't really know why google does this. I'm assuming there's a good reason to it though.
# Incompressible noise pushed down IE's forever-iframe before real data (see note above).
ieJunk = "7cca69475363026330a0d99468e88d23ce95e222591126443015f5f462d9a177186c8701fb45a6ffe
e0daf1a178fc0f58cd309308fba7e6f011ac38c9cdd4580760f1d4560a84d5ca0355ecbbed2ab715a3350fe0c47
9050640bd0e77acec90c58c4d3dd0f5cf8d4510e68c8b12e087bd88cad349aafd2ab16b07b0b1b8276091217a44
a9fe92fedacffff48092ee693af\n"
# If the user is using IE, instead of using XHR backchannel loaded using
# a forever iframe. When data is sent, it is wrapped in <script></script> tags
# which call functions in the browserchannel library.
#
# This method wraps the normal `.writeHead()`, `.write()` and `.end()` methods by
# special versions which produce output based on the request's type.
#
# This **is not used** for:
#
# - The first channel test
# - The first *bind* connection a client makes. The server sends arrays there, but the
# connection is a POST and it returns immediately. So that request happens using XHR/Trident
# like regular forward channel requests.
# Builds the {writeHead, write, writeRaw, end, writeError} transport for a request,
# chosen by the TYPE query parameter: script-tag iframe output for 'html' (IE),
# plain length-prefixed output otherwise.
messagingMethods = (query, res) ->
  type = query.TYPE
  if type == 'html'
    # Junk (see ieJunk above) is sent exactly once, after the first real write.
    junkSent = false
    methods =
      writeHead: ->
        res.writeHead 200, 'OK', ieHeaders
        res.write '<html><body>'
        domain = query.DOMAIN
        # If the iframe is making the request using a secondary domain, I think we need
        # to set the `domain` to the original domain so that we can call the response methods.
        if domain and domain != ''
          # Make sure the domain doesn't contain anything by naughty by `JSON.stringify()`-ing
          # it before passing it to the client. There are XSS vulnerabilities otherwise.
          res.write "<script>try{document.domain=#{JSON.stringify domain};}catch(e){}</script>\n"
      write: (data) ->
        # The data is passed to `m()`, which is bound to *onTridentRpcMessage_* in the client.
        res.write "<script>try {parent.m(#{JSON.stringify data})} catch(e) {}</script>\n"
        unless junkSent
          res.write ieJunk
          junkSent = true
      end: ->
        # Once the data has been received, the client needs to call `d()`, which is bound to
        # *onTridentDone_* with success=*true*.
        # The weird spacing of this is copied from browserchannel. Its really not necessary.
        res.end "<script>try {parent.d(); }catch (e){}</script>\n"
      # This is a helper method for signalling an error in the request back to the client.
      writeError: (statusCode, message) ->
        # The HTML (iframe) handler has no way to discover that the embedded script tag
        # didn't complete successfully. To signal errors, we return **200 OK** and call an
        # exposed rpcClose() method on the page.
        methods.writeHead()
        res.end "<script>try {parent.rpcClose(#{JSON.stringify message})} catch(e){}</script>\n"
    # For some reason, sending data during the second test (111112) works slightly differently for
    # XHR, but its identical for html encoding. We'll use a writeRaw() method in that case, which
    # is copied in the case of html.
    methods.writeRaw = methods.write
    methods
  else
    # For normal XHR requests, we send data normally. Each chunk is its byte length,
    # a newline, then the payload.
    writeHead: -> res.writeHead 200, 'OK', standardHeaders
    write: (data) -> res.write "#{data.length}\n#{data}"
    writeRaw: (data) -> res.write data
    end: -> res.end()
    writeError: (statusCode, message) ->
      res.writeHead statusCode, standardHeaders
      res.end message
# For telling the client its done bad.
#
# It turns out google's server isn't particularly fussy about signalling errors using the proper
# html RPC stuff, so this is useful for html connections too.
sendError = (res, statusCode, message) ->
  # `message` doubles as the HTTP reason phrase and the visible page body.
  res.writeHead statusCode, message
  body = "<html><body><h1>#{message}</h1></body></html>"
  res.end body
  return
# ## Parsing client maps from the forward channel
#
# The client sends data in a series of url-encoded maps. The data is encoded like this:
#
# ```
# count=2&ofs=0&req0_x=3&req0_y=10&req1_abc=def
# ```
#
# First, we need to buffer up the request response and query string decode it.
# Collect the entire POST body as a single utf8 string, then hand it to `callback`.
bufferPostData = (req, callback) ->
  chunks = []
  req.on 'data', (chunk) -> chunks.push chunk.toString 'utf8'
  req.on 'end', -> callback chunks.join ''
# Next, we'll need to decode the incoming client data into an array of objects.
#
# The data could be in two different forms:
#
# - Classical browserchannel format, which is a bunch of string->string url-encoded maps
# - A JSON object
#
# We can tell what format the data is in by inspecting the content-type header
#
# ## URL Encoded data
#
# Essentially, url encoded the data looks like this:
#
# ```
# { count: '2',
# ofs: '0',
# req0_x: '3',
# req0_y: '10',
# req1_abc: 'def'
# }
# ```
#
# ... and we will return an object in the form of `[{x:'3', y:'10'}, {abc: 'def'}, ...]`
#
# ## JSON Encoded data
#
# JSON encoded the data looks like:
#
# ```
# { ofs: 0
# , data: [null, {...}, 1000.4, 'hi', ...]
# }
# ```
#
# or `null` if there's no data.
#
# This function returns null if there's no data or {ofs, json:[...]} or {ofs, maps:[...]}
decodeData = (req, data) ->
  if req.headers['content-type'] == 'application/json'
    data = JSON.parse data
    return null if data is null # There's no data. This is a valid response.
    # We'll restructure it slightly to mark the data as JSON rather than maps.
    {ofs, data} = data
    {ofs, json:data}
  else
    # Maps. Ugh.
    data = querystring.parse data
    count = parseInt data.count, 10
    return null if count is 0
    # ofs will be missing if count is zero
    ofs = parseInt data.ofs, 10
    # Bugfix: this used to read `isNaN count or isNaN ofs`, which coffeescript compiles
    # to `isNaN(count || isNaN(ofs))` - the implicit call swallowed the `or`, so a
    # garbled ofs slipped through validation. Parenthesize to check both values.
    throw new Error 'invalid map data' if (isNaN count) or (isNaN ofs)
    throw new Error 'Invalid maps' unless count == 0 or (count > 0 and data.ofs?)
    maps = new Array count
    # Scan through all the keys in the data. Every key of the form:
    # `req123_xxx` will be used to populate its map.
    regex = /^req(\d+)_(.+)$/
    for key, val of data
      match = regex.exec key
      if match
        id = match[1]
        mapKey = match[2]
        map = (maps[id] ||= {})
        # The client signals a map it couldn't encode with `reqX_type=_badmap`.
        # (The old check tested `id == 'type'`, but id is always numeric, so it
        # never fired; the sentinel lives in the map key and value.)
        continue if mapKey == 'type' and val == '_badmap'
        map[mapKey] = val
    {ofs, maps}
# This is a helper method to order the handling of messages / requests / whatever.
#
# Use it like this:
# inOrder = order 0
#
# inOrder 1, -> console.log 'second'
# inOrder 0, -> console.log 'first'
#
# Start is the ID of the first element we expect to receive. If we get data for earlier
# elements, we'll play them anyway if playOld is truthy.
# Build an in-order dispatcher. `start` is the first expected sequence number;
# callbacks registered out of order are held back until every earlier one has run.
# Sequence numbers below the current base are replayed immediately when `playOld`
# is truthy, and silently dropped otherwise.
order = (start, playOld) ->
  # `base` is the sequence number corresponding to the queue's first slot.
  base = start
  # Slots hold callbacks; a slot is truthy only once its callback has arrived.
  pending = new Array 10
  (seq, callback) ->
    # Substitute a noop so filled slots are always truthy.
    callback or= ->
    if seq < base
      # An old message - replay or discard depending on configuration.
      callback() if playOld
      return
    pending[seq - base] = callback
    # Drain from the front for as long as the sequence stays contiguous.
    while pending[0]
      next = pending.shift()
      base += 1
      next()
    return
# We need access to the client's sourcecode. I'm going to get it using a synchronous file call
# (it'll be fast anyway, and only happen once).
#
# I'm also going to set an etag on the client data so the browser client will be cached. I'm kind of
# uncomfortable about adding complexity here because its not like this code hasn't been written
# before, but.. I think a lot of people will use this API.
#
# I should probably look into hosting the client code as a javascript module using that client-side
# npm thing.
clientFile = "#{__dirname}/../dist/bcsocket.js"
# Stat first so the middleware can build an ETag from size + mtime.
clientStats = fs.statSync clientFile
try
  clientCode = fs.readFileSync clientFile, 'utf8'
catch e
  console.error 'Could not load the client javascript. Run `cake client` to generate it.'
  throw e
# This is mostly to help development, but if the client is recompiled, I'll pull in a new version.
# This isn't tested by the unit tests - but its not a big deal.
fs.watchFile clientFile, persistent: false, (curr, prev) ->
  if curr.mtime.getTime() != prev.mtime.getTime()
    # Putting a synchronous file call here will stop the whole server while the client is reloaded.
    # Again, this will only happen during development so its not a big deal.
    console.log "Reloading client JS"
    clientCode = fs.readFileSync clientFile, 'utf8'
    clientStats = curr
# ---
#
# # The server middleware
#
# The server module returns a function, which you can call with your configuration
# options. It returns your configured connect middleware, which is actually another function.
module.exports = browserChannel = (options, onConnect) ->
if typeof onConnect == 'undefined'
onConnect = options
options = {}
options ||= {}
options[option] ?= value for option, value of defaultOptions
# Strip off a trailing slash in base.
base = options.base
base = base[... base.length - 1] if base.match /\/$/
# Add a leading slash back on base
base = "/#{base}" unless base.match /^\//
# map from sessionId -> session
sessions = {}
# Host prefixes provide a way to skirt around connection limits. They're only
# really important for old browsers.
getHostPrefix = ->
  # No configured prefixes means no prefix at all.
  return null unless options.hostPrefixes
  randomArrayElement options.hostPrefixes
# # Create a new client session.
#
# This method will start a new client session.
#
# Session ids are generated by [node-hat]. They are guaranteed to be unique.
# [node-hat]: https://github.com/substack/node-hat
#
# This method is synchronous, because a database will never be involved in browserchannel
# session management. Browserchannel sessions only last as long as the user's browser
# is open. If there's any connection turbulence, the client will reconnect and get
# a new session id.
#
# Sometimes a client will specify an old session ID and old array ID. In this case, the client
# is reconnecting and we should evict the named session (if it exists).
createSession = (address, query, headers) ->
  {RID:initialRid, CVER:appVersion, OSID:oldSessionId, OAID:oldArrayId} = query

  # A reconnecting client names its previous session. Acknowledge whatever it
  # already received and evict the stale session before making a new one.
  if oldSessionId? and (oldSession = sessions[oldSessionId])
    oldSession._acknowledgeArrays oldArrayId
    oldSession.close 'Reconnected'

  # Sessions extend node's [EventEmitter][] so they have access to goodies like
  # `session.on(event, handler)`, `session.emit('paarty')`, etc.
  # [EventEmitter]: http://nodejs.org/docs/v0.4.12/api/events.html
  session = new EventEmitter

  # The session's unique ID for this connection
  session.id = hat()

  # The client stores its IP address and headers from when it first opened the session. The
  # handler can use this information for authentication or something.
  session.address = address
  session.headers = headers

  # The session is a little state machine. It has the following states:
  #
  # - **init**: The session has been created and its sessionId hasn't been sent yet.
  #   The session moves to the **ok** state when the first data chunk is sent to the client.
  # - **ok**: The session is ready to send and receive data; it spends most of its time here.
  # - **closed**: The session has been removed from the session list. It can no longer
  #   be used for any reason. It is invalid to send arrays to a closed session.
  session.state = 'init'

  # The state is modified through this method. It emits events when the state changes. (yay)
  changeState = (newState) ->
    oldState = session.state
    session.state = newState
    session.emit 'state changed', session.state, oldState

  # The server sends messages to the client via a hanging GET request (the backchannel).
  # This is a handle to null, or {res, methods, chunk, bytesSent}:
  #
  # - **res** is the http response object
  # - **methods** is a map of send(), etc methods for communicating properly with the
  #   backchannel - this will be different if the request comes from IE or not.
  # - **chunk** specifies whether we keep the connection open across multiple messages.
  #   With a buffering proxy in the way we can't respond a bit at a time, so the
  #   backchannel is closed after each data chunk. The client decides this during
  #   testing and passes a CI= parameter when the backchannel is established.
  # - **bytesSent** counts bytes sent through the backchannel. We periodically close it
  #   and let the client reopen it, so things like the chrome web inspector stay usable.
  backChannel = null

  # Server->client data is sent as *arrays*. Each entry here is
  # {id, data, sendcallback, confirmcallback}.
  outgoingArrays = []
  # `lastArrayId` is the array ID of the last queued array
  lastArrayId = -1
  # Every client request carries an *AID* parameter naming the last array the client
  # received; arrays stay queued until confirmed. `lastSentArrayId` is the ID of the
  # last array actually written to a backchannel.
  lastSentArrayId = -1

  # Attach a (new) backchannel response. I would like this method to be private, but it
  # needs to be accessed from the HTTP request code below. The _ at the start will
  # hopefully make people think twice before using it.
  session._setBackChannel = (res, query) ->
    clearBackChannel()
    backChannel =
      res: res
      methods: messagingMethods query, res
      chunk: query.CI == '0'
      bytesSent: 0
    res.connection.once 'close', -> clearBackChannel(res)

    # Start the heartbeat interval and clear the session timeout. The session timeout
    # restarts whenever the backchannel connection closes.
    refreshHeartbeat()
    clearTimeout sessionTimeout

    # The old backchannel may have died mid-send, so previously sent arrays may never
    # have been received. Resetting lastSentArrayId rolls the sent status back to only
    # those arrays which have been acknowledged.
    lastSentArrayId = outgoingArrays[0].id - 1 if outgoingArrays.length > 0

    # Send any arrays we've buffered now that we have a backchannel
    @flush()

  # Interval timer that sends ['noop'] every so often so the client knows we haven't
  # forgotten it (and the backchannel connection doesn't time out).
  heartbeat = null

  # Removes the back channel and any state associated with it. Called when the
  # backchannel closes naturally, is replaced or when the connection closes.
  clearBackChannel = (res) ->
    # clearBackChannel doesn't do anything if we call it repeatedly.
    return unless backChannel
    # Only delete the backchannel if the closed connection is actually the backchannel
    # we're currently using.
    return if res? and res != backChannel.res

    # FIX: heartbeat is created with setInterval, so cancel it with clearInterval.
    # Node happens to accept either, but the stub timer methods installed through
    # browserChannel._setTimerMethods may not treat them interchangeably.
    clearInterval heartbeat
    backChannel.methods.end()
    backChannel = null

    # Whenever we don't have a backchannel, we run the session timeout timer.
    refreshSessionTimeout()

  # Sets / resets the heartbeat interval to the full keepAliveInterval.
  refreshHeartbeat = ->
    # FIX: was clearTimeout - see the note in clearBackChannel.
    clearInterval heartbeat
    heartbeat = setInterval (-> session.send ['noop']), options.keepAliveInterval

  # The session will close if there's been no backchannel for awhile.
  sessionTimeout = null
  refreshSessionTimeout = ->
    clearTimeout sessionTimeout
    sessionTimeout = setTimeout (-> session.close 'Timed out'), options.sessionTimeoutInterval

  # The session doesn't start with a backchannel, so kick off the timeout timer
  # as soon as it's created.
  refreshSessionTimeout()

  # Drop (and confirm) all queued arrays up to and including *id* once the client
  # has acknowledged them.
  session._acknowledgeArrays = (id) ->
    id = parseInt id if typeof id is 'string'
    while outgoingArrays.length > 0 and outgoingArrays[0].id <= id
      {confirmcallback} = outgoingArrays.shift()
      # I've got no idea what to do if we get an exception thrown here. The session
      # will end up in an inconsistant state...
      confirmcallback?()
    return

  # Queue an array to be sent; returns the ID of the queued data chunk.
  # sendcallback fires when the array is written to a backchannel; confirmcallback
  # fires when the client acknowledges it.
  queueArray = (data, sendcallback, confirmcallback) ->
    throw new Error "Cannot queue array when the session is already closed" if session.state == 'closed'
    id = ++lastArrayId
    outgoingArrays.push {id, data, sendcallback, confirmcallback}
    lastArrayId

  # The first thing the new session tells the client is its session id and host
  # prefix and stuff. The client has already connected once the first POST /bind
  # has been received, so no callback is needed here.
  queueArray ['c', session.id, getHostPrefix(), 8]

  # Queue arr for sending through the backchannel. The optional callback is called
  # with no arguments when the client acknowledges the array, or with an error object
  # if the client disconnects before the array is sent. Returns the array's ID.
  session.send = (arr, callback) ->
    id = queueArray arr, null, callback
    @flush()
    id

  # ### Maps (client->server messages)
  #
  # POSTed maps can arrive out of order, so incoming maps are buffered and reordered
  # through this helper before being emitted. Each map has an ID (starting at 0 when
  # the session is created); old messages are never emitted twice.
  #
  # NOTE(review): a client could spam out-of-order maps until the server runs out of
  # memory; the session should be dumped if this buffer grows too large.
  mapBuffer = order 0, false

  # Each forward channel request also has a request ID (RID=X), starting at a random
  # value from the initial session create packet and incrementing by 1 per request.
  # A failed request may be retried with the same RID and extra maps tacked on, so
  # processing duplicate RIDs is allowed. Request ordering mostly matters so that a
  # disconnect message only applies once preceding messages have been received.
  ridBuffer = order initialRid, true

  # Called with data from a forward channel request. data contains either {maps}
  # (classic browserchannel maps) or {json}, with data.ofs naming the first map's ID.
  session._receivedData = (rid, data) ->
    ridBuffer rid, ->
      return if data is null
      throw new Error 'Invalid data' unless data.maps? or data.json?

      # NOTE(review): this bare call passes no callback to the order helper and looks
      # redundant given the enclosing ridBuffer call - confirm against the order()
      # helper before removing.
      ridBuffer rid

      id = data.ofs
      if data.maps
        # If an exception is thrown during this loop, I'm not really sure what the
        # behaviour should be.
        for map in data.maps
          # The do expression captures the current map for the deferred callback.
          mapBuffer id++, do (map) -> ->
            return if session.state is 'closed'
            session.emit 'map', map
            # If the key is JSON, also decode it and emit 'message'. This is a much
            # nicer way to message the server.
            if map.JSON?
              try
                message = JSON.parse map.JSON
                session.emit 'message', message
      else
        # We have data.json. We'll just emit it directly.
        for message in data.json
          # FIX: capture *message* (the variable actually used in the callback). The
          # original wrote `do (map)`, which is undefined in this branch, so any
          # buffered (out-of-order) callbacks would all see the loop's final message.
          mapBuffer id++, do (message) -> ->
            return if session.state is 'closed'
            session.emit 'message', message

  # Close the session once all requests before *rid* have been processed.
  session._disconnectAt = (rid) ->
    ridBuffer rid, -> session.close 'Disconnected'

  # When we receive forwardchannel data, we reply with a special little 3-variable
  # array telling the client if it should reopen the backchannel. This method returns
  # what the forward channel should reply with.
  session._backChannelStatus = ->
    # Find the arrays that have been sent over the wire but haven't been acknowledged yet
    numUnsentArrays = lastArrayId - lastSentArrayId
    unacknowledgedArrays = outgoingArrays[... outgoingArrays.length - numUnsentArrays]
    outstandingBytes = if unacknowledgedArrays.length == 0
      0
    else
      # We don't care about the length of the array IDs or callback functions. The
      # value is just used by the client in a rough heuristic to decide whether the
      # backchannel should be reopened.
      data = (a.data for a in unacknowledgedArrays)
      JSON.stringify(data).length
    [
      (if backChannel then 1 else 0)
      lastSentArrayId
      outstandingBytes
    ]

  # ## Encoding server arrays for the back channel
  #
  # Data is sent to the client in **chunks**: a JSON array of [arrayId, data] pairs
  # prefixed by its length in bytes. Array ids count from 0 for the session's lifetime.
  # This actually sends the arrays on the next tick, if the backchannel is alive.
  session.flush = ->
    process.nextTick ->
      if backChannel
        numUnsentArrays = lastArrayId - lastSentArrayId
        if numUnsentArrays > 0
          arrays = outgoingArrays[outgoingArrays.length - numUnsentArrays ...]

          # I've abused outgoingArrays to also contain some callbacks. We only send
          # [id, data] to the client.
          data = ([id, data] for {id, data} in arrays)
          bytes = JSON.stringify(data) + "\n"

          # **Away!**
          backChannel.methods.write bytes
          backChannel.bytesSent += bytes.length
          lastSentArrayId = lastArrayId

          # Fire any send callbacks on the messages - exactly once each. Again, not
          # sure what to do if there are exceptions here.
          for a in arrays
            if a.sendcallback?
              a.sendcallback?()
              delete a.sendcallback

          # If we can't chunk, or we've sent a lot of data, cycle the backchannel.
          if !backChannel.chunk or backChannel.bytesSent > 10 * 1024
            clearBackChannel()

          # The first backchannel is the client's initial connection. Once we've sent
          # the first data chunk to the client, we've officially opened the connection.
          changeState 'ok' if session.state == 'init'

  # The client's reported application version, or null. This is sent when the
  # connection is first requested, so you can use it to make your application die /
  # stay compatible with people who don't close their browsers.
  session.appVersion = appVersion or null

  # Signal to a client that it should stop trying to connect. This has no other effect
  # on the server session - the session can still receive messages after stop() has
  # been called (which *would* come up in production). The callback fires once the
  # message has been *sent*. Typical usage:
  #
  #     session.stop -> session.close()
  session.stop = (callback) ->
    return if @state is 'closed'
    queueArray ['stop'], callback, null
    @flush()

  # Close the session and make the server forget about it. The client might try to
  # reconnect (getting a new session) if you only call close(). The optional message
  # is passed to close-event handlers and to pending confirm callbacks as an error.
  session.close = (message) ->
    # You can't double-close.
    return if @state == 'closed'

    changeState 'closed'
    @emit 'close', message
    clearBackChannel()
    clearTimeout sessionTimeout

    # Anything still unacknowledged will now never be confirmed.
    for {confirmcallback} in outgoingArrays
      confirmcallback?(new Error message || 'closed')

    delete sessions[@id]
    #console.log "closed #{@id}"

  sessions[session.id] = session

  session
# This is the returned middleware. Connect middleware is a function which takes in an
# http request, an http response and a next method.
#
# The middleware can do one of two things:
#
# - Handle the request, sending data back to the server via the response
# - Call `next()`, which allows the next middleware in the stack a chance to
#   handle the request.
middleware = (req, res, next) ->
  {query, pathname} = parse req.url, true
  #console.warn req.method, req.url

  # If base is /foo, we don't match /foobar. (Currently no unit tests for this)
  return next() if pathname.substring(0, base.length + 1) != "#{base}/"

  {writeHead, write, writeRaw, end, writeError} = messagingMethods query, res

  # # Serving the client
  #
  # The browserchannel server hosts a usable web client library at
  # /CHANNEL/bcsocket.js, wrapping the google closure library client implementation.
  if pathname is "#{base}/bcsocket.js"
    etag = "\"#{clientStats.size}-#{clientStats.mtime.getTime()}\""
    res.writeHead 200, 'OK',
      'Content-Type': 'application/javascript',
      'ETag': etag,
      'Content-Length': clientCode.length

    # This code is manually tested because it looks like its impossible to send HEAD
    # requests using nodejs's HTTP library at time of writing (0.4.12).
    if req.method is 'HEAD'
      res.end()
    else
      res.end clientCode

  # # Connection testing
  #
  # Before the browserchannel client connects, it tests the connection to make sure
  # its working, and to look for buffering proxies. This handler is completely stateless.
  else if pathname is "#{base}/test"
    # This server only supports browserchannel protocol version **8**.
    # I have no idea if 400 is the right error here.
    return sendError res, 400, 'Version 8 required' unless query.VER is '8'

    #### Phase 1: Server info
    # The client requests host prefixes. The server responds with an array of
    # ['hostprefix' or null, 'blockedprefix' or null].
    #
    # - **hostprefix** is a subdomain prepended onto the hostname of each request,
    #   to get around browser connection limits. Using this requires a bank of
    #   configured DNS entries and SSL certificates if you're using HTTPS.
    # - **blockedprefix** provides network admins a way to blacklist browserchannel
    #   requests. It is not supported by node-browserchannel.
    if query.MODE == 'init' and req.method == 'GET'
      hostPrefix = getHostPrefix()
      blockedPrefix = null # Blocked prefixes aren't supported.

      # An extra special header tells the client that this server likes json-encoded
      # forward channel data over form urlencoded data. A header (rather than the
      # response body or a version bump) avoids conflicting with future
      # browserchannel versions.
      headers = Object.create standardHeaders
      headers['X-Accept'] = 'application/json; application/x-www-form-urlencoded'

      # This is a straight-up normal HTTP request like the forward channel requests.
      # We don't use the funny iframe write methods.
      res.writeHead 200, 'OK', headers
      res.end(JSON.stringify [hostPrefix, blockedPrefix])
    else
      #### Phase 2: Buffering proxy detection
      # The client is trying to determine if their connection is buffered. We reply
      # with '11111', then 2 seconds later '2'. The client should get the data in
      # 2 chunks - but it won't if there's a misbehaving corporate proxy in the way.
      writeHead()
      writeRaw '11111'
      setTimeout (-> writeRaw '2'; end()), 2000

  # # BrowserChannel connection
  #
  # - The **forward channel** (client->server) uses a **POST** request for each message.
  # - The **back channel** (server->client) uses a hanging **GET** request. If chunking
  #   is disallowed (ie, if the proxy buffers) the back channel is closed after each
  #   server message.
  else if pathname == "#{base}/bind"
    # I'm copying the behaviour of unknown SIDs below. I don't know how the client
    # is supposed to detect this error, but, eh.
    return sendError res, 400, 'Version 8 required' unless query.VER is '8'

    # All browserchannel connections have an associated session object, created
    # immediately if the connection is new.
    if query.SID
      session = sessions[query.SID]
      # This special error tells the client to abandon its connection request and
      # reconnect. For some reason, google replies with the same response on HTTP
      # and HTML requests here, so I'll follow suit.
      return sendError res, 400, 'Unknown SID' unless session
    session._acknowledgeArrays query.AID if query.AID? and session

    # ### Forward Channel
    if req.method == 'POST'
      if session == undefined
        # The session is new! Make them a new session object and let the
        # application know.
        session = createSession req.connection.remoteAddress, query, req.headers
        onConnect? session

      bufferPostData req, (data) ->
        try
          data = decodeData req, data
          session._receivedData query.RID, data
        catch e
          console.warn 'Error parsing forward channel', e.stack
          return sendError res, 400, 'Bad data'

        if session.state is 'init'
          # The initial forward channel request doubles as a backchannel for the
          # server's initial data (session id, etc). This connection is a little bit
          # special - it is always encoded using length-prefixed json encoding and is
          # closed as soon as the first chunk is sent.
          res.writeHead 200, 'OK', standardHeaders
          session._setBackChannel res, CI:1, TYPE:'xmlhttp', RID:'rpc'
          session.flush()
        else if session.state is 'closed'
          # The onConnect handler may have called close() immediately. Assume an
          # authentication problem and treat this as a forbidden connection attempt.
          sendError res, 403, 'Forbidden'
        else
          # On normal forward channels, reply by telling the session if our
          # backchannel is still live and how many unconfirmed arrays we have.
          response = JSON.stringify session._backChannelStatus()
          res.writeHead 200, 'OK', standardHeaders
          res.end "#{response.length}\n#{response}"

    else if req.method is 'GET'
      # ### Back channel
      #
      # GET messages are usually backchannel requests (server->client), handled by
      # the session object.
      if query.TYPE in ['xmlhttp', 'html']
        # FIX: this guard used &&, which rejected almost nothing - a non-string SID
        # (eg a repeated query parameter, which parses as an array) or a missing /
        # too-short SID must each be rejected *before* session is dereferenced below.
        return sendError res, 400, 'Invalid SID' if typeof query.SID != 'string' || query.SID.length < 5
        return sendError res, 400, 'Expected RPC' unless query.RID is 'rpc'
        writeHead()
        session._setBackChannel res, query

      # The client can manually disconnect by making a GET request with
      # TYPE='terminate'. We don't send any data in the response; the client
      # implements this using an img= appended to the page.
      else if query.TYPE is 'terminate'
        session?._disconnectAt query.RID
        res.writeHead 200, 'OK', standardHeaders
        res.end()

    else
      res.writeHead 405, 'Method Not Allowed', standardHeaders
      res.end "Method not allowed"

  else
    # We'll 404 the user instead of letting another handler take care of it.
    # Users shouldn't be using the specified URL prefix for anything else.
    res.writeHead 404, 'Not Found', standardHeaders
    res.end "Not found"

# Close every live session (eg, when the http server shuts down).
middleware.close = -> session.close() for id, session of sessions

# This is an undocumented, untested treat - if you pass the HTTP server / connect
# server to browserchannel through the options object, it attaches a close listener
# for you automatically.
options.server?.on 'close', middleware.close

middleware
# This will override the timer methods (`setInterval`, etc) with the testing stub
# versions, which are way faster. Each assignment rebinds the module-level local
# declared near the top of the file, so all session timers pick up the stubs.
browserChannel._setTimerMethods = (methods) ->
  setInterval = methods.setInterval
  clearInterval = methods.clearInterval
  setTimeout = methods.setTimeout
  clearTimeout = methods.clearTimeout
  Date = methods.Date
  # Return the methods object, matching the value of the original destructuring
  # assignment expression.
  methods
# # A BrowserChannel server.
#
# - Its still pretty young, so there's probably bugs lurking around and the API
# will still change quickly.
# - Its missing integration tests
#
# It works in all the browsers I've tried.
#
# I've written this using the literate programming style to try it out. So, thats why
# there's a million comments everywhere.
#
# The server is implemented as connect middleware. Its intended to be used like this:
#
# ```
# server = connect(
# browserChannel (client) -> client.send 'hi'
# )
# ```
# ## Dependancies, helper methods and constant data
# `parse` helps us decode URLs in requests
{parse} = require 'url'
# `querystring` will help decode the URL-encoded forward channel data
querystring = require 'querystring'
# `fs` is used to read & serve the client library
fs = require 'fs'
# Client sessions are `EventEmitters`
{EventEmitter} = require 'events'
# Client session Ids are generated using `node-hat`
hat = require('hat').rack(40, 36)
# `randomInt(n)` generates and returns a random int k in the range 0 <= k < n
randomInt = (n) ->
  Math.floor n * Math.random()
# `randomArrayElement(array)` picks and returns one element of *array* at random
randomArrayElement = (array) ->
  idx = randomInt array.length
  array[idx]
# For testing we'll override `setInterval`, etc with special testing stub versions (so
# we don't have to actually wait for actual *time*). To do that, we need local variable
# versions (I don't want to edit the global versions) ... and they'll just point to the
# normal versions anyway. These locals are rebound by browserChannel._setTimerMethods.
{setInterval, clearInterval, setTimeout, clearTimeout, Date} = global
# The module is configurable. Any option not set on the options object passed to
# browserChannel() falls back to the value declared here.
defaultOptions =
  # An optional array of host prefixes. Each browserchannel client will randomly pick
  # from the list of host prefixes when it connects. This reduces the impact of per-host
  # connection limits.
  #
  # All host prefixes should point to the same server. Ie, if your server's hostname
  # is *example.com* and your hostPrefixes contains ['a', 'b', 'c'],
  # a.example.com, b.example.com and c.example.com should all point to the same host
  # as example.com.
  hostPrefixes: null

  # You can specify the base URL which browserchannel connects to. Change this if you want
  # to scope browserchannel in part of your app, or if you want /channel to mean something
  # else, or whatever.
  base: '/channel'

  # We'll send keepalives every so often to make sure the http connection isn't closed by
  # eagar clients. The standard timeout is 30 seconds, so we'll default to sending them
  # every 20 seconds or so.
  keepAliveInterval: 20 * 1000

  # After awhile (30 seconds or so) of not having a backchannel connected, we'll evict the
  # session completely. This will happen whenever a user closes their browser.
  sessionTimeoutInterval: 30 * 1000
# All server responses set some standard HTTP headers.
# To be honest, I don't know how many of these are necessary. I just copied
# them from google.
#
# The nocache headers in particular seem unnecessary since each client
# request includes a randomized `zx=junk` query parameter.
standardHeaders =
  'Content-Type': 'text/plain'
  'Cache-Control': 'no-cache, no-store, max-age=0, must-revalidate'
  'Pragma': 'no-cache'
  'Expires': 'Fri, 01 Jan 1990 00:00:00 GMT'
  'X-Content-Type-Options': 'nosniff'
  # Gmail also sends this, though I'm not really sure what it does...
  # 'X-Xss-Protection': '1; mode=block'

# The one exception to that is requests destined for iframes. They need to
# have content-type: text/html set for IE to process the juicy JS inside.
# (ieHeaders prototypally inherits from standardHeaders, so the nocache
# headers above still apply.)
ieHeaders = Object.create standardHeaders
ieHeaders['Content-Type'] = 'text/html'
# Google's browserchannel server adds some junk after the first message data is sent. I
# assume this stops some whole-page buffering in IE. I assume the data used is noise so it
# doesn't compress.
#
# I don't really know why google does this. I'm assuming there's a good reason to it though.
# NOTE: this is a CoffeeScript multi-line double-quoted string - the wrapped lines
# below are all part of the literal.
ieJunk = "7cca69475363026330a0d99468e88d23ce95e222591126443015f5f462d9a177186c8701fb45a6ffe
e0daf1a178fc0f58cd309308fba7e6f011ac38c9cdd4580760f1d4560a84d5ca0355ecbbed2ab715a3350fe0c47
9050640bd0e77acec90c58c4d3dd0f5cf8d4510e68c8b12e087bd88cad349aafd2ab16b07b0b1b8276091217a44
a9fe92fedacffff48092ee693af\n"
# If the user is using IE, instead of using XHR backchannel loaded using
# a forever iframe. When data is sent, it is wrapped in <script></script> tags
# which call functions in the browserchannel library.
#
# This method wraps the normal `.writeHead()`, `.write()` and `.end()` methods by
# special versions which produce output based on the request's type.
#
# This **is not used** for:
#
# - The first channel test
# - The first *bind* connection a client makes. The server sends arrays there, but the
# connection is a POST and it returns immediately. So that request happens using XHR/Trident
# like regular forward channel requests.
# Build the writeHead/write/end/writeError methods appropriate for this
# request's transport.
#
# If the user is using IE (query.TYPE == 'html'), data goes to a forever
# iframe: each message is wrapped in <script></script> tags which call
# functions in the browserchannel client library. Otherwise plain XHR framing
# (length-prefixed chunks) is used.
#
# This **is not used** for:
#
# - The first channel test
# - The first *bind* connection a client makes (a plain POST that returns
#   immediately, so it uses XHR framing even on IE).
#
# The exact bytes written here are wire format — do not alter the strings.
messagingMethods = (query, res) ->
  type = query.TYPE
  if type == 'html'
    # True once the ieJunk anti-buffering blob has been written (written once,
    # immediately after the first message).
    junkSent = false

    methods =
      writeHead: ->
        res.writeHead 200, 'OK', ieHeaders
        res.write '<html><body>'
        domain = query.DOMAIN
        # If the iframe is making the request using a secondary domain, set
        # document.domain back to the original domain so the parent-frame
        # callbacks below are reachable.
        if domain and domain != ''
          # `JSON.stringify()` the domain before echoing it into markup —
          # there are XSS vulnerabilities otherwise.
          res.write "<script>try{document.domain=#{JSON.stringify domain};}catch(e){}</script>\n"
      write: (data) ->
        # The data is passed to `m()`, which is bound to *onTridentRpcMessage_*
        # in the client.
        res.write "<script>try {parent.m(#{JSON.stringify data})} catch(e) {}</script>\n"
        unless junkSent
          res.write ieJunk
          junkSent = true
      end: ->
        # Once the data has been received, the client needs to call `d()`
        # (bound to *onTridentDone_*) with success=*true*. The weird spacing
        # is copied from browserchannel; it's not really necessary.
        res.end "<script>try {parent.d(); }catch (e){}</script>\n"
      # Helper for signalling an error back to the client. The iframe handler
      # can't see a failed script tag, so we return **200 OK** and call the
      # page's exposed rpcClose() method instead.
      writeError: (statusCode, message) ->
        methods.writeHead()
        res.end "<script>try {parent.rpcClose(#{JSON.stringify message})} catch(e){}</script>\n"

    # During the second channel test (111112) data is sent unframed; for html
    # encoding that is identical to normal writes, so writeRaw is an alias.
    methods.writeRaw = methods.write
    methods
  else
    # For normal XHR requests, send length-prefixed chunks.
    writeHead: -> res.writeHead 200, 'OK', standardHeaders
    write: (data) -> res.write "#{data.length}\n#{data}"
    writeRaw: (data) -> res.write data
    end: -> res.end()
    writeError: (statusCode, message) ->
      res.writeHead statusCode, standardHeaders
      res.end message
# For telling the client its done bad.
#
# It turns out google's server isn't particularly fussy about signalling errors using the proper
# html RPC stuff, so this is useful for html connections too.
# Reply to a request with an HTTP error page.
#
# Google's server isn't fussy about signalling errors through the proper html
# RPC machinery, so this is used for html (iframe) connections as well as XHR.
# `message` doubles as the HTTP reason phrase and the page heading; callers
# only ever pass fixed server-side strings here.
sendError = (res, statusCode, message) ->
  res.writeHead statusCode, message
  body = '<html><body><h1>' + message + '</h1></body></html>'
  res.end body
  return
# ## Parsing client maps from the forward channel
#
# The client sends data in a series of url-encoded maps. The data is encoded like this:
#
# ```
# count=2&ofs=0&req0_x=3&req0_y=10&req1_abc=def
# ```
#
# First, we need to buffer up the request response and query string decode it.
# Buffer up an incoming request body and hand the whole thing to `callback`
# as a single utf8 string once the request ends.
bufferPostData = (req, callback) ->
  chunks = []
  req.on 'data', (chunk) -> chunks.push chunk.toString('utf8')
  req.on 'end', -> callback chunks.join('')
# Next, we'll need to decode the incoming client data into an array of objects.
#
# The data could be in two different forms:
#
# - Classical browserchannel format, which is a bunch of string->string url-encoded maps
# - A JSON object
#
# We can tell what format the data is in by inspecting the content-type header
#
# ## URL Encoded data
#
# Essentially, url encoded the data looks like this:
#
# ```
# { count: '2',
# ofs: '0',
# req0_x: '3',
# req0_y: '10',
# req1_abc: 'def'
# }
# ```
#
# ... and we will return an object in the form of `[{x:'3', y:'10'}, {abc: 'def'}, ...]`
#
# ## JSON Encoded data
#
# JSON encoded the data looks like:
#
# ```
# { ofs: 0
# , data: [null, {...}, 1000.4, 'hi', ...]
# }
# ```
#
# or `null` if there's no data.
#
# This function returns null if there's no data or {ofs, json:[...]} or {ofs, maps:[...]}
# Decode a forward-channel request body into messages.
#
# Two formats, distinguished by the content-type header:
#
# - `application/json`: `{ofs, data: [...]}` or `null`.
# - url-encoded maps:   `count=2&ofs=0&req0_x=3&req0_y=10&req1_abc=def`
#
# Returns null if there's no data, otherwise {ofs, json:[...]} or
# {ofs, maps:[...]}. Throws on malformed map data.
decodeData = (req, data) ->
  if req.headers['content-type'] == 'application/json'
    data = JSON.parse data
    return null if data is null # There's no data. This is a valid response.
    # Restructure slightly to mark the data as JSON rather than maps.
    {ofs, data} = data
    {ofs, json:data}
  else
    # Maps. Ugh.
    data = querystring.parse data
    count = parseInt data.count, 10
    return null if count is 0
    # ofs will be missing if count is zero
    ofs = parseInt data.ofs, 10
    # BUG FIX: this was `isNaN count or isNaN ofs`, which CoffeeScript parses
    # as `isNaN(count or isNaN(ofs))` — a NaN ofs slipped past the check.
    throw new Error 'invalid map data' if isNaN(count) or isNaN(ofs)
    throw new Error 'Invalid maps' unless count == 0 or (count > 0 and data.ofs?)
    maps = new Array count
    # Every key of the form `req123_xxx` populates its map.
    regex = /^req(\d+)_(.+)$/
    for key, val of data
      match = regex.exec key
      if match
        id = match[1]
        mapKey = match[2]
        map = (maps[id] ||= {})
        # The client uses `mapX_type=_badmap` to signify an error encoding a
        # map. BUG FIX: this compared `id` (always digits) against 'type', so
        # the marker was never skipped; the key is `mapKey`, the marker `val`.
        continue if mapKey == 'type' and val == '_badmap'
        map[mapKey] = val
    {ofs, maps}
# This is a helper method to order the handling of messages / requests / whatever.
#
# Use it like this:
# inOrder = order 0
#
# inOrder 1, -> console.log 'second'
# inOrder 0, -> console.log 'first'
#
# Start is the ID of the first element we expect to receive. If we get data for earlier
# elements, we'll play them anyway if playOld is truthy.
# Helper to replay out-of-order items in sequence.
#
#     inOrder = order 0
#     inOrder 1, -> console.log 'second'
#     inOrder 0, -> console.log 'first'
#
# `start` is the ID of the first element expected. Callbacks for elements
# earlier than the current position are replayed immediately when `playOld`
# is truthy, and dropped otherwise.
order = (start, playOld) ->
  # ID of the (missing) element at the head of the queue.
  base = start
  # Sparse buffer of pending callbacks; cell i holds the callback for
  # element base+i, or undefined if it hasn't arrived yet.
  pending = new Array 10
  (seq, callback) ->
    # Cells must be truthy once filled, so a missing callback becomes a no-op.
    callback or= ->
    if seq < base
      # Stale element: replay or ignore depending on playOld.
      callback() if playOld
    else
      pending[seq - base] = callback
      # Drain every contiguous callback now available at the head.
      while pending[0]
        cb = pending.shift()
        base++
        cb()
# We need access to the client's sourcecode. I'm going to get it using a synchronous file call
# (it'll be fast anyway, and only happen once).
#
# I'm also going to set an etag on the client data so the browser client will be cached. I'm kind of
# uncomfortable about adding complexity here because its not like this code hasn't been written
# before, but.. I think a lot of people will use this API.
#
# I should probably look into hosting the client code as a javascript module using that client-side
# npm thing.
# Load the client library source synchronously (fast, happens once) and stat
# it so the /bcsocket.js route can serve an ETag for browser caching.
clientFile = "#{__dirname}/../dist/bcsocket.js"
try
  # FIX: statSync used to sit outside the try, so a missing client file threw
  # a bare ENOENT without the helpful message below.
  clientStats = fs.statSync clientFile
  clientCode = fs.readFileSync clientFile, 'utf8'
catch e
  console.error 'Could not load the client javascript. Run `cake client` to generate it.'
  throw e

# Mostly a development convenience: if the client is recompiled, pull in the
# new version. Not covered by the unit tests.
fs.watchFile clientFile, persistent: false, (curr, prev) ->
  if curr.mtime.getTime() != prev.mtime.getTime()
    # Synchronous read stalls the whole server while the client reloads —
    # acceptable because this only happens during development.
    console.log "Reloading client JS"
    clientCode = fs.readFileSync clientFile, 'utf8'
    clientStats = curr
# ---
#
# # The server middleware
#
# The server module returns a function, which you can call with your configuration
# options. It returns your configured connect middleware, which is actually another function.
# # The server middleware
#
# Returns configured connect middleware. `options` is optional:
# `browserChannel(cb)` behaves like `browserChannel({}, cb)`.
module.exports = browserChannel = (options, onConnect) ->
  if typeof onConnect == 'undefined'
    onConnect = options
    options = {}

  options ||= {}
  # Fill in any unspecified options with the defaults.
  options[option] ?= value for option, value of defaultOptions

  # Strip off a trailing slash in base.
  base = options.base
  base = base[... base.length - 1] if base.match /\/$/
  # Add a leading slash back on base
  base = "/#{base}" unless base.match /^\//

  # map from sessionId -> session
  sessions = {}

  # Host prefixes provide a way to skirt around browser connection limits.
  # They're only really important for old browsers.
  getHostPrefix = ->
    if options.hostPrefixes
      randomArrayElement options.hostPrefixes
    else
      null

  # # Create a new client session.
  #
  # Session ids are generated by [node-hat]; they are guaranteed unique.
  # [node-hat]: https://github.com/substack/node-hat
  #
  # This method is synchronous, because a database will never be involved in
  # browserchannel session management. Sessions only last as long as the
  # user's browser is open; on connection turbulence the client simply
  # reconnects and gets a new session id.
  #
  # Sometimes a client specifies an old session ID and old array ID. In that
  # case the client is reconnecting and we should evict the named session
  # (if it exists).
  createSession = (address, query, headers) ->
    {RID:initialRid, CVER:appVersion, OSID:oldSessionId, OAID:oldArrayId} = query

    # Reconnection: acknowledge whatever the client saw on its old session,
    # then evict it.
    if oldSessionId? and (oldSession = sessions[oldSessionId])
      oldSession._acknowledgeArrays oldArrayId
      oldSession.close 'Reconnected'

    # Sessions extend node's [EventEmitter][], so they have access to goodies
    # like `session.on(event, handler)`, `session.emit('paarty')`, etc.
    # [EventEmitter]: http://nodejs.org/docs/v0.4.12/api/events.html
    session = new EventEmitter
    # The session's unique ID for this connection
    session.id = hat()
    # The IP address and headers from when the session was first opened. The
    # handler can use this information for authentication or something.
    session.address = address
    session.headers = headers
    # The session is a little state machine with the following states:
    #
    # - **init**: Created, sessionId not yet sent. Moves to **ok** when the
    #   first data chunk is sent to the client.
    # - **ok**: Ready to send and receive data; most of the session's life.
    # - **closed**: Removed from the session list; unusable. Sending arrays
    #   to a closed session is invalid.
    session.state = 'init'

    # All state changes go through here so they emit an event.
    changeState = (newState) ->
      oldState = session.state
      session.state = newState
      session.emit 'state changed', session.state, oldState

    # The server sends messages to the client via a hanging GET request which
    # the client must open. This is null or {res, methods, chunk, bytesSent}:
    #
    # - **res** is the http response object
    # - **methods** holds the transport-specific send methods (IE vs XHR).
    # - **chunk** is whether we may keep the connection open across messages;
    #   behind a buffering proxy the backchannel closes after each chunk
    #   (the client decides during testing and passes CI=).
    # - **bytesSent** counts backchannel bytes; we periodically close and let
    #   the client reopen so tools like the chrome inspector stay usable.
    backChannel = null
    # Server->client messages are *arrays* (client->server messages are maps,
    # silly but there it is). Each entry is {id, data, callbacks...}.
    outgoingArrays = []
    # `lastArrayId` is the array ID of the last queued array
    lastArrayId = -1
    # Arrays stay in outgoingArrays until the client confirms receipt via the
    # AID parameter. `lastSentArrayId` is the ID of the last array actually
    # written to a backchannel.
    lastSentArrayId = -1

    # Called from the HTTP request code below; the _ prefix marks it
    # internal.
    session._setBackChannel = (res, query) ->
      clearBackChannel()
      backChannel =
        res: res
        methods: messagingMethods query, res
        chunk: query.CI == '0'
        bytesSent: 0
      res.connection.once 'close', -> clearBackChannel(res)
      # Start the heartbeat and stop the session-death timer; the timer
      # restarts whenever the backchannel connection closes.
      refreshHeartbeat()
      clearTimeout sessionTimeout
      # The old backchannel may have died with arrays unreceived. Resetting
      # lastSentArrayId rolls the sent-status back to only the acknowledged
      # arrays, so unacknowledged ones get resent.
      lastSentArrayId = outgoingArrays[0].id - 1 if outgoingArrays.length > 0
      # Send any arrays we've buffered now that we have a backchannel
      @flush()

    # Timer handle for the periodic ['noop'] keepalive (sent when nothing
    # else has gone out for keepAliveInterval ms).
    heartbeat = null

    # Remove the back channel and its associated state. Called when the
    # backchannel closes naturally, is replaced, or the connection closes.
    clearBackChannel = (res) ->
      # Does nothing when called repeatedly.
      return unless backChannel
      # Only delete the backchannel if the closed connection is actually the
      # backchannel we're currently using.
      return if res? and res != backChannel.res
      # Conveniently, clearTimeout has no effect if the argument is null.
      clearTimeout heartbeat
      backChannel.methods.end()
      backChannel = null
      # Whenever we don't have a backchannel, run the session death timer.
      refreshSessionTimeout()

    # (Re)start the keepalive timer.
    # NOTE(review): this creates an *interval* but it is cleared with
    # clearTimeout — interchangeable in node, but confirm if porting.
    refreshHeartbeat = ->
      clearTimeout heartbeat
      heartbeat = setInterval (-> session.send ['noop']), options.keepAliveInterval

    # The session closes if there's been no backchannel for a while.
    sessionTimeout = null
    refreshSessionTimeout = ->
      clearTimeout sessionTimeout
      sessionTimeout = setTimeout (-> session.close 'Timed out'), options.sessionTimeoutInterval

    # No backchannel exists yet, so kick off the death timer immediately.
    refreshSessionTimeout()

    # Drop every outgoing array up to and including `id`, firing confirm
    # callbacks as they go.
    session._acknowledgeArrays = (id) ->
      id = parseInt id if typeof id is 'string'
      while outgoingArrays.length > 0 and outgoingArrays[0].id <= id
        {confirmcallback} = outgoingArrays.shift()
        # No idea what to do if an exception is thrown here — the session
        # would end up in an inconsistent state.
        confirmcallback?()
      return

    # Queue an array to be sent; returns the new array's ID. `sendcallback`
    # fires when the array is written; `confirmcallback` when the client
    # acknowledges it.
    queueArray = (data, sendcallback, confirmcallback) ->
      throw new Error "Cannot queue array when the session is already closed" if session.state == 'closed'
      id = ++lastArrayId
      outgoingArrays.push {id, data, sendcallback, confirmcallback}
      lastArrayId

    # The freshly created session first tells the client its session id,
    # host prefix and protocol version (8).
    queueArray ['c', session.id, getHostPrefix(), 8]

    # Public send. The optional callback fires when the client acknowledges
    # the array (or with an error if the session closes first).
    session.send = (arr, callback) ->
      id = queueArray arr, null, callback
      @flush()
      id

    # ### Maps
    #
    # Client->server maps arrive via POSTs which can come in out of order, so
    # incoming maps are buffered and reordered before being emitted. Map IDs
    # start at 0 when the session is created; the order helper is told not to
    # replay old messages twice.
    #
    # There's a potential DOS here: a client could spam out-of-order maps
    # until the server runs out of memory. We should dump a session when this
    # buffer grows too large.
    mapBuffer = order 0, false

    # Browserchannel has 2 mechanisms for ordering forward-channel data:
    #
    # - Each request has an ID (RID=X), starting at a random value from the
    #   session-create packet and incrementing per request. A failed request
    #   may be retried with the *same* RID plus extra maps tacked on, so
    #   requests may legitimately be processed twice (hence playOld=true).
    # - Each map has an ID counting from 0; ofs= gives the ID of the first
    #   map in a request.
    #
    # The RID ordering mostly matters for disconnect messages, which must
    # only take effect once the preceding messages have been received.
    ridBuffer = order initialRid, true
session._receivedData = (rid, data) ->
ridBuffer rid, ->
return if data is null
throw new Error 'Invalid data' unless data.maps? or data.json?
ridBuffer rid
id = data.ofs
# First, classic browserchannel maps.
if data.maps
# If an exception is thrown during this loop, I'm not really sure what the behaviour should be.
for map in data.maps
# The funky do expression here is used to pass the map into the closure.
# Another way to do it is to index into the data.maps array inside the function, but then I'd
# need to pass the index to the closure anyway.
mapBuffer id++, do (map) -> ->
return if session.state is 'closed'
session.emit 'map', map
# If you specify the key as JSON, the server will try to decode JSON data from the map and emit
# 'message'. This is a much nicer way to message the server.
if map.JSON?
try
message = JSON.parse map.JSON
session.emit 'message', message
else
# We have data.json. We'll just emit it directly.
for message in data.json
mapBuffer id++, do (map) -> ->
return if session.state is 'closed'
session.emit 'message', message
    # Close the session once all requests before `rid` have been handled.
    session._disconnectAt = (rid) ->
      ridBuffer rid, -> session.close 'Disconnected'

    # Forward-channel replies carry a little 3-element array telling the
    # client whether it should reopen the backchannel. This builds it.
    session._backChannelStatus = ->
      # Arrays sent over the wire but not yet acknowledged.
      numUnsentArrays = lastArrayId - lastSentArrayId
      unacknowledgedArrays = outgoingArrays[... outgoingArrays.length - numUnsentArrays]
      outstandingBytes = if unacknowledgedArrays.length == 0
        0
      else
        # Array IDs and callbacks don't count toward the length. Not sure
        # exactly what the client expects here — the value only feeds a rough
        # reopen-the-backchannel heuristic.
        data = (a.data for a in unacknowledgedArrays)
        JSON.stringify(data).length
      [
        (if backChannel then 1 else 0)
        lastSentArrayId
        outstandingBytes
      ]

    # ## Encoding server arrays for the back channel
    #
    # Chunks are JSON arrays prefixed by their byte length:
    #
    #     [[100, ['message', 'one']], [101, ['message', 'two']], ...]
    #
    # Each message is prefixed by its *array id* (counting from 0). Flush
    # actually writes pending arrays on the next tick, if a backchannel is
    # alive.
    session.flush = ->
      process.nextTick ->
        if backChannel
          numUnsentArrays = lastArrayId - lastSentArrayId
          if numUnsentArrays > 0
            arrays = outgoingArrays[outgoingArrays.length - numUnsentArrays ...]
            # outgoingArrays also carries callbacks; only [id, data] goes to
            # the client.
            data = ([id, data] for {id, data} in arrays)
            bytes = JSON.stringify(data) + "\n"
            # **Away!**
            backChannel.methods.write bytes
            backChannel.bytesSent += bytes.length
            lastSentArrayId = lastArrayId
            # Fire send callbacks exactly once. Again, not sure what to do if
            # there are exceptions here.
            for a in arrays
              if a.sendcallback?
                a.sendcallback?()
                delete a.sendcallback
            # Close non-chunking backchannels after each write, and any
            # backchannel after ~10kb so dev tools stay usable.
            if !backChannel.chunk or backChannel.bytesSent > 10 * 1024
              clearBackChannel()
          # The first backchannel is the client's initial connection; once the
          # first chunk has been sent the connection is officially open.
          # NOTE(review): placement reconstructed (source indentation was
          # lost) — confirm this sits inside `if backChannel`.
          changeState 'ok' if session.state == 'init'

    # The client's reported application version, or null. Sent when the
    # connection is first requested, so the application can stay compatible
    # with people who don't close their browsers.
    session.appVersion = appVersion or null

    # Signal to a client that it should stop trying to connect; no other
    # server-side effect. The callback fires once the message is *sent*.
    # Typical use:
    #
    #     session.stop -> session.close()
    #
    # Deliberately does not auto-close: the session can (and in production
    # will) still receive messages after stop() is called.
    session.stop = (callback) ->
      return if @state is 'closed'
      queueArray ['stop'], callback, null
      @flush()

    # Close the session and make the server forget about it. The client may
    # reconnect (getting a fresh session). The optional message is passed to
    # 'close' handlers and to pending confirm callbacks as an Error.
    session.close = (message) ->
      # You can't double-close.
      return if @state == 'closed'
      changeState 'closed'
      @emit 'close', message
      clearBackChannel()
      clearTimeout sessionTimeout
      for {confirmcallback} in outgoingArrays
        confirmcallback?(new Error message || 'closed')
      delete sessions[@id]
      #console.log "closed #{@id}"

    sessions[session.id] = session
    session
# This is the returned middleware. Connect middleware is a function which
# takes in an http request, an http response and a next method.
#
# The middleware can do one of two things:
#
# - Handle the request, sending data back to the server via the response
# - Call `next()`, which allows the next middleware in the stack a chance to
# handle the request.
middleware = (req, res, next) ->
{query, pathname} = parse req.url, true
#console.warn req.method, req.url
# If base is /foo, we don't match /foobar. (Currently no unit tests for this)
return next() if pathname.substring(0, base.length + 1) != "#{base}/"
{writeHead, write, writeRaw, end, writeError} = messagingMethods query, res
# # Serving the client
#
# The browserchannel server hosts a usable web client library at /CHANNEL/bcsocket.js.
# This library wraps the google closure library client implementation.
#
# If I have time, I would like to write my own version of the client to add a few features
# (websockets, message acknowledgement callbacks) and do some manual optimisations for speed.
# However, the current version works ok.
if pathname is "#{base}/bcsocket.js"
etag = "\"#{clientStats.size}-#{clientStats.mtime.getTime()}\""
res.writeHead 200, 'OK',
'Content-Type': 'application/javascript',
'ETag': etag,
'Content-Length': clientCode.length
# This code is manually tested because it looks like its impossible to send HEAD requests
# using nodejs's HTTP library at time of writing (0.4.12). (Yeah, I know, rite?)
if req.method is 'HEAD'
res.end()
else
res.end clientCode
# # Connection testing
#
# Before the browserchannel client connects, it tests the connection to make
# sure its working, and to look for buffering proxies.
#
# The server-side code for connection testing is completely stateless.
else if pathname is "#{base}/test"
# This server only supports browserchannel protocol version **8**.
# I have no idea if 400 is the right error here.
return sendError res, 400, 'Version 8 required' unless query.VER is '8'
#### Phase 1: Server info
# The client is requests host prefixes. The server responds with an array of
# ['hostprefix' or null, 'blockedprefix' or null].
#
# > Actually, I think you might be able to return [] if neither hostPrefix nor blockedPrefix
# > is defined. (Thats what google wave seems to do)
#
# - **hostprefix** is subdomain prepended onto the hostname of each request.
# This gets around browser connection limits. Using this requires a bank of
# configured DNS entries and SSL certificates if you're using HTTPS.
#
# - **blockedprefix** provides network admins a way to blacklist browserchannel
# requests. It is not supported by node-browserchannel.
if query.MODE == 'init' and req.method == 'GET'
hostPrefix = getHostPrefix()
blockedPrefix = null # Blocked prefixes aren't supported.
# We add an extra special header to tell the client that this server likes
# json-encoded forward channel data over form urlencoded channel data.
#
# It might be easier to put these headers in the response body or increment the
# version, but that might conflict with future browserchannel versions.
headers = Object.create standardHeaders
headers['X-Accept'] = 'application/json; application/x-www-form-urlencoded'
# This is a straight-up normal HTTP request like the forward channel requests.
# We don't use the funny iframe write methods.
res.writeHead 200, 'OK', headers
res.end(JSON.stringify [hostPrefix, blockedPrefix])
else
#### Phase 2: Buffering proxy detection
# The client is trying to determine if their connection is buffered or unbuffered.
# We reply with '11111', then 2 seconds later '2'.
#
# The client should get the data in 2 chunks - but they won't if there's a misbehaving
# corporate proxy in the way or something.
writeHead()
writeRaw '11111'
setTimeout (-> writeRaw '2'; end()), 2000
# # BrowserChannel connection
#
# Once a client has finished testing its connection, it connects.
#
# BrowserChannel communicates through two connections:
#
# - The **forward channel** is used for the client to send data to the server.
# It uses a **POST** request for each message.
# - The **back channel** is used to get data back from the server. This uses a
# hanging **GET** request. If chunking is disallowed (ie, if the proxy buffers)
# then the back channel is closed after each server message.
else if pathname == "#{base}/bind"
# I'm copying the behaviour of unknown SIDs below. I don't know how the client
# is supposed to detect this error, but, eh. The other choice is to `return writeError ...`
return sendError res, 400, 'Version 8 required' unless query.VER is '8'
# All browserchannel connections have an associated client object. A client
# is created immediately if the connection is new.
if query.SID
session = sessions[query.SID]
# This is a special error code for the client. It tells the client to abandon its
# connection request and reconnect.
#
# For some reason, google replies with the same response on HTTP and HTML requests here.
# I'll follow suit, though its a little weird. Maybe I should do the same with all client
# errors?
return sendError res, 400, 'Unknown SID' unless session
session._acknowledgeArrays query.AID if query.AID? and session
# ### Forward Channel
if req.method == 'POST'
if session == undefined
# The session is new! Make them a new session object and let the
# application know.
session = createSession req.connection.remoteAddress, query, req.headers
onConnect? session
bufferPostData req, (data) ->
try
data = decodeData req, data
session._receivedData query.RID, data
catch e
console.warn 'Error parsing forward channel', e.stack
return sendError res, 400, 'Bad data'
if session.state is 'init'
# The initial forward channel request is also used as a backchannel for the server's
# initial data (session id, etc). This connection is a little bit special - it is always
# encoded using length-prefixed json encoding and it is closed as soon as the first chunk is
# sent.
res.writeHead 200, 'OK', standardHeaders
session._setBackChannel res, CI:1, TYPE:'xmlhttp', RID:'rpc'
session.flush()
else if session.state is 'closed'
# If the onConnect handler called close() immediately, session.state can be already closed at this point.
# I'll assume there was an authentication problem and treat this as a forbidden connection attempt.
sendError res, 403, 'Forbidden'
else
# On normal forward channels, we reply to the request by telling the session
# if our backchannel is still live and telling it how many unconfirmed
# arrays we have.
response = JSON.stringify session._backChannelStatus()
res.writeHead 200, 'OK', standardHeaders
res.end "#{response.length}\n#{response}"
else if req.method is 'GET'
# ### Back channel
#
# GET messages are usually backchannel requests (server->client). Backchannel messages are handled
# by the session object.
if query.TYPE in ['xmlhttp', 'html']
return sendError res, 400, 'Invalid SID' if typeof query.SID != 'string' && query.SID.length < 5
return sendError res, 400, 'Expected RPC' unless query.RID is 'rpc'
writeHead()
session._setBackChannel res, query
# The client can manually disconnect by making a GET request with TYPE='terminate'
else if query.TYPE is 'terminate'
# We don't send any data in the response to the disconnect message.
#
# The client implements this using an img= appended to the page.
session?._disconnectAt query.RID
res.writeHead 200, 'OK', standardHeaders
res.end()
else
res.writeHead 405, 'Method Not Allowed', standardHeaders
res.end "Method not allowed"
else
# We'll 404 the user instead of letting another handler take care of it.
# Users shouldn't be using the specified URL prefix for anything else.
res.writeHead 404, 'Not Found', standardHeaders
res.end "Not found"
middleware.close = -> session.close() for id, session of sessions
# This is an undocumented, untested treat - if you pass the HTTP server / connect server to
# browserchannel through the options object, it can attach a close listener for you automatically.
options.server?.on 'close', middleware.close
middleware
# This will override the timer methods (`setInterval`, etc) with the testing stub versions,
# which are way faster.
browserChannel._setTimerMethods = (methods) ->
{setInterval, clearInterval, setTimeout, clearTimeout, Date} = methods
|
[
{
"context": "City in Atom\n# @version 0.1.0\n# @author Synder <info@synder.dev>\n#\n# @website https://githu",
"end": 89,
"score": 0.998927116394043,
"start": 83,
"tag": "NAME",
"value": "Synder"
},
{
"context": "tom\n# @version 0.1.0\n# @author Synder <in... | lib/extension.coffee | SynderDEV/atom-city | 2 | ###!
# Atom City - Build your City in Atom
# @version 0.1.0
# @author Synder <info@synder.dev>
#
# @website https://github.com/SynderDEV/atom-city
# @license MIT
# @copyright Copyright © 2021 Synder <info@synder.dev>
# @copyright Copyright © 2020 - 2021 pytesNET <info@pytes.net>
###
Game = require './core/game'
module.exports =
###
# Hook - Initialize Plugin
# @param {state}
###
initialize: (state) ->
atomCityMenu =
label: 'Atom City'
command: 'atom-city:start'
atomCityCmds =
'atom-city:start': (event) -> atom.workspace.open 'atom-city://create'
atomCityPlay = (data) ->
atom.workspace.open data
# Loop Savestates
savestates = Game.getSaveStates()
for state of savestates
if ! atomCityMenu.submenu
atomCityMenu.submenu = []
key = savestates[state]
atomCityMenu.submenu.push {
label: state,
command: 'atom-city:play:' + key
}
atomCityCmds['atom-city:play:' + key] = atomCityPlay.bind @, key
# Handle 'Create New'
if Object.keys(atomCityMenu).indexOf('submenu') >= 0
atomCityMenu.submenu.unshift {
type: 'separator'
}
atomCityMenu.submenu.unshift {
label: 'Create a new City',
command: 'atom-city:start'
}
# Add App Menu & Commands
atom.menu.add [ { label: 'Packages', submenu: [ atomCityMenu ] } ]
atom.commands.add 'atom-workspace', atomCityCmds
@
###
# Hook - Activate Plugin
# @param {state}
###
activate: (state) ->
atom.views.addViewProvider Game.constructor
atom.workspace.onDidChangeActivePaneItem (pane) => Game.onPanelChange pane
atom.workspace.addOpener (uri) ->
if uri.startsWith 'atom-city://'
game = new Game uri
game
###
# Hook - Deactivate Plugin
# @param {state}
###
deactivate: (state) ->
@
| 198828 | ###!
# Atom City - Build your City in Atom
# @version 0.1.0
# @author <NAME> <<EMAIL>>
#
# @website https://github.com/SynderDEV/atom-city
# @license MIT
# @copyright Copyright © 2021 Synder <<EMAIL>>
# @copyright Copyright © 2020 - 2021 pytesNET <<EMAIL>>
###
Game = require './core/game'
module.exports =
###
# Hook - Initialize Plugin
# @param {state}
###
initialize: (state) ->
atomCityMenu =
label: 'Atom City'
command: 'atom-city:start'
atomCityCmds =
'atom-city:start': (event) -> atom.workspace.open 'atom-city://create'
atomCityPlay = (data) ->
atom.workspace.open data
# Loop Savestates
savestates = Game.getSaveStates()
for state of savestates
if ! atomCityMenu.submenu
atomCityMenu.submenu = []
key = savestates[state]
atomCityMenu.submenu.push {
label: state,
command: 'atom-city:play:' + key
}
atomCityCmds['atom-city:play:' + key] = atomCityPlay.bind @, key
# Handle 'Create New'
if Object.keys(atomCityMenu).indexOf('submenu') >= 0
atomCityMenu.submenu.unshift {
type: 'separator'
}
atomCityMenu.submenu.unshift {
label: 'Create a new City',
command: 'atom-city:start'
}
# Add App Menu & Commands
atom.menu.add [ { label: 'Packages', submenu: [ atomCityMenu ] } ]
atom.commands.add 'atom-workspace', atomCityCmds
@
###
# Hook - Activate Plugin
# @param {state}
###
activate: (state) ->
atom.views.addViewProvider Game.constructor
atom.workspace.onDidChangeActivePaneItem (pane) => Game.onPanelChange pane
atom.workspace.addOpener (uri) ->
if uri.startsWith 'atom-city://'
game = new Game uri
game
###
# Hook - Deactivate Plugin
# @param {state}
###
deactivate: (state) ->
@
| true | ###!
# Atom City - Build your City in Atom
# @version 0.1.0
# @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
#
# @website https://github.com/SynderDEV/atom-city
# @license MIT
# @copyright Copyright © 2021 Synder <PI:EMAIL:<EMAIL>END_PI>
# @copyright Copyright © 2020 - 2021 pytesNET <PI:EMAIL:<EMAIL>END_PI>
###
Game = require './core/game'
module.exports =
###
# Hook - Initialize Plugin
# @param {state}
###
initialize: (state) ->
atomCityMenu =
label: 'Atom City'
command: 'atom-city:start'
atomCityCmds =
'atom-city:start': (event) -> atom.workspace.open 'atom-city://create'
atomCityPlay = (data) ->
atom.workspace.open data
# Loop Savestates
savestates = Game.getSaveStates()
for state of savestates
if ! atomCityMenu.submenu
atomCityMenu.submenu = []
key = savestates[state]
atomCityMenu.submenu.push {
label: state,
command: 'atom-city:play:' + key
}
atomCityCmds['atom-city:play:' + key] = atomCityPlay.bind @, key
# Handle 'Create New'
if Object.keys(atomCityMenu).indexOf('submenu') >= 0
atomCityMenu.submenu.unshift {
type: 'separator'
}
atomCityMenu.submenu.unshift {
label: 'Create a new City',
command: 'atom-city:start'
}
# Add App Menu & Commands
atom.menu.add [ { label: 'Packages', submenu: [ atomCityMenu ] } ]
atom.commands.add 'atom-workspace', atomCityCmds
@
###
# Hook - Activate Plugin
# @param {state}
###
activate: (state) ->
atom.views.addViewProvider Game.constructor
atom.workspace.onDidChangeActivePaneItem (pane) => Game.onPanelChange pane
atom.workspace.addOpener (uri) ->
if uri.startsWith 'atom-city://'
game = new Game uri
game
###
# Hook - Deactivate Plugin
# @param {state}
###
deactivate: (state) ->
@
|
[
{
"context": "13-11-15T00:00:00.000Z\",\n \"account\": \"1234567890\"\n },\n {\n \"label\": \"LIDL\"",
"end": 286,
"score": 0.9126906991004944,
"start": 277,
"tag": "KEY",
"value": "234567890"
},
{
"context": "13-11-20T00:00:00.000Z\",\n ... | tests/fixtures/weboob/operations.coffee | einSelbst/cozy-pfm | 0 | banks = require '../banks-all.json'
output = {}
for bank in banks
output[bank.uuid] = [
{
"label": "SNCF",
"raw": "SNCF Carte X1234",
"amount": "-15.00",
"rdate": "2013-11-15T00:00:00.000Z",
"account": "1234567890"
},
{
"label": "LIDL",
"raw": "LIDL Carte X1234",
"amount": "-5.00",
"rdate": "2013-11-20T00:00:00.000Z",
"account": "1234567890"
},
{
"label": "Salaire",
"raw": "Virement Cozycloud",
"amount": "700.00",
"rdate": "2013-11-10T00:00:00.000Z",
"account": "1234567890"
},
{
"label": "Salaire",
"raw": "Virement Cozycloud",
"amount": "700.00",
"rdate": "2013-10-10T00:00:00.000Z",
"account": "1234567890"
},
{
"label": "Loyer",
"raw": "Virement agence Paris Champs Elysées",
"amount": "-300.00",
"rdate": "2013-11-04T00:00:00.000Z",
"account": "1234567890"
},
{
"label": "Loyer",
"raw": "Virement agence Paris Champs Elysées",
"amount": "-300.00",
"rdate": "2013-10-04T00:00:00.000Z",
"account": "1234567890"
},
{
"account": "1234567890",
"label": "ARMAND THIERY",
"rdate": "2013-11-09T00:00:00.000Z",
"amount": "-179",
"raw": "CARTE X1234 09/11 ARMAND THIERY"
},
{
"label": "Intermarché courses",
"raw": "Intermarché CARTE X1234",
"amount": "-100.00",
"rdate": "2013-11-19T00:00:00.000Z",
"account": "1234567890"
},
{
"account": "1234567890",
"label": "LIDL 0304",
"rdate": "2013-11-19T00:00:00.000Z",
"amount": "-11.35",
"raw": "CARTE X1234 19/11 LIDL 0304"
},
{
"label": "Pot de vin",
"raw": "Pot de vin CARTE LES3Suisses",
"amount": "100.00",
"rdate": "2013-11-15T00:00:00.000Z",
"account": "0987654321"
},
{
"label": "Virement interne",
"raw": "Virement interne pour le compte 1234567890",
"amount": "-200.00",
"rdate": "2013-11-20T00:00:00.000Z",
"account": "0987654321"
}
]
module.exports = output | 128167 | banks = require '../banks-all.json'
output = {}
for bank in banks
output[bank.uuid] = [
{
"label": "SNCF",
"raw": "SNCF Carte X1234",
"amount": "-15.00",
"rdate": "2013-11-15T00:00:00.000Z",
"account": "1<KEY>"
},
{
"label": "LIDL",
"raw": "LIDL Carte X1234",
"amount": "-5.00",
"rdate": "2013-11-20T00:00:00.000Z",
"account": "1<KEY>"
},
{
"label": "<NAME>",
"raw": "<NAME> Co<NAME>",
"amount": "700.00",
"rdate": "2013-11-10T00:00:00.000Z",
"account": "1<KEY>"
},
{
"label": "<NAME>",
"raw": "<NAME>irement Cozy<NAME>",
"amount": "700.00",
"rdate": "2013-10-10T00:00:00.000Z",
"account": "1<KEY>"
},
{
"label": "<NAME>",
"raw": "Virement agence Paris Champs Elysées",
"amount": "-300.00",
"rdate": "2013-11-04T00:00:00.000Z",
"account": "1<KEY>"
},
{
"label": "<NAME>",
"raw": "Virement agence Paris Champs Elysées",
"amount": "-300.00",
"rdate": "2013-10-04T00:00:00.000Z",
"account": "<KEY>3<KEY>678<KEY>"
},
{
"account": "<KEY>",
"label": "<NAME>",
"rdate": "2013-11-09T00:00:00.000Z",
"amount": "-179",
"raw": "CARTE X1234 09/11 <NAME>"
},
{
"label": "Intermarché courses",
"raw": "Intermarché CARTE X1234",
"amount": "-100.00",
"rdate": "2013-11-19T00:00:00.000Z",
"account": "<KEY>"
},
{
"account": "<KEY>",
"label": "LIDL 0304",
"rdate": "2013-11-19T00:00:00.000Z",
"amount": "-11.35",
"raw": "CARTE X1234 19/11 LIDL 0304"
},
{
"label": "Pot de vin",
"raw": "Pot de vin <NAME>",
"amount": "100.00",
"rdate": "2013-11-15T00:00:00.000Z",
"account": "098<KEY>"
},
{
"label": "Virement interne",
"raw": "Virement interne pour le compte 1234567890",
"amount": "-200.00",
"rdate": "2013-11-20T00:00:00.000Z",
"account": "0987654321"
}
]
module.exports = output | true | banks = require '../banks-all.json'
output = {}
for bank in banks
output[bank.uuid] = [
{
"label": "SNCF",
"raw": "SNCF Carte X1234",
"amount": "-15.00",
"rdate": "2013-11-15T00:00:00.000Z",
"account": "1PI:KEY:<KEY>END_PI"
},
{
"label": "LIDL",
"raw": "LIDL Carte X1234",
"amount": "-5.00",
"rdate": "2013-11-20T00:00:00.000Z",
"account": "1PI:KEY:<KEY>END_PI"
},
{
"label": "PI:NAME:<NAME>END_PI",
"raw": "PI:NAME:<NAME>END_PI CoPI:NAME:<NAME>END_PI",
"amount": "700.00",
"rdate": "2013-11-10T00:00:00.000Z",
"account": "1PI:KEY:<KEY>END_PI"
},
{
"label": "PI:NAME:<NAME>END_PI",
"raw": "PI:NAME:<NAME>END_PIirement CozyPI:NAME:<NAME>END_PI",
"amount": "700.00",
"rdate": "2013-10-10T00:00:00.000Z",
"account": "1PI:KEY:<KEY>END_PI"
},
{
"label": "PI:NAME:<NAME>END_PI",
"raw": "Virement agence Paris Champs Elysées",
"amount": "-300.00",
"rdate": "2013-11-04T00:00:00.000Z",
"account": "1PI:KEY:<KEY>END_PI"
},
{
"label": "PI:NAME:<NAME>END_PI",
"raw": "Virement agence Paris Champs Elysées",
"amount": "-300.00",
"rdate": "2013-10-04T00:00:00.000Z",
"account": "PI:KEY:<KEY>END_PI3PI:KEY:<KEY>END_PI678PI:KEY:<KEY>END_PI"
},
{
"account": "PI:KEY:<KEY>END_PI",
"label": "PI:NAME:<NAME>END_PI",
"rdate": "2013-11-09T00:00:00.000Z",
"amount": "-179",
"raw": "CARTE X1234 09/11 PI:NAME:<NAME>END_PI"
},
{
"label": "Intermarché courses",
"raw": "Intermarché CARTE X1234",
"amount": "-100.00",
"rdate": "2013-11-19T00:00:00.000Z",
"account": "PI:KEY:<KEY>END_PI"
},
{
"account": "PI:KEY:<KEY>END_PI",
"label": "LIDL 0304",
"rdate": "2013-11-19T00:00:00.000Z",
"amount": "-11.35",
"raw": "CARTE X1234 19/11 LIDL 0304"
},
{
"label": "Pot de vin",
"raw": "Pot de vin PI:NAME:<NAME>END_PI",
"amount": "100.00",
"rdate": "2013-11-15T00:00:00.000Z",
"account": "098PI:KEY:<KEY>END_PI"
},
{
"label": "Virement interne",
"raw": "Virement interne pour le compte 1234567890",
"amount": "-200.00",
"rdate": "2013-11-20T00:00:00.000Z",
"account": "0987654321"
}
]
module.exports = output |
[
{
"context": " \n# Copyright 2011 - 2013 Mark Masse (OSS project WRML.org) \n# ",
"end": 824,
"score": 0.9998018145561218,
"start": 814,
"tag": "NAME",
"value": "Mark Masse"
}
] | wrmldoc/js/app/controllers/_base.coffee | wrml/wrml | 47 | #
# WRML - Web Resource Modeling Language
# __ __ ______ __ __ __
# /\ \ _ \ \ /\ == \ /\ "-./ \ /\ \
# \ \ \/ ".\ \\ \ __< \ \ \-./\ \\ \ \____
# \ \__/".~\_\\ \_\ \_\\ \_\ \ \_\\ \_____\
# \/_/ \/_/ \/_/ /_/ \/_/ \/_/ \/_____/
#
# http://www.wrml.org
#
# Copyright 2011 - 2013 Mark Masse (OSS project WRML.org)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# CoffeeScript
@Wrmldoc.module "Controllers", (Controllers, App, Backbone, Marionette, $, _) ->
class Controllers.Base extends Marionette.Controller
constructor: (options = {}) ->
@region = options.region or App.request "default:region"
super options
@_instance_id = _.uniqueId("controller")
App.execute "register:instance", @, @_instance_id
close: (args...) ->
delete @region
delete @options
super args
App.execute "unregister:instance", @, @_instance_id
show: (view) ->
@listenTo view, "close", @close
@region.show view | 26966 | #
# WRML - Web Resource Modeling Language
# __ __ ______ __ __ __
# /\ \ _ \ \ /\ == \ /\ "-./ \ /\ \
# \ \ \/ ".\ \\ \ __< \ \ \-./\ \\ \ \____
# \ \__/".~\_\\ \_\ \_\\ \_\ \ \_\\ \_____\
# \/_/ \/_/ \/_/ /_/ \/_/ \/_/ \/_____/
#
# http://www.wrml.org
#
# Copyright 2011 - 2013 <NAME> (OSS project WRML.org)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# CoffeeScript
@Wrmldoc.module "Controllers", (Controllers, App, Backbone, Marionette, $, _) ->
class Controllers.Base extends Marionette.Controller
constructor: (options = {}) ->
@region = options.region or App.request "default:region"
super options
@_instance_id = _.uniqueId("controller")
App.execute "register:instance", @, @_instance_id
close: (args...) ->
delete @region
delete @options
super args
App.execute "unregister:instance", @, @_instance_id
show: (view) ->
@listenTo view, "close", @close
@region.show view | true | #
# WRML - Web Resource Modeling Language
# __ __ ______ __ __ __
# /\ \ _ \ \ /\ == \ /\ "-./ \ /\ \
# \ \ \/ ".\ \\ \ __< \ \ \-./\ \\ \ \____
# \ \__/".~\_\\ \_\ \_\\ \_\ \ \_\\ \_____\
# \/_/ \/_/ \/_/ /_/ \/_/ \/_/ \/_____/
#
# http://www.wrml.org
#
# Copyright 2011 - 2013 PI:NAME:<NAME>END_PI (OSS project WRML.org)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# CoffeeScript
@Wrmldoc.module "Controllers", (Controllers, App, Backbone, Marionette, $, _) ->
class Controllers.Base extends Marionette.Controller
constructor: (options = {}) ->
@region = options.region or App.request "default:region"
super options
@_instance_id = _.uniqueId("controller")
App.execute "register:instance", @, @_instance_id
close: (args...) ->
delete @region
delete @options
super args
App.execute "unregister:instance", @, @_instance_id
show: (view) ->
@listenTo view, "close", @close
@region.show view |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.