entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "NodeId = transactionId ? nodeId\n\n flowNodeKey = \"#{sendTo}:#{redisNodeId}\"\n bucket = @_getBucket()\n\n @isIntervalAvaila",
"end": 717,
"score": 0.9780714511871338,
"start": 691,
"tag": "KEY",
"value": "\"#{sendTo}:#{redisNodeId}\""
},
{
"context": "}, call... | src/ping-job-processor.coffee | octoblu/nanocyte-interval-service | 0 | _ = require 'lodash'
async = require 'async'
debug = require('debug')('nanocyte-interval-service:ping-job-processor')
MeshbluHttp = require 'meshblu-http'
{Stats} = require 'fast-stats'
class PingJobProcessor
constructor: (options, dependencies={}) ->
{
@meshbluConfig
@client
@kue
@pingInterval
@queue
@registerJobProcessor
} = options
@MeshbluHttp = dependencies.MeshbluHttp ? MeshbluHttp
processJob: (job, ignore, callback) =>
debug 'processing ping job', job.id, 'data', JSON.stringify job.data
{sendTo, nodeId, transactionId} = job.data
redisNodeId = transactionId ? nodeId
flowNodeKey = "#{sendTo}:#{redisNodeId}"
bucket = @_getBucket()
@isIntervalAvailable {sendTo, nodeId, transactionId}, (error, intervalAvailable) =>
return callback error if error?
return callback() unless intervalAvailable
@isSystemStable (error, systemStable) =>
return callback error if error?
debug 'isSystemStable?', systemStable
@clearIfUnstable systemStable, (error) =>
return callback error if error?
@client.hget "ping:count:total", flowNodeKey, (error, count) =>
return callback error if error?
debug 'ping:count:total', flowNodeKey, count
count ?= 0
if systemStable && parseInt(count || 0) >= 5
return @_disableJobs({pingJobId: job.id, sendTo, nodeId, transactionId}, callback)
keys = [
"interval/uuid/#{sendTo}/#{redisNodeId}"
"interval/token/#{sendTo}/#{redisNodeId}"
]
@client.mget keys, (error, result) =>
return callback error if error?
[uuid, token] = result
config = _.defaults {uuid, token}, @meshbluConfig
meshbluHttp = new @MeshbluHttp config
message =
devices: [sendTo]
topic: 'ping'
payload:
from: nodeId
nodeId: nodeId
transactionId: transactionId
bucket: @_getBucket()
timestamp: _.now()
tasks = [
async.apply @client.hincrby, "ping:count:#{bucket}", 'total:ping', 1
async.apply meshbluHttp.message, message
async.apply @registerJobProcessor.createPingJob, job.data
]
if systemStable
tasks.push async.apply @client.hincrby, 'ping:count:total', flowNodeKey, 1
async.series tasks, callback
_disableJobs: ({pingJobId, sendTo, nodeId, transactionId}, callback) =>
redisNodeId = transactionId ? nodeId
@client.smembers "interval/job/#{sendTo}/#{redisNodeId}", (err, jobIds) =>
jobIds ?= []
async.eachSeries jobIds, async.apply(@_disableJob, {sendTo, nodeId, transactionId}), (error) =>
return callback error if error?
@_removeJob pingJobId, callback
_disableJob: ({sendTo, nodeId, transactionId}, jobId, callback) =>
redisNodeId = transactionId ? nodeId
@client.hset 'ping:disabled', "#{sendTo}:#{redisNodeId}", Date.now(), callback
_removeJob: (jobId, callback) =>
return callback() unless jobId?
@kue.Job.get jobId, (error, job) =>
job.remove() unless error?
callback()
isSystemStable: (callback) =>
bucket1 = @_getBucket 2
bucket2 = @_getBucket 3
bucket3 = @_getBucket 4
bucket4 = @_getBucket 5
bucket5 = @_getBucket 6
tasks = [
async.apply @client.hmget, "ping:count:#{bucket1}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket2}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket3}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket4}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket5}", 'total:ping', 'total:pong'
]
async.series tasks, (error, results) =>
return callback error if error?
stats = new Stats()
undefinedPongs = _.some results, ([ping,pong]) => _.isUndefined(pong) || _.isNull(pong)
return callback null, false if undefinedPongs
zeroPongs = _.some results, ([ping,pong]) => parseInt(pong) == 0
return callback null, false if zeroPongs
_.each results, ([ping,pong]) =>
avg = parseInt(pong) / parseInt(ping)
stats.push avg if pong?
dev = stats.σ()
callback null, dev == 0 || dev.toFixed(2) <= 0.01
_getBucket: (modifier=0) =>
_.floor (Date.now() - (@pingInterval*modifier)) / @pingInterval
clearIfUnstable: (stable, callback) =>
return callback() if stable
@client.del 'ping:count:total', callback
isIntervalAvailable: ({sendTo,nodeId,transactionId}, callback) =>
redisNodeId = transactionId ? nodeId
@client.hexists 'ping:disabled', "#{sendTo}:#{redisNodeId}", (error, exists) =>
return callback error if error?
return callback null, false if exists == 1
@client.exists "interval/active/#{sendTo}/#{redisNodeId}", (error, exists) =>
return callback error if error?
callback null, exists == 1
module.exports = PingJobProcessor
| 151065 | _ = require 'lodash'
async = require 'async'
debug = require('debug')('nanocyte-interval-service:ping-job-processor')
MeshbluHttp = require 'meshblu-http'
{Stats} = require 'fast-stats'
class PingJobProcessor
constructor: (options, dependencies={}) ->
{
@meshbluConfig
@client
@kue
@pingInterval
@queue
@registerJobProcessor
} = options
@MeshbluHttp = dependencies.MeshbluHttp ? MeshbluHttp
processJob: (job, ignore, callback) =>
debug 'processing ping job', job.id, 'data', JSON.stringify job.data
{sendTo, nodeId, transactionId} = job.data
redisNodeId = transactionId ? nodeId
flowNodeKey = <KEY>
bucket = @_getBucket()
@isIntervalAvailable {sendTo, nodeId, transactionId}, (error, intervalAvailable) =>
return callback error if error?
return callback() unless intervalAvailable
@isSystemStable (error, systemStable) =>
return callback error if error?
debug 'isSystemStable?', systemStable
@clearIfUnstable systemStable, (error) =>
return callback error if error?
@client.hget "ping:count:total", flowNodeKey, (error, count) =>
return callback error if error?
debug 'ping:count:total', flowNodeKey, count
count ?= 0
if systemStable && parseInt(count || 0) >= 5
return @_disableJobs({pingJobId: job.id, sendTo, nodeId, transactionId}, callback)
keys = [
"<KEY>
"<KEY>
]
@client.mget keys, (error, result) =>
return callback error if error?
[uuid, token] = result
config = _.defaults {uuid, token}, @meshbluConfig
meshbluHttp = new @MeshbluHttp config
message =
devices: [sendTo]
topic: 'ping'
payload:
from: nodeId
nodeId: nodeId
transactionId: transactionId
bucket: @_getBucket()
timestamp: _.now()
tasks = [
async.apply @client.hincrby, "ping:count:#{bucket}", 'total:ping', 1
async.apply meshbluHttp.message, message
async.apply @registerJobProcessor.createPingJob, job.data
]
if systemStable
tasks.push async.apply @client.hincrby, 'ping:count:total', flowNodeKey, 1
async.series tasks, callback
_disableJobs: ({pingJobId, sendTo, nodeId, transactionId}, callback) =>
redisNodeId = transactionId ? nodeId
@client.smembers "interval/job/#{sendTo}/#{redisNodeId}", (err, jobIds) =>
jobIds ?= []
async.eachSeries jobIds, async.apply(@_disableJob, {sendTo, nodeId, transactionId}), (error) =>
return callback error if error?
@_removeJob pingJobId, callback
_disableJob: ({sendTo, nodeId, transactionId}, jobId, callback) =>
redisNodeId = transactionId ? nodeId
@client.hset 'ping:disabled', "#{sendTo}:#{redisNodeId}", Date.now(), callback
_removeJob: (jobId, callback) =>
return callback() unless jobId?
@kue.Job.get jobId, (error, job) =>
job.remove() unless error?
callback()
isSystemStable: (callback) =>
bucket1 = @_getBucket 2
bucket2 = @_getBucket 3
bucket3 = @_getBucket 4
bucket4 = @_getBucket 5
bucket5 = @_getBucket 6
tasks = [
async.apply @client.hmget, "ping:count:#{bucket1}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket2}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket3}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket4}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket5}", 'total:ping', 'total:pong'
]
async.series tasks, (error, results) =>
return callback error if error?
stats = new Stats()
undefinedPongs = _.some results, ([ping,pong]) => _.isUndefined(pong) || _.isNull(pong)
return callback null, false if undefinedPongs
zeroPongs = _.some results, ([ping,pong]) => parseInt(pong) == 0
return callback null, false if zeroPongs
_.each results, ([ping,pong]) =>
avg = parseInt(pong) / parseInt(ping)
stats.push avg if pong?
dev = stats.σ()
callback null, dev == 0 || dev.toFixed(2) <= 0.01
_getBucket: (modifier=0) =>
_.floor (Date.now() - (@pingInterval*modifier)) / @pingInterval
clearIfUnstable: (stable, callback) =>
return callback() if stable
@client.del 'ping:count:total', callback
isIntervalAvailable: ({sendTo,nodeId,transactionId}, callback) =>
redisNodeId = transactionId ? nodeId
@client.hexists 'ping:disabled', "#{sendTo}:#{redisNodeId}", (error, exists) =>
return callback error if error?
return callback null, false if exists == 1
@client.exists "interval/active/#{sendTo}/#{redisNodeId}", (error, exists) =>
return callback error if error?
callback null, exists == 1
module.exports = PingJobProcessor
| true | _ = require 'lodash'
async = require 'async'
debug = require('debug')('nanocyte-interval-service:ping-job-processor')
MeshbluHttp = require 'meshblu-http'
{Stats} = require 'fast-stats'
class PingJobProcessor
constructor: (options, dependencies={}) ->
{
@meshbluConfig
@client
@kue
@pingInterval
@queue
@registerJobProcessor
} = options
@MeshbluHttp = dependencies.MeshbluHttp ? MeshbluHttp
processJob: (job, ignore, callback) =>
debug 'processing ping job', job.id, 'data', JSON.stringify job.data
{sendTo, nodeId, transactionId} = job.data
redisNodeId = transactionId ? nodeId
flowNodeKey = PI:KEY:<KEY>END_PI
bucket = @_getBucket()
@isIntervalAvailable {sendTo, nodeId, transactionId}, (error, intervalAvailable) =>
return callback error if error?
return callback() unless intervalAvailable
@isSystemStable (error, systemStable) =>
return callback error if error?
debug 'isSystemStable?', systemStable
@clearIfUnstable systemStable, (error) =>
return callback error if error?
@client.hget "ping:count:total", flowNodeKey, (error, count) =>
return callback error if error?
debug 'ping:count:total', flowNodeKey, count
count ?= 0
if systemStable && parseInt(count || 0) >= 5
return @_disableJobs({pingJobId: job.id, sendTo, nodeId, transactionId}, callback)
keys = [
"PI:KEY:<KEY>END_PI
"PI:KEY:<KEY>END_PI
]
@client.mget keys, (error, result) =>
return callback error if error?
[uuid, token] = result
config = _.defaults {uuid, token}, @meshbluConfig
meshbluHttp = new @MeshbluHttp config
message =
devices: [sendTo]
topic: 'ping'
payload:
from: nodeId
nodeId: nodeId
transactionId: transactionId
bucket: @_getBucket()
timestamp: _.now()
tasks = [
async.apply @client.hincrby, "ping:count:#{bucket}", 'total:ping', 1
async.apply meshbluHttp.message, message
async.apply @registerJobProcessor.createPingJob, job.data
]
if systemStable
tasks.push async.apply @client.hincrby, 'ping:count:total', flowNodeKey, 1
async.series tasks, callback
_disableJobs: ({pingJobId, sendTo, nodeId, transactionId}, callback) =>
redisNodeId = transactionId ? nodeId
@client.smembers "interval/job/#{sendTo}/#{redisNodeId}", (err, jobIds) =>
jobIds ?= []
async.eachSeries jobIds, async.apply(@_disableJob, {sendTo, nodeId, transactionId}), (error) =>
return callback error if error?
@_removeJob pingJobId, callback
_disableJob: ({sendTo, nodeId, transactionId}, jobId, callback) =>
redisNodeId = transactionId ? nodeId
@client.hset 'ping:disabled', "#{sendTo}:#{redisNodeId}", Date.now(), callback
_removeJob: (jobId, callback) =>
return callback() unless jobId?
@kue.Job.get jobId, (error, job) =>
job.remove() unless error?
callback()
isSystemStable: (callback) =>
bucket1 = @_getBucket 2
bucket2 = @_getBucket 3
bucket3 = @_getBucket 4
bucket4 = @_getBucket 5
bucket5 = @_getBucket 6
tasks = [
async.apply @client.hmget, "ping:count:#{bucket1}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket2}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket3}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket4}", 'total:ping', 'total:pong'
async.apply @client.hmget, "ping:count:#{bucket5}", 'total:ping', 'total:pong'
]
async.series tasks, (error, results) =>
return callback error if error?
stats = new Stats()
undefinedPongs = _.some results, ([ping,pong]) => _.isUndefined(pong) || _.isNull(pong)
return callback null, false if undefinedPongs
zeroPongs = _.some results, ([ping,pong]) => parseInt(pong) == 0
return callback null, false if zeroPongs
_.each results, ([ping,pong]) =>
avg = parseInt(pong) / parseInt(ping)
stats.push avg if pong?
dev = stats.σ()
callback null, dev == 0 || dev.toFixed(2) <= 0.01
_getBucket: (modifier=0) =>
_.floor (Date.now() - (@pingInterval*modifier)) / @pingInterval
clearIfUnstable: (stable, callback) =>
return callback() if stable
@client.del 'ping:count:total', callback
isIntervalAvailable: ({sendTo,nodeId,transactionId}, callback) =>
redisNodeId = transactionId ? nodeId
@client.hexists 'ping:disabled', "#{sendTo}:#{redisNodeId}", (error, exists) =>
return callback error if error?
return callback null, false if exists == 1
@client.exists "interval/active/#{sendTo}/#{redisNodeId}", (error, exists) =>
return callback error if error?
callback null, exists == 1
module.exports = PingJobProcessor
|
[
{
"context": "\t\t\t\treturn Users.create\n\t\t\t\t\t\t\tusername: req.body.username\n\t\t\t\t\t\t\temail: req.body.email\n\t\t\t\t\t\t\tpassword: req",
"end": 463,
"score": 0.4882405400276184,
"start": 455,
"tag": "USERNAME",
"value": "username"
},
{
"context": "name\n\t\t\t\t\t\t\t... | src/server/routes/post/register_account.coffee | leftiness/ton-hub | 1 | seq = require "sequelize"
moment = require "moment"
xsrf = require "../../common/XsrfService.js"
messages = require "../../common/messages.js"
Users = require "../../database/Users.js"
route =
verb: "post"
path: "/register_account"
fn: [
xsrf.check
(req, res, next) ->
Users.destroy
where:
createdAt: $lt: moment().subtract(5, "days").valueOf()
active: false
.then () ->
return Users.create
username: req.body.username
email: req.body.email
password: req.body.password
displayName: req.body.displayName
.then (model) ->
# TODO Send email with activation code and username.
# TODO Remove console.log of activation code
console.log "Activation: #{model.activationCode}"
url = "/activate_account?email=#{req.body.email}"
return res.redirect url
.catch seq.ValidationError, (err) ->
paths = []
err.errors.forEach (error) ->
paths.push error.path
message = "#{messages.invalid.field}: #{paths}"
url = "/register_account?error=#{message}"
return res.redirect url
.catch (err) -> return next err
]
module.exports = route
| 87701 | seq = require "sequelize"
moment = require "moment"
xsrf = require "../../common/XsrfService.js"
messages = require "../../common/messages.js"
Users = require "../../database/Users.js"
route =
verb: "post"
path: "/register_account"
fn: [
xsrf.check
(req, res, next) ->
Users.destroy
where:
createdAt: $lt: moment().subtract(5, "days").valueOf()
active: false
.then () ->
return Users.create
username: req.body.username
email: req.body.email
password: <PASSWORD>
displayName: req.body.displayName
.then (model) ->
# TODO Send email with activation code and username.
# TODO Remove console.log of activation code
console.log "Activation: #{model.activationCode}"
url = "/activate_account?email=#{req.body.email}"
return res.redirect url
.catch seq.ValidationError, (err) ->
paths = []
err.errors.forEach (error) ->
paths.push error.path
message = "#{messages.invalid.field}: #{paths}"
url = "/register_account?error=#{message}"
return res.redirect url
.catch (err) -> return next err
]
module.exports = route
| true | seq = require "sequelize"
moment = require "moment"
xsrf = require "../../common/XsrfService.js"
messages = require "../../common/messages.js"
Users = require "../../database/Users.js"
route =
verb: "post"
path: "/register_account"
fn: [
xsrf.check
(req, res, next) ->
Users.destroy
where:
createdAt: $lt: moment().subtract(5, "days").valueOf()
active: false
.then () ->
return Users.create
username: req.body.username
email: req.body.email
password: PI:PASSWORD:<PASSWORD>END_PI
displayName: req.body.displayName
.then (model) ->
# TODO Send email with activation code and username.
# TODO Remove console.log of activation code
console.log "Activation: #{model.activationCode}"
url = "/activate_account?email=#{req.body.email}"
return res.redirect url
.catch seq.ValidationError, (err) ->
paths = []
err.errors.forEach (error) ->
paths.push error.path
message = "#{messages.invalid.field}: #{paths}"
url = "/register_account?error=#{message}"
return res.redirect url
.catch (err) -> return next err
]
module.exports = route
|
[
{
"context": " author: Ember.belongsTo(Author, { key: 'author_id' })\n )\n\n Post.adapter = Ember.FixtureAdapte",
"end": 430,
"score": 0.9028123617172241,
"start": 428,
"tag": "KEY",
"value": "id"
}
] | assets/scripts/spec/unit/incomplete_spec.coffee | rwjblue/travis-web | 0 | fullPostHash = null
Post = null
Author = null
module "Travis.Model - incomplete",
setup: ->
fullPostHash = {
id: '1',
title: 'foo',
published_at: 'today',
author_id: '1'
}
Author = Travis.Model.extend()
Post = Travis.Model.extend(
title: Ember.attr('string'),
publishedAt: Ember.attr('string', key: 'published_at'),
author: Ember.belongsTo(Author, { key: 'author_id' })
)
Post.adapter = Ember.FixtureAdapter.create()
test "record is marked as incomplete if attributes are missing when loading a record", ->
Post.load([{ id: '1', title: 'foo' }])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
equal(record.get('title'), 'foo', 'attributes should be accessible')
test "record is marked as complete if missing attributes are loaded", ->
Post.load([{ id: '1', title: 'foo' }])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be complete')
equal(record.get('title'), 'foo', 'attributes should be accessible')
record.load('1', fullPostHash)
ok(!record.get('incomplete'), 'record should be complete')
test "record is marked as incomplete if belongsTo key is missing", ->
delete(fullPostHash.author_id)
Post.load([fullPostHash])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
test "proeperty can be loaded as null, which means that the property is still loaded", ->
fullPostHash.author_id = null
fullPostHash.title = null
Post.load([fullPostHash])
record = Post.find('1')
ok(!record.get('incomplete'), 'record should be complete')
equal(record.get('title'), null, 'title should be null')
test "when accessing missing property, record is loaded", ->
Post.FIXTURES = [fullPostHash]
Post.load([{ id: '1' }])
record = null
Ember.run -> record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
publishedAt = null
Ember.run -> publishedAt = record.get('publishedAt')
ok(!publishedAt, 'publishedAt should be missing')
stop()
setTimeout( ->
start()
Ember.run -> publishedAt = record.get('publishedAt')
equal(publishedAt, 'today', 'publishedAt should be loaded')
ok(!record.get('incomplete'), 'record should be complete')
, 50)
| 14948 | fullPostHash = null
Post = null
Author = null
module "Travis.Model - incomplete",
setup: ->
fullPostHash = {
id: '1',
title: 'foo',
published_at: 'today',
author_id: '1'
}
Author = Travis.Model.extend()
Post = Travis.Model.extend(
title: Ember.attr('string'),
publishedAt: Ember.attr('string', key: 'published_at'),
author: Ember.belongsTo(Author, { key: 'author_<KEY>' })
)
Post.adapter = Ember.FixtureAdapter.create()
test "record is marked as incomplete if attributes are missing when loading a record", ->
Post.load([{ id: '1', title: 'foo' }])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
equal(record.get('title'), 'foo', 'attributes should be accessible')
test "record is marked as complete if missing attributes are loaded", ->
Post.load([{ id: '1', title: 'foo' }])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be complete')
equal(record.get('title'), 'foo', 'attributes should be accessible')
record.load('1', fullPostHash)
ok(!record.get('incomplete'), 'record should be complete')
test "record is marked as incomplete if belongsTo key is missing", ->
delete(fullPostHash.author_id)
Post.load([fullPostHash])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
test "proeperty can be loaded as null, which means that the property is still loaded", ->
fullPostHash.author_id = null
fullPostHash.title = null
Post.load([fullPostHash])
record = Post.find('1')
ok(!record.get('incomplete'), 'record should be complete')
equal(record.get('title'), null, 'title should be null')
test "when accessing missing property, record is loaded", ->
Post.FIXTURES = [fullPostHash]
Post.load([{ id: '1' }])
record = null
Ember.run -> record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
publishedAt = null
Ember.run -> publishedAt = record.get('publishedAt')
ok(!publishedAt, 'publishedAt should be missing')
stop()
setTimeout( ->
start()
Ember.run -> publishedAt = record.get('publishedAt')
equal(publishedAt, 'today', 'publishedAt should be loaded')
ok(!record.get('incomplete'), 'record should be complete')
, 50)
| true | fullPostHash = null
Post = null
Author = null
module "Travis.Model - incomplete",
setup: ->
fullPostHash = {
id: '1',
title: 'foo',
published_at: 'today',
author_id: '1'
}
Author = Travis.Model.extend()
Post = Travis.Model.extend(
title: Ember.attr('string'),
publishedAt: Ember.attr('string', key: 'published_at'),
author: Ember.belongsTo(Author, { key: 'author_PI:KEY:<KEY>END_PI' })
)
Post.adapter = Ember.FixtureAdapter.create()
test "record is marked as incomplete if attributes are missing when loading a record", ->
Post.load([{ id: '1', title: 'foo' }])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
equal(record.get('title'), 'foo', 'attributes should be accessible')
test "record is marked as complete if missing attributes are loaded", ->
Post.load([{ id: '1', title: 'foo' }])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be complete')
equal(record.get('title'), 'foo', 'attributes should be accessible')
record.load('1', fullPostHash)
ok(!record.get('incomplete'), 'record should be complete')
test "record is marked as incomplete if belongsTo key is missing", ->
delete(fullPostHash.author_id)
Post.load([fullPostHash])
record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
test "proeperty can be loaded as null, which means that the property is still loaded", ->
fullPostHash.author_id = null
fullPostHash.title = null
Post.load([fullPostHash])
record = Post.find('1')
ok(!record.get('incomplete'), 'record should be complete')
equal(record.get('title'), null, 'title should be null')
test "when accessing missing property, record is loaded", ->
Post.FIXTURES = [fullPostHash]
Post.load([{ id: '1' }])
record = null
Ember.run -> record = Post.find('1')
ok(record.get('incomplete'), 'record should be incomplete')
publishedAt = null
Ember.run -> publishedAt = record.get('publishedAt')
ok(!publishedAt, 'publishedAt should be missing')
stop()
setTimeout( ->
start()
Ember.run -> publishedAt = record.get('publishedAt')
equal(publishedAt, 'today', 'publishedAt should be loaded')
ok(!record.get('incomplete'), 'record should be complete')
, 50)
|
[
{
"context": "ists.' }\n user.resetPasswordToken = token\n user.resetPasswordExpires = Date.",
"end": 1154,
"score": 0.8519141674041748,
"start": 1149,
"tag": "PASSWORD",
"value": "token"
},
{
"context": "User.findOne({\n resetPasswordToke... | src/routes/authentication.coffee | ureport-web/ureport-s | 3 | express = require('express')
router = express.Router()
User = require('../models/user')
passport = require('passport')
async = require('async');
crypto = require('crypto');
router.post '/login', (req, res, next) ->
passport.authenticate('local', (err, user, info) ->
if (info)
return next(info)
if (err)
return next(err)
if (!user)
return res.redirect('/login')
req.login user, (err) ->
if (err)
return next(err)
return res.json({session: req.session})
)(req, res, next);
router.post '/logout', (req, res, next) ->
req.logout()
res.json { msg: 'You are log out'}
router.post '/forgot', (req, res, next) ->
async.waterfall [
(done) ->
crypto.randomBytes 20, (err, buf) ->
token = buf.toString('hex')
done(err, token)
(token, done) ->
User.findOne { email: req.body.email }, (err, user) ->
if !user
res.json { "msg": 'No account with that email address exists.' }
user.resetPasswordToken = token
user.resetPasswordExpires = Date.now() + 3600000
# 1 hour
user.save (err) ->
done(err, token, user)
(token, user, done) ->
done(null, {
token : token
})
], (err, data) ->
if err
return next(err)
res.json { success: true, token: data.token }
router.get '/reset/:token', (req, res, next) ->
User.findOne({
resetPasswordToken: req.params.token,
resetPasswordExpires: $gt: Date.now()
})
.exec((err, user) ->
return next(err) if err
if !user
res.json { "msg": 'Password reset token is invalid or has expired.'}
res.json { success: true, user: user }
);
router.post '/reset/:token', (req, res, next) ->
async.waterfall [
(done) ->
User.findOne({
resetPasswordToken: req.params.token,
resetPasswordExpires: $gt: Date.now()
})
.exec((err, user) ->
return next(err) if err
if !user
res.json { success: false, msg: 'Password reset token is invalid or has expired.'}
user.password = req.body.password;
user.resetPasswordToken = undefined;
user.resetPasswordExpires = undefined;
user.save (err) ->
req.logIn user, (err) ->
done(err, user)
);
(user, done) ->
done(null, {
user : user
})
], (err, data) ->
if err
return next(err)
res.json { success: true, msg: 'Reset now at ' + data.link }
module.exports = router | 96962 | express = require('express')
router = express.Router()
User = require('../models/user')
passport = require('passport')
async = require('async');
crypto = require('crypto');
router.post '/login', (req, res, next) ->
passport.authenticate('local', (err, user, info) ->
if (info)
return next(info)
if (err)
return next(err)
if (!user)
return res.redirect('/login')
req.login user, (err) ->
if (err)
return next(err)
return res.json({session: req.session})
)(req, res, next);
router.post '/logout', (req, res, next) ->
req.logout()
res.json { msg: 'You are log out'}
router.post '/forgot', (req, res, next) ->
async.waterfall [
(done) ->
crypto.randomBytes 20, (err, buf) ->
token = buf.toString('hex')
done(err, token)
(token, done) ->
User.findOne { email: req.body.email }, (err, user) ->
if !user
res.json { "msg": 'No account with that email address exists.' }
user.resetPasswordToken = <PASSWORD>
user.resetPasswordExpires = Date.now() + 3600000
# 1 hour
user.save (err) ->
done(err, token, user)
(token, user, done) ->
done(null, {
token : token
})
], (err, data) ->
if err
return next(err)
res.json { success: true, token: data.token }
router.get '/reset/:token', (req, res, next) ->
User.findOne({
resetPasswordToken: req.params.token,
resetPasswordExpires: $gt: Date.now()
})
.exec((err, user) ->
return next(err) if err
if !user
res.json { "msg": 'Password reset token is invalid or has expired.'}
res.json { success: true, user: user }
);
router.post '/reset/:token', (req, res, next) ->
async.waterfall [
(done) ->
User.findOne({
resetPasswordToken: <PASSWORD>,
resetPasswordExpires: $gt: Date.now()
})
.exec((err, user) ->
return next(err) if err
if !user
res.json { success: false, msg: 'Password reset token is invalid or has expired.'}
user.password = <PASSWORD>;
user.resetPasswordToken = <PASSWORD>;
user.resetPasswordExpires = <PASSWORD>;
user.save (err) ->
req.logIn user, (err) ->
done(err, user)
);
(user, done) ->
done(null, {
user : user
})
], (err, data) ->
if err
return next(err)
res.json { success: true, msg: 'Reset now at ' + data.link }
module.exports = router | true | express = require('express')
router = express.Router()
User = require('../models/user')
passport = require('passport')
async = require('async');
crypto = require('crypto');
router.post '/login', (req, res, next) ->
passport.authenticate('local', (err, user, info) ->
if (info)
return next(info)
if (err)
return next(err)
if (!user)
return res.redirect('/login')
req.login user, (err) ->
if (err)
return next(err)
return res.json({session: req.session})
)(req, res, next);
router.post '/logout', (req, res, next) ->
req.logout()
res.json { msg: 'You are log out'}
router.post '/forgot', (req, res, next) ->
async.waterfall [
(done) ->
crypto.randomBytes 20, (err, buf) ->
token = buf.toString('hex')
done(err, token)
(token, done) ->
User.findOne { email: req.body.email }, (err, user) ->
if !user
res.json { "msg": 'No account with that email address exists.' }
user.resetPasswordToken = PI:PASSWORD:<PASSWORD>END_PI
user.resetPasswordExpires = Date.now() + 3600000
# 1 hour
user.save (err) ->
done(err, token, user)
(token, user, done) ->
done(null, {
token : token
})
], (err, data) ->
if err
return next(err)
res.json { success: true, token: data.token }
router.get '/reset/:token', (req, res, next) ->
User.findOne({
resetPasswordToken: req.params.token,
resetPasswordExpires: $gt: Date.now()
})
.exec((err, user) ->
return next(err) if err
if !user
res.json { "msg": 'Password reset token is invalid or has expired.'}
res.json { success: true, user: user }
);
router.post '/reset/:token', (req, res, next) ->
async.waterfall [
(done) ->
User.findOne({
resetPasswordToken: PI:PASSWORD:<PASSWORD>END_PI,
resetPasswordExpires: $gt: Date.now()
})
.exec((err, user) ->
return next(err) if err
if !user
res.json { success: false, msg: 'Password reset token is invalid or has expired.'}
user.password = PI:PASSWORD:<PASSWORD>END_PI;
user.resetPasswordToken = PI:PASSWORD:<PASSWORD>END_PI;
user.resetPasswordExpires = PI:PASSWORD:<PASSWORD>END_PI;
user.save (err) ->
req.logIn user, (err) ->
done(err, user)
);
(user, done) ->
done(null, {
user : user
})
], (err, data) ->
if err
return next(err)
res.json { success: true, msg: 'Reset now at ' + data.link }
module.exports = router |
[
{
"context": "/icons/jobtong@2x.png'\n\n @_fields.push\n key: 'webhookUrl'\n type: 'text'\n readOnly: true\n descript",
"end": 1450,
"score": 0.9533259868621826,
"start": 1440,
"tag": "KEY",
"value": "webhookUrl"
}
] | src/services/jobtong.coffee | jianliaoim/talk-services | 40 | _ = require 'lodash'
util = require '../util'
_receiveWebhook = ({body}) ->
payload = body or {}
texts = []
texts.push "性别:#{payload.sex}" if payload.sex
texts.push "年龄:#{payload.age}" if payload.age
texts.push "学历:#{payload.degree}" if payload.degree
texts.push "经验年限:#{payload.experience}" if payload.experience
texts.push "当前公司:#{payload.company}" if payload.company
texts.push "当前职位:#{payload.job_name}" if payload.job_name
texts.push "简历投递日期:#{payload.apply_at}" if payload.apply_at
message =
attachments: [
category: 'quote'
data:
title: payload.title
text: texts.join '\n'
redirectUrl: payload.url
imageUrl: payload.face_url
]
message
module.exports = ->
@title = '周伯通招聘'
@template = 'webhook'
@summary = util.i18n
zh: '周伯通,招人喜欢'
en: 'Jobtong makes job search more enjoyable.'
@description = util.i18n
zh: '周伯通招聘( http://www.jobtong.com ),2014年重装上阵,打造最懂互联网的招聘社区。是国内最知名的社会化招聘平台之一,倡导营销化、社会化的招聘服务,引领网络招聘行业新变革!'
en: "Jobtong (http://www.jobtong.com), starting in 2014, is trying to build a recruitment community that is most familiar with Internet. It's one of the most famous social recruitment platform in China. We are advocacing marketing and socialization recruitment services and leading the new revolution of online recruitment industry!"
@iconUrl = util.static 'images/icons/jobtong@2x.png'
@_fields.push
key: 'webhookUrl'
type: 'text'
readOnly: true
description: util.i18n
zh: 'Webhook URL'
en: 'Webhook URL'
@registerEvent 'service.webhook', _receiveWebhook
| 17120 | _ = require 'lodash'
util = require '../util'
_receiveWebhook = ({body}) ->
payload = body or {}
texts = []
texts.push "性别:#{payload.sex}" if payload.sex
texts.push "年龄:#{payload.age}" if payload.age
texts.push "学历:#{payload.degree}" if payload.degree
texts.push "经验年限:#{payload.experience}" if payload.experience
texts.push "当前公司:#{payload.company}" if payload.company
texts.push "当前职位:#{payload.job_name}" if payload.job_name
texts.push "简历投递日期:#{payload.apply_at}" if payload.apply_at
message =
attachments: [
category: 'quote'
data:
title: payload.title
text: texts.join '\n'
redirectUrl: payload.url
imageUrl: payload.face_url
]
message
module.exports = ->
@title = '周伯通招聘'
@template = 'webhook'
@summary = util.i18n
zh: '周伯通,招人喜欢'
en: 'Jobtong makes job search more enjoyable.'
@description = util.i18n
zh: '周伯通招聘( http://www.jobtong.com ),2014年重装上阵,打造最懂互联网的招聘社区。是国内最知名的社会化招聘平台之一,倡导营销化、社会化的招聘服务,引领网络招聘行业新变革!'
en: "Jobtong (http://www.jobtong.com), starting in 2014, is trying to build a recruitment community that is most familiar with Internet. It's one of the most famous social recruitment platform in China. We are advocacing marketing and socialization recruitment services and leading the new revolution of online recruitment industry!"
@iconUrl = util.static 'images/icons/jobtong@2x.png'
@_fields.push
key: '<KEY>'
type: 'text'
readOnly: true
description: util.i18n
zh: 'Webhook URL'
en: 'Webhook URL'
@registerEvent 'service.webhook', _receiveWebhook
| true | _ = require 'lodash'
util = require '../util'
_receiveWebhook = ({body}) ->
payload = body or {}
texts = []
texts.push "性别:#{payload.sex}" if payload.sex
texts.push "年龄:#{payload.age}" if payload.age
texts.push "学历:#{payload.degree}" if payload.degree
texts.push "经验年限:#{payload.experience}" if payload.experience
texts.push "当前公司:#{payload.company}" if payload.company
texts.push "当前职位:#{payload.job_name}" if payload.job_name
texts.push "简历投递日期:#{payload.apply_at}" if payload.apply_at
message =
attachments: [
category: 'quote'
data:
title: payload.title
text: texts.join '\n'
redirectUrl: payload.url
imageUrl: payload.face_url
]
message
module.exports = ->
@title = '周伯通招聘'
@template = 'webhook'
@summary = util.i18n
zh: '周伯通,招人喜欢'
en: 'Jobtong makes job search more enjoyable.'
@description = util.i18n
zh: '周伯通招聘( http://www.jobtong.com ),2014年重装上阵,打造最懂互联网的招聘社区。是国内最知名的社会化招聘平台之一,倡导营销化、社会化的招聘服务,引领网络招聘行业新变革!'
en: "Jobtong (http://www.jobtong.com), starting in 2014, is trying to build a recruitment community that is most familiar with Internet. It's one of the most famous social recruitment platform in China. We are advocacing marketing and socialization recruitment services and leading the new revolution of online recruitment industry!"
@iconUrl = util.static 'images/icons/jobtong@2x.png'
@_fields.push
key: 'PI:KEY:<KEY>END_PI'
type: 'text'
readOnly: true
description: util.i18n
zh: 'Webhook URL'
en: 'Webhook URL'
@registerEvent 'service.webhook', _receiveWebhook
|
[
{
"context": "equirements: ['lamp']\n },\n {\n name: 'bed',\n image: 'bed',\n price: 400,\n ",
"end": 2469,
"score": 0.9547386765480042,
"start": 2466,
"tag": "NAME",
"value": "bed"
},
{
"context": "equirements: ['lamp']\n },\n {\n name: 'f... | src/main.coffee | esenti/ld37-one-room | 0 | c = document.getElementById('draw')
ctx = c.getContext('2d')
delta = 0
now = 0
before = Date.now()
elapsed = 0
loading = 0
DEBUG = false
# c.width = window.innerWidth
# c.height = window.innerHeight
c.width = 800
c.height = 600
keysDown = {}
keysPressed = {}
window.addEventListener("keydown", (e) ->
keysDown[e.keyCode] = true
keysPressed[e.keyCode] = true
, false)
window.addEventListener("keyup", (e) ->
delete keysDown[e.keyCode]
, false)
setDelta = ->
now = Date.now()
delta = (now - before) / 1000
before = now
if not DEBUG
console.log = () ->
null
player =
x: 0
y: 0
speed: 4
color: '#ffffff'
rotation: 0
money: 750
energy: 100
hunger: 0
sanity: 100
state:
name: 'standing'
room =
width: 6
height: 8
images = {}
items = [
{
name: 'lamp',
image: 'lamp',
price: 20,
width: 1,
height: 1,
actions: [{
name: 'look at light'
f: ->
if Math.random() < 0.5
player.sanity = clamp(player.sanity + (Math.random() * 2), 0, 100)
makePopup('+sanity', 'green')
else
player.sanity = clamp(player.sanity - (Math.random() * 2), 0, 100)
makePopup('-sanity', 'red')
}]
requirements: []
},
{
name: 'computer',
image: 'computer',
price: 500,
width: 1,
height: 1,
actions: [{
name: 'work'
f: ->
if player.energy <= 20
makePopup('you\'re too tired to work', 'white')
return
player.money += 50
makePopup('+$50', 'green')
player.energy -= 20
makePopup('-energy', 'red', 0.5)
player.sanity -= (Math.random() * 14 + 6)
makePopup('-sanity', 'red', 1.0)
}, {
name: 'play'
f: ->
if player.energy <= 10
makePopup('you\'re too tired to play', 'white')
return
player.energy -= 10
makePopup('-energy', 'red')
player.sanity = clamp(player.sanity + Math.random() * 5, 0, 100)
makePopup('+sanity', 'green', 0.5)
},
]
requirements: ['lamp']
},
{
name: 'bed',
image: 'bed',
price: 400,
width: 1,
height: 2,
actions: [{
name: 'sleep',
f: ->
fadeOut(->
fadeIn()
)
player.energy = clamp(player.energy + Math.random() * 50 + 30, 0, 100)
player.hunger = clamp(player.hunger + Math.random() * 30, 0, 100)
makePopup('+energy', 'green', 2.0)
makePopup('+hunger', 'red', 2.5)
},
]
requirements: ['lamp']
},
{
name: 'fridge',
image: 'fridge',
price: 800,
width: 1,
height: 1,
actions: [{
name: 'eat',
f: ->
if player.money <= 20
makePopup('you can\'t afford that', 'white')
return
player.hunger = clamp(player.hunger - (Math.random() * 20 + 10), 0, 100)
makePopup('-hunger', 'green')
},
]
requirements: ['lamp']
},
]
itemsAvailable = []
itemPlaced = null
itemsBought = []
activeItem = null
buyMenu = false
ogre = false
fade = {
state: 'none'
}
clamp = (v, min, max) ->
if v < min then min else if v > max then max else v
collides = (a, b, as, bs) ->
a.x + as > b.x and a.x < b.x + bs and a.y + as > b.y and a.y < b.y + bs
loadImage = (name) ->
img = new Image()
console.log 'loading'
loading += 1
img.onload = ->
console.log 'loaded'
images[name] = img
loading -= 1
img.src = 'img/' + name + '.png'
TO_RADIANS = Math.PI/180
drawRotatedImage = (image, x, y, angle) ->
ctx.save()
ctx.translate(x, y)
ctx.rotate(angle * TO_RADIANS)
ctx.drawImage(image, -(image.width/2), -(image.height/2))
ctx.restore()
fadeOut = (cb) ->
if fade.state == 'none'
fade.state = 'out'
fade.step = 0.0
fade.cb = cb
fadeIn = (cb) ->
if fade.state == 'none'
fade.state = 'in'
fade.step = 1.0
fade.cb = cb
tick = ->
setDelta()
elapsed += delta
update(delta)
draw(delta)
keysPressed = {}
if not ogre
window.requestAnimationFrame(tick)
tileFree = (x, y) ->
if x < 0 or x >= room.width or y < 0 or y >= room.height
return false
if x == player.x and y == player.y
return false
for item in itemsBought
if x >= item.x and x < item.x + item.item.width and
y >= item.y and y < item.y + item.item.height
return false
return true
itemAvailable = (item) ->
if item.price > player.money
return false
requiredMissing = item.requirements.length
for r in item.requirements
for item in itemsBought
if item.item.name == r
requiredMissing -= 1
break
return requiredMissing == 0
popups = []
makePopup = (text, color, delay=0) ->
if color == 'green'
color = '#408358'
else if color == 'red'
color = '#8c291b'
popups.push
text: text
x: player.x * 32 + 16,
y: player.y * 32,
color: color,
delay: delay,
speed: 20,
duration: 1.5
getItemAt = (x, y) ->
for item in itemsBought
if x >= item.x and x < item.x + item.item.width and
y >= item.y and y < item.y + item.item.height
return item
update = (delta) ->
console.log keysDown
if fade.state == 'none'
if buyMenu
if keysPressed[66]
buyMenu = false
else
itemsAvailable = (item for item in items when itemAvailable(item))
for i in [0..3]
if keysPressed[49 + i] and itemsAvailable[i]
itemPlaced =
x: 0,
y: 0,
item: itemsAvailable[i],
if itemPlaced
if keysPressed[68]
itemPlaced.x += 1
if keysPressed[65]
itemPlaced.x -= 1
if keysPressed[83]
itemPlaced.y += 1
if keysPressed[87]
itemPlaced.y -= 1
if keysPressed[32]
placeValid = true
for x in [0..itemPlaced.item.width - 1]
for y in [0..itemPlaced.item.height - 1]
if not tileFree(itemPlaced.x + x, itemPlaced.y + y)
placeValid = false
if placeValid
itemsBought.push(itemPlaced)
player.money -= itemPlaced.item.price
makePopup('-$' + itemPlaced.item.price, 'red')
itemPlaced = null
buyMenu = false
else
if keysPressed[66]
buyMenu = true
if player.state.name == 'standing'
if keysDown[68]
player.rotation = 0
if tileFree(player.x + 1, player.y)
player.state =
name: 'moving'
direction: 'right'
if keysDown[65]
player.rotation = 180
if tileFree(player.x - 1, player.y)
player.state =
name: 'moving'
direction: 'left'
if keysDown[83]
player.rotation = 90
if tileFree(player.x, player.y + 1)
player.state =
name: 'moving'
direction: 'down'
if keysDown[87]
player.rotation = -90
if tileFree(player.x, player.y - 1)
player.state =
name: 'moving'
direction: 'up'
if keysPressed[72] and activeItem and activeItem.item.actions[0]
activeItem.item.actions[0].f()
if keysPressed[74] and activeItem and activeItem.item.actions[1]
activeItem.item.actions[1].f()
if keysPressed[75] and activeItem and activeItem.item.actions[2]
activeItem.item.actions[2].f()
if keysPressed[76] and activeItem and activeItem.item.actions[3]
activeItem.item.actions[3].f()
if player.state.name == 'moving'
if player.state.direction == 'right'
if !player.state.target
player.state.target = player.x + 1
player.x += delta * player.speed
if player.x >= player.state.target
player.x = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'left'
if !player.state.target
player.state.target = player.x - 1
player.x -= delta * player.speed
if player.x <= player.state.target
player.x = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'up'
if !player.state.target
player.state.target = player.y - 1
player.y -= delta * player.speed
if player.y <= player.state.target
player.y = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'down'
if !player.state.target
player.state.target = player.y + 1
player.y += delta * player.speed
if player.y >= player.state.target
player.y = Math.round(player.state.target)
player.state =
name: 'standing'
for popup in popups
popup.delay -= delta
if popup.delay <= 0
popup.y -= delta * popup.speed
popup.duration -= delta
if popup.duration <= 0
popup.delete = true
for popup, i in popups
if popup.delete
popups.splice(i, 1)
break
if Math.random() < 0.001
player.sanity -= Math.random() * 6
if player.hunger >= 80
player.sanity -= Math.random() * 8
if player.energy <= 10
player.sanity -= Math.random() * 8
makePopup('-sanity', 'red')
if Math.random() < 0.001
player.energy = clamp(player.energy - Math.random() * 6, 0, 100)
if player.hunger >= 80
player.energy = clamp(player.energy - Math.random() * 8, 0, 100)
makePopup('-energy', 'red')
if Math.random() < 0.001
player.hunger = clamp(player.hunger + Math.random() * 6, 0, 100)
makePopup('+hunger', 'red')
frontTile =
x: player.x
y: player.y
if player.rotation == 0
frontTile.x += 1
else if player.rotation == 90
frontTile.y += 1
else if player.rotation == 180
frontTile.x -= 1
else if player.rotation == -90
frontTile.y -= 1
activeItem = getItemAt(frontTile.x, frontTile.y)
if player.sanity <= 0
fadeOut(->
ogre = true)
if fade.state == 'out'
fade.step += delta
if fade.step >= 1.0
fade.state = 'none'
if fade.cb
fade.cb()
if fade.state == 'in'
fade.step -= delta
if fade.step <= 0.0
fade.state = 'none'
if fade.cb
fade.cb()
draw = (delta) ->
ctx.clearRect(0, 0, c.width, c.height)
ctx.save()
ctx.translate(32 * 10, 32 * 2)
for x in [0..room.width - 1]
drawRotatedImage(images['wall'], x * 32 + 16, -32 + 16, 0)
drawRotatedImage(images['wall_corner'], -32 + 16, -32 + 16, 0)
for y in [0..room.height - 1]
drawRotatedImage(images['wall'], room.width * 32 + 16, y * 32 + 16, 90)
drawRotatedImage(images['wall_corner'], room.width * 32 + 16, -32 + 16, 90)
for x in [0..room.width - 1]
drawRotatedImage(images['wall'], x * 32 + 16, room.height * 32 + 16, 180)
drawRotatedImage(images['wall_corner'], room.width * 32 + 16, room.height * 32 + 16, 180)
for y in [0..room.height - 1]
drawRotatedImage(images['wall'], -32 + 16, y * 32 + 16, 270)
drawRotatedImage(images['wall_corner'], -32 + 16, room.height * 32 + 16, 270)
for x in [0..room.width - 1]
for y in [0..room.height - 1]
ctx.drawImage(images['floor'], x * 32, y * 32)
drawRotatedImage(images['player'], player.x * 32 + 16, player.y * 32 + 16, player.rotation)
for item in itemsBought
ctx.drawImage(images[item.item.image], item.x * 32, item.y * 32)
if DEBUG
frontTile =
x: player.x
y: player.y
if player.rotation == 0
frontTile.x += 1
else if player.rotation == 90
frontTile.y += 1
else if player.rotation == 180
frontTile.x -= 1
else if player.rotation == -90
frontTile.y -= 1
ctx.fillStyle = 'rgba(0, 200, 0, 0.5)'
ctx.fillRect(frontTile.x * 32, frontTile.y * 32, 32, 32)
if itemPlaced
tintedName = itemPlaced.item.image + '-tinted'
if not images[tintedName]
fg = images[itemPlaced.item.image]
buffer = document.createElement('canvas')
buffer.width = fg.width
buffer.height = fg.height
bx = buffer.getContext('2d')
bx.fillStyle = '#FF0000'
bx.fillRect(0, 0, buffer.width, buffer.height)
bx.globalCompositeOperation = "destination-atop"
bx.drawImage(fg, 0, 0)
images[tintedName] = buffer
placeValid = true
for x in [0..itemPlaced.item.width - 1]
for y in [0..itemPlaced.item.height - 1]
if not tileFree(itemPlaced.x + x, itemPlaced.y + y)
placeValid = false
img = if placeValid then itemPlaced.item.image else tintedName
ctx.drawImage(images[img], itemPlaced.x * 32, itemPlaced.y * 32)
for popup in popups
if popup.delay <= 0
ctx.font = '14px Visitor'
ctx.textAlign = 'center'
ctx.fillStyle = popup.color
ctx.fillText(popup.text, popup.x, popup.y)
ctx.restore()
if buyMenu
ctx.fillStyle = '#222222'
ctx.fillRect(0, 400, 800, 600)
ctx.textAlign = 'center'
ctx.fillStyle = '#ffffff'
for item, i in itemsAvailable
ctx.fillText('[' + (i + 1) + ']', 160 * (i + 1), 430)
ctx.drawImage(images[item.image], 160 * (i + 1) - 16, 460)
ctx.fillText(item.name, 160 * (i + 1), 550)
ctx.fillText('$' + item.price, 160 * (i + 1), 566)
ctx.font = '14px Visitor'
ctx.textAlign = 'right'
ctx.fillStyle = '#ffffff'
ctx.fillText('energy', 50, 20)
ctx.fillText('hunger', 50, 32)
ctx.fillText('sanity', 50, 44)
ctx.fillText('$' + player.money, 700, 44)
ctx.fillRect(54, 14, player.energy, 6)
ctx.fillRect(54, 26, player.hunger, 6)
ctx.fillRect(54, 38, player.sanity, 6)
if DEBUG
ctx.fillText(Math.round(player.energy), 180, 20)
ctx.fillText(Math.round(player.hunger), 180, 32)
ctx.fillText(Math.round(player.sanity), 180, 44)
ctx.textAlign = 'left'
if itemPlaced
ctx.fillText('[space] place', 340, 344)
else if buyMenu
ctx.fillText('[B] close', 340, 344)
else
ctx.fillText('[B] buy', 340, 344)
buttons = ['H', 'J', 'K', 'L']
if activeItem
for action, i in activeItem.item.actions
ctx.fillText('[' + buttons[i] + '] ' + action.name, 340, 360 + i * 16)
ctx.textAlign = 'right'
if fade.state != 'none'
ctx.fillStyle = 'rgba(0, 0, 0, ' + fade.step + ')'
ctx.fillRect(0, 0, 800, 600)
if ogre
ctx.fillStyle = '#000000'
ctx.fillRect(0, 0, 800, 600)
ctx.font = '180px Visitor'
ctx.textAlign = 'center'
ctx.textBaseline = 'middle'
ctx.fillStyle = '#ffffff'
ctx.fillText('THE END', 400, 300)
do ->
w = window
for vendor in ['ms', 'moz', 'webkit', 'o']
break if w.requestAnimationFrame
w.requestAnimationFrame = w["#{vendor}RequestAnimationFrame"]
if not w.requestAnimationFrame
targetTime = 0
w.requestAnimationFrame = (callback) ->
targetTime = Math.max targetTime + 16, currentTime = +new Date
w.setTimeout (-> callback +new Date), targetTime - currentTime
for img in ['floor', 'lamp', 'computer', 'bed', 'fridge', 'player', 'wall', 'wall_corner']
loadImage(img)
load = ->
if loading
console.log(loading)
window.requestAnimationFrame(load)
else
console.log('All loaded!')
window.requestAnimationFrame(tick)
load()
| 45960 | c = document.getElementById('draw')
ctx = c.getContext('2d')
delta = 0
now = 0
before = Date.now()
elapsed = 0
loading = 0
DEBUG = false
# c.width = window.innerWidth
# c.height = window.innerHeight
c.width = 800
c.height = 600
keysDown = {}
keysPressed = {}
window.addEventListener("keydown", (e) ->
keysDown[e.keyCode] = true
keysPressed[e.keyCode] = true
, false)
window.addEventListener("keyup", (e) ->
delete keysDown[e.keyCode]
, false)
setDelta = ->
now = Date.now()
delta = (now - before) / 1000
before = now
if not DEBUG
console.log = () ->
null
player =
x: 0
y: 0
speed: 4
color: '#ffffff'
rotation: 0
money: 750
energy: 100
hunger: 0
sanity: 100
state:
name: 'standing'
room =
width: 6
height: 8
images = {}
items = [
{
name: 'lamp',
image: 'lamp',
price: 20,
width: 1,
height: 1,
actions: [{
name: 'look at light'
f: ->
if Math.random() < 0.5
player.sanity = clamp(player.sanity + (Math.random() * 2), 0, 100)
makePopup('+sanity', 'green')
else
player.sanity = clamp(player.sanity - (Math.random() * 2), 0, 100)
makePopup('-sanity', 'red')
}]
requirements: []
},
{
name: 'computer',
image: 'computer',
price: 500,
width: 1,
height: 1,
actions: [{
name: 'work'
f: ->
if player.energy <= 20
makePopup('you\'re too tired to work', 'white')
return
player.money += 50
makePopup('+$50', 'green')
player.energy -= 20
makePopup('-energy', 'red', 0.5)
player.sanity -= (Math.random() * 14 + 6)
makePopup('-sanity', 'red', 1.0)
}, {
name: 'play'
f: ->
if player.energy <= 10
makePopup('you\'re too tired to play', 'white')
return
player.energy -= 10
makePopup('-energy', 'red')
player.sanity = clamp(player.sanity + Math.random() * 5, 0, 100)
makePopup('+sanity', 'green', 0.5)
},
]
requirements: ['lamp']
},
{
name: '<NAME>',
image: 'bed',
price: 400,
width: 1,
height: 2,
actions: [{
name: 'sleep',
f: ->
fadeOut(->
fadeIn()
)
player.energy = clamp(player.energy + Math.random() * 50 + 30, 0, 100)
player.hunger = clamp(player.hunger + Math.random() * 30, 0, 100)
makePopup('+energy', 'green', 2.0)
makePopup('+hunger', 'red', 2.5)
},
]
requirements: ['lamp']
},
{
name: '<NAME>',
image: 'fridge',
price: 800,
width: 1,
height: 1,
actions: [{
name: 'eat',
f: ->
if player.money <= 20
makePopup('you can\'t afford that', 'white')
return
player.hunger = clamp(player.hunger - (Math.random() * 20 + 10), 0, 100)
makePopup('-hunger', 'green')
},
]
requirements: ['lamp']
},
]
itemsAvailable = []
itemPlaced = null
itemsBought = []
activeItem = null
buyMenu = false
ogre = false
fade = {
state: 'none'
}
clamp = (v, min, max) ->
if v < min then min else if v > max then max else v
collides = (a, b, as, bs) ->
a.x + as > b.x and a.x < b.x + bs and a.y + as > b.y and a.y < b.y + bs
loadImage = (name) ->
img = new Image()
console.log 'loading'
loading += 1
img.onload = ->
console.log 'loaded'
images[name] = img
loading -= 1
img.src = 'img/' + name + '.png'
TO_RADIANS = Math.PI/180
drawRotatedImage = (image, x, y, angle) ->
ctx.save()
ctx.translate(x, y)
ctx.rotate(angle * TO_RADIANS)
ctx.drawImage(image, -(image.width/2), -(image.height/2))
ctx.restore()
fadeOut = (cb) ->
if fade.state == 'none'
fade.state = 'out'
fade.step = 0.0
fade.cb = cb
fadeIn = (cb) ->
if fade.state == 'none'
fade.state = 'in'
fade.step = 1.0
fade.cb = cb
tick = ->
setDelta()
elapsed += delta
update(delta)
draw(delta)
keysPressed = {}
if not ogre
window.requestAnimationFrame(tick)
tileFree = (x, y) ->
if x < 0 or x >= room.width or y < 0 or y >= room.height
return false
if x == player.x and y == player.y
return false
for item in itemsBought
if x >= item.x and x < item.x + item.item.width and
y >= item.y and y < item.y + item.item.height
return false
return true
itemAvailable = (item) ->
if item.price > player.money
return false
requiredMissing = item.requirements.length
for r in item.requirements
for item in itemsBought
if item.item.name == r
requiredMissing -= 1
break
return requiredMissing == 0
popups = []
makePopup = (text, color, delay=0) ->
if color == 'green'
color = '#408358'
else if color == 'red'
color = '#8c291b'
popups.push
text: text
x: player.x * 32 + 16,
y: player.y * 32,
color: color,
delay: delay,
speed: 20,
duration: 1.5
getItemAt = (x, y) ->
for item in itemsBought
if x >= item.x and x < item.x + item.item.width and
y >= item.y and y < item.y + item.item.height
return item
update = (delta) ->
console.log keysDown
if fade.state == 'none'
if buyMenu
if keysPressed[66]
buyMenu = false
else
itemsAvailable = (item for item in items when itemAvailable(item))
for i in [0..3]
if keysPressed[49 + i] and itemsAvailable[i]
itemPlaced =
x: 0,
y: 0,
item: itemsAvailable[i],
if itemPlaced
if keysPressed[68]
itemPlaced.x += 1
if keysPressed[65]
itemPlaced.x -= 1
if keysPressed[83]
itemPlaced.y += 1
if keysPressed[87]
itemPlaced.y -= 1
if keysPressed[32]
placeValid = true
for x in [0..itemPlaced.item.width - 1]
for y in [0..itemPlaced.item.height - 1]
if not tileFree(itemPlaced.x + x, itemPlaced.y + y)
placeValid = false
if placeValid
itemsBought.push(itemPlaced)
player.money -= itemPlaced.item.price
makePopup('-$' + itemPlaced.item.price, 'red')
itemPlaced = null
buyMenu = false
else
if keysPressed[66]
buyMenu = true
if player.state.name == 'standing'
if keysDown[68]
player.rotation = 0
if tileFree(player.x + 1, player.y)
player.state =
name: 'moving'
direction: 'right'
if keysDown[65]
player.rotation = 180
if tileFree(player.x - 1, player.y)
player.state =
name: 'moving'
direction: 'left'
if keysDown[83]
player.rotation = 90
if tileFree(player.x, player.y + 1)
player.state =
name: 'moving'
direction: 'down'
if keysDown[87]
player.rotation = -90
if tileFree(player.x, player.y - 1)
player.state =
name: 'moving'
direction: 'up'
if keysPressed[72] and activeItem and activeItem.item.actions[0]
activeItem.item.actions[0].f()
if keysPressed[74] and activeItem and activeItem.item.actions[1]
activeItem.item.actions[1].f()
if keysPressed[75] and activeItem and activeItem.item.actions[2]
activeItem.item.actions[2].f()
if keysPressed[76] and activeItem and activeItem.item.actions[3]
activeItem.item.actions[3].f()
if player.state.name == 'moving'
if player.state.direction == 'right'
if !player.state.target
player.state.target = player.x + 1
player.x += delta * player.speed
if player.x >= player.state.target
player.x = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'left'
if !player.state.target
player.state.target = player.x - 1
player.x -= delta * player.speed
if player.x <= player.state.target
player.x = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'up'
if !player.state.target
player.state.target = player.y - 1
player.y -= delta * player.speed
if player.y <= player.state.target
player.y = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'down'
if !player.state.target
player.state.target = player.y + 1
player.y += delta * player.speed
if player.y >= player.state.target
player.y = Math.round(player.state.target)
player.state =
name: 'standing'
for popup in popups
popup.delay -= delta
if popup.delay <= 0
popup.y -= delta * popup.speed
popup.duration -= delta
if popup.duration <= 0
popup.delete = true
for popup, i in popups
if popup.delete
popups.splice(i, 1)
break
if Math.random() < 0.001
player.sanity -= Math.random() * 6
if player.hunger >= 80
player.sanity -= Math.random() * 8
if player.energy <= 10
player.sanity -= Math.random() * 8
makePopup('-sanity', 'red')
if Math.random() < 0.001
player.energy = clamp(player.energy - Math.random() * 6, 0, 100)
if player.hunger >= 80
player.energy = clamp(player.energy - Math.random() * 8, 0, 100)
makePopup('-energy', 'red')
if Math.random() < 0.001
player.hunger = clamp(player.hunger + Math.random() * 6, 0, 100)
makePopup('+hunger', 'red')
frontTile =
x: player.x
y: player.y
if player.rotation == 0
frontTile.x += 1
else if player.rotation == 90
frontTile.y += 1
else if player.rotation == 180
frontTile.x -= 1
else if player.rotation == -90
frontTile.y -= 1
activeItem = getItemAt(frontTile.x, frontTile.y)
if player.sanity <= 0
fadeOut(->
ogre = true)
if fade.state == 'out'
fade.step += delta
if fade.step >= 1.0
fade.state = 'none'
if fade.cb
fade.cb()
if fade.state == 'in'
fade.step -= delta
if fade.step <= 0.0
fade.state = 'none'
if fade.cb
fade.cb()
draw = (delta) ->
ctx.clearRect(0, 0, c.width, c.height)
ctx.save()
ctx.translate(32 * 10, 32 * 2)
for x in [0..room.width - 1]
drawRotatedImage(images['wall'], x * 32 + 16, -32 + 16, 0)
drawRotatedImage(images['wall_corner'], -32 + 16, -32 + 16, 0)
for y in [0..room.height - 1]
drawRotatedImage(images['wall'], room.width * 32 + 16, y * 32 + 16, 90)
drawRotatedImage(images['wall_corner'], room.width * 32 + 16, -32 + 16, 90)
for x in [0..room.width - 1]
drawRotatedImage(images['wall'], x * 32 + 16, room.height * 32 + 16, 180)
drawRotatedImage(images['wall_corner'], room.width * 32 + 16, room.height * 32 + 16, 180)
for y in [0..room.height - 1]
drawRotatedImage(images['wall'], -32 + 16, y * 32 + 16, 270)
drawRotatedImage(images['wall_corner'], -32 + 16, room.height * 32 + 16, 270)
for x in [0..room.width - 1]
for y in [0..room.height - 1]
ctx.drawImage(images['floor'], x * 32, y * 32)
drawRotatedImage(images['player'], player.x * 32 + 16, player.y * 32 + 16, player.rotation)
for item in itemsBought
ctx.drawImage(images[item.item.image], item.x * 32, item.y * 32)
if DEBUG
frontTile =
x: player.x
y: player.y
if player.rotation == 0
frontTile.x += 1
else if player.rotation == 90
frontTile.y += 1
else if player.rotation == 180
frontTile.x -= 1
else if player.rotation == -90
frontTile.y -= 1
ctx.fillStyle = 'rgba(0, 200, 0, 0.5)'
ctx.fillRect(frontTile.x * 32, frontTile.y * 32, 32, 32)
if itemPlaced
tintedName = itemPlaced.item.image + '-tinted'
if not images[tintedName]
fg = images[itemPlaced.item.image]
buffer = document.createElement('canvas')
buffer.width = fg.width
buffer.height = fg.height
bx = buffer.getContext('2d')
bx.fillStyle = '#FF0000'
bx.fillRect(0, 0, buffer.width, buffer.height)
bx.globalCompositeOperation = "destination-atop"
bx.drawImage(fg, 0, 0)
images[tintedName] = buffer
placeValid = true
for x in [0..itemPlaced.item.width - 1]
for y in [0..itemPlaced.item.height - 1]
if not tileFree(itemPlaced.x + x, itemPlaced.y + y)
placeValid = false
img = if placeValid then itemPlaced.item.image else tintedName
ctx.drawImage(images[img], itemPlaced.x * 32, itemPlaced.y * 32)
for popup in popups
if popup.delay <= 0
ctx.font = '14px Visitor'
ctx.textAlign = 'center'
ctx.fillStyle = popup.color
ctx.fillText(popup.text, popup.x, popup.y)
ctx.restore()
if buyMenu
ctx.fillStyle = '#222222'
ctx.fillRect(0, 400, 800, 600)
ctx.textAlign = 'center'
ctx.fillStyle = '#ffffff'
for item, i in itemsAvailable
ctx.fillText('[' + (i + 1) + ']', 160 * (i + 1), 430)
ctx.drawImage(images[item.image], 160 * (i + 1) - 16, 460)
ctx.fillText(item.name, 160 * (i + 1), 550)
ctx.fillText('$' + item.price, 160 * (i + 1), 566)
ctx.font = '14px Visitor'
ctx.textAlign = 'right'
ctx.fillStyle = '#ffffff'
ctx.fillText('energy', 50, 20)
ctx.fillText('hunger', 50, 32)
ctx.fillText('sanity', 50, 44)
ctx.fillText('$' + player.money, 700, 44)
ctx.fillRect(54, 14, player.energy, 6)
ctx.fillRect(54, 26, player.hunger, 6)
ctx.fillRect(54, 38, player.sanity, 6)
if DEBUG
ctx.fillText(Math.round(player.energy), 180, 20)
ctx.fillText(Math.round(player.hunger), 180, 32)
ctx.fillText(Math.round(player.sanity), 180, 44)
ctx.textAlign = 'left'
if itemPlaced
ctx.fillText('[space] place', 340, 344)
else if buyMenu
ctx.fillText('[B] close', 340, 344)
else
ctx.fillText('[B] buy', 340, 344)
buttons = ['H', 'J', 'K', 'L']
if activeItem
for action, i in activeItem.item.actions
ctx.fillText('[' + buttons[i] + '] ' + action.name, 340, 360 + i * 16)
ctx.textAlign = 'right'
if fade.state != 'none'
ctx.fillStyle = 'rgba(0, 0, 0, ' + fade.step + ')'
ctx.fillRect(0, 0, 800, 600)
if ogre
ctx.fillStyle = '#000000'
ctx.fillRect(0, 0, 800, 600)
ctx.font = '180px Visitor'
ctx.textAlign = 'center'
ctx.textBaseline = 'middle'
ctx.fillStyle = '#ffffff'
ctx.fillText('THE END', 400, 300)
do ->
w = window
for vendor in ['ms', 'moz', 'webkit', 'o']
break if w.requestAnimationFrame
w.requestAnimationFrame = w["#{vendor}RequestAnimationFrame"]
if not w.requestAnimationFrame
targetTime = 0
w.requestAnimationFrame = (callback) ->
targetTime = Math.max targetTime + 16, currentTime = +new Date
w.setTimeout (-> callback +new Date), targetTime - currentTime
for img in ['floor', 'lamp', 'computer', 'bed', 'fridge', 'player', 'wall', 'wall_corner']
loadImage(img)
load = ->
if loading
console.log(loading)
window.requestAnimationFrame(load)
else
console.log('All loaded!')
window.requestAnimationFrame(tick)
load()
| true | c = document.getElementById('draw')
ctx = c.getContext('2d')
delta = 0
now = 0
before = Date.now()
elapsed = 0
loading = 0
DEBUG = false
# c.width = window.innerWidth
# c.height = window.innerHeight
c.width = 800
c.height = 600
keysDown = {}
keysPressed = {}
window.addEventListener("keydown", (e) ->
keysDown[e.keyCode] = true
keysPressed[e.keyCode] = true
, false)
window.addEventListener("keyup", (e) ->
delete keysDown[e.keyCode]
, false)
setDelta = ->
now = Date.now()
delta = (now - before) / 1000
before = now
if not DEBUG
console.log = () ->
null
player =
x: 0
y: 0
speed: 4
color: '#ffffff'
rotation: 0
money: 750
energy: 100
hunger: 0
sanity: 100
state:
name: 'standing'
room =
width: 6
height: 8
images = {}
items = [
{
name: 'lamp',
image: 'lamp',
price: 20,
width: 1,
height: 1,
actions: [{
name: 'look at light'
f: ->
if Math.random() < 0.5
player.sanity = clamp(player.sanity + (Math.random() * 2), 0, 100)
makePopup('+sanity', 'green')
else
player.sanity = clamp(player.sanity - (Math.random() * 2), 0, 100)
makePopup('-sanity', 'red')
}]
requirements: []
},
{
name: 'computer',
image: 'computer',
price: 500,
width: 1,
height: 1,
actions: [{
name: 'work'
f: ->
if player.energy <= 20
makePopup('you\'re too tired to work', 'white')
return
player.money += 50
makePopup('+$50', 'green')
player.energy -= 20
makePopup('-energy', 'red', 0.5)
player.sanity -= (Math.random() * 14 + 6)
makePopup('-sanity', 'red', 1.0)
}, {
name: 'play'
f: ->
if player.energy <= 10
makePopup('you\'re too tired to play', 'white')
return
player.energy -= 10
makePopup('-energy', 'red')
player.sanity = clamp(player.sanity + Math.random() * 5, 0, 100)
makePopup('+sanity', 'green', 0.5)
},
]
requirements: ['lamp']
},
{
name: 'PI:NAME:<NAME>END_PI',
image: 'bed',
price: 400,
width: 1,
height: 2,
actions: [{
name: 'sleep',
f: ->
fadeOut(->
fadeIn()
)
player.energy = clamp(player.energy + Math.random() * 50 + 30, 0, 100)
player.hunger = clamp(player.hunger + Math.random() * 30, 0, 100)
makePopup('+energy', 'green', 2.0)
makePopup('+hunger', 'red', 2.5)
},
]
requirements: ['lamp']
},
{
name: 'PI:NAME:<NAME>END_PI',
image: 'fridge',
price: 800,
width: 1,
height: 1,
actions: [{
name: 'eat',
f: ->
if player.money <= 20
makePopup('you can\'t afford that', 'white')
return
player.hunger = clamp(player.hunger - (Math.random() * 20 + 10), 0, 100)
makePopup('-hunger', 'green')
},
]
requirements: ['lamp']
},
]
itemsAvailable = []
itemPlaced = null
itemsBought = []
activeItem = null
buyMenu = false
ogre = false
fade = {
state: 'none'
}
clamp = (v, min, max) ->
if v < min then min else if v > max then max else v
collides = (a, b, as, bs) ->
a.x + as > b.x and a.x < b.x + bs and a.y + as > b.y and a.y < b.y + bs
loadImage = (name) ->
img = new Image()
console.log 'loading'
loading += 1
img.onload = ->
console.log 'loaded'
images[name] = img
loading -= 1
img.src = 'img/' + name + '.png'
TO_RADIANS = Math.PI/180
drawRotatedImage = (image, x, y, angle) ->
ctx.save()
ctx.translate(x, y)
ctx.rotate(angle * TO_RADIANS)
ctx.drawImage(image, -(image.width/2), -(image.height/2))
ctx.restore()
fadeOut = (cb) ->
if fade.state == 'none'
fade.state = 'out'
fade.step = 0.0
fade.cb = cb
fadeIn = (cb) ->
if fade.state == 'none'
fade.state = 'in'
fade.step = 1.0
fade.cb = cb
tick = ->
setDelta()
elapsed += delta
update(delta)
draw(delta)
keysPressed = {}
if not ogre
window.requestAnimationFrame(tick)
tileFree = (x, y) ->
if x < 0 or x >= room.width or y < 0 or y >= room.height
return false
if x == player.x and y == player.y
return false
for item in itemsBought
if x >= item.x and x < item.x + item.item.width and
y >= item.y and y < item.y + item.item.height
return false
return true
itemAvailable = (item) ->
if item.price > player.money
return false
requiredMissing = item.requirements.length
for r in item.requirements
for item in itemsBought
if item.item.name == r
requiredMissing -= 1
break
return requiredMissing == 0
popups = []
makePopup = (text, color, delay=0) ->
if color == 'green'
color = '#408358'
else if color == 'red'
color = '#8c291b'
popups.push
text: text
x: player.x * 32 + 16,
y: player.y * 32,
color: color,
delay: delay,
speed: 20,
duration: 1.5
getItemAt = (x, y) ->
for item in itemsBought
if x >= item.x and x < item.x + item.item.width and
y >= item.y and y < item.y + item.item.height
return item
update = (delta) ->
console.log keysDown
if fade.state == 'none'
if buyMenu
if keysPressed[66]
buyMenu = false
else
itemsAvailable = (item for item in items when itemAvailable(item))
for i in [0..3]
if keysPressed[49 + i] and itemsAvailable[i]
itemPlaced =
x: 0,
y: 0,
item: itemsAvailable[i],
if itemPlaced
if keysPressed[68]
itemPlaced.x += 1
if keysPressed[65]
itemPlaced.x -= 1
if keysPressed[83]
itemPlaced.y += 1
if keysPressed[87]
itemPlaced.y -= 1
if keysPressed[32]
placeValid = true
for x in [0..itemPlaced.item.width - 1]
for y in [0..itemPlaced.item.height - 1]
if not tileFree(itemPlaced.x + x, itemPlaced.y + y)
placeValid = false
if placeValid
itemsBought.push(itemPlaced)
player.money -= itemPlaced.item.price
makePopup('-$' + itemPlaced.item.price, 'red')
itemPlaced = null
buyMenu = false
else
if keysPressed[66]
buyMenu = true
if player.state.name == 'standing'
if keysDown[68]
player.rotation = 0
if tileFree(player.x + 1, player.y)
player.state =
name: 'moving'
direction: 'right'
if keysDown[65]
player.rotation = 180
if tileFree(player.x - 1, player.y)
player.state =
name: 'moving'
direction: 'left'
if keysDown[83]
player.rotation = 90
if tileFree(player.x, player.y + 1)
player.state =
name: 'moving'
direction: 'down'
if keysDown[87]
player.rotation = -90
if tileFree(player.x, player.y - 1)
player.state =
name: 'moving'
direction: 'up'
if keysPressed[72] and activeItem and activeItem.item.actions[0]
activeItem.item.actions[0].f()
if keysPressed[74] and activeItem and activeItem.item.actions[1]
activeItem.item.actions[1].f()
if keysPressed[75] and activeItem and activeItem.item.actions[2]
activeItem.item.actions[2].f()
if keysPressed[76] and activeItem and activeItem.item.actions[3]
activeItem.item.actions[3].f()
if player.state.name == 'moving'
if player.state.direction == 'right'
if !player.state.target
player.state.target = player.x + 1
player.x += delta * player.speed
if player.x >= player.state.target
player.x = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'left'
if !player.state.target
player.state.target = player.x - 1
player.x -= delta * player.speed
if player.x <= player.state.target
player.x = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'up'
if !player.state.target
player.state.target = player.y - 1
player.y -= delta * player.speed
if player.y <= player.state.target
player.y = Math.round(player.state.target)
player.state =
name: 'standing'
if player.state.direction == 'down'
if !player.state.target
player.state.target = player.y + 1
player.y += delta * player.speed
if player.y >= player.state.target
player.y = Math.round(player.state.target)
player.state =
name: 'standing'
for popup in popups
popup.delay -= delta
if popup.delay <= 0
popup.y -= delta * popup.speed
popup.duration -= delta
if popup.duration <= 0
popup.delete = true
for popup, i in popups
if popup.delete
popups.splice(i, 1)
break
if Math.random() < 0.001
player.sanity -= Math.random() * 6
if player.hunger >= 80
player.sanity -= Math.random() * 8
if player.energy <= 10
player.sanity -= Math.random() * 8
makePopup('-sanity', 'red')
if Math.random() < 0.001
player.energy = clamp(player.energy - Math.random() * 6, 0, 100)
if player.hunger >= 80
player.energy = clamp(player.energy - Math.random() * 8, 0, 100)
makePopup('-energy', 'red')
if Math.random() < 0.001
player.hunger = clamp(player.hunger + Math.random() * 6, 0, 100)
makePopup('+hunger', 'red')
frontTile =
x: player.x
y: player.y
if player.rotation == 0
frontTile.x += 1
else if player.rotation == 90
frontTile.y += 1
else if player.rotation == 180
frontTile.x -= 1
else if player.rotation == -90
frontTile.y -= 1
activeItem = getItemAt(frontTile.x, frontTile.y)
if player.sanity <= 0
fadeOut(->
ogre = true)
if fade.state == 'out'
fade.step += delta
if fade.step >= 1.0
fade.state = 'none'
if fade.cb
fade.cb()
if fade.state == 'in'
fade.step -= delta
if fade.step <= 0.0
fade.state = 'none'
if fade.cb
fade.cb()
draw = (delta) ->
ctx.clearRect(0, 0, c.width, c.height)
ctx.save()
ctx.translate(32 * 10, 32 * 2)
for x in [0..room.width - 1]
drawRotatedImage(images['wall'], x * 32 + 16, -32 + 16, 0)
drawRotatedImage(images['wall_corner'], -32 + 16, -32 + 16, 0)
for y in [0..room.height - 1]
drawRotatedImage(images['wall'], room.width * 32 + 16, y * 32 + 16, 90)
drawRotatedImage(images['wall_corner'], room.width * 32 + 16, -32 + 16, 90)
for x in [0..room.width - 1]
drawRotatedImage(images['wall'], x * 32 + 16, room.height * 32 + 16, 180)
drawRotatedImage(images['wall_corner'], room.width * 32 + 16, room.height * 32 + 16, 180)
for y in [0..room.height - 1]
drawRotatedImage(images['wall'], -32 + 16, y * 32 + 16, 270)
drawRotatedImage(images['wall_corner'], -32 + 16, room.height * 32 + 16, 270)
for x in [0..room.width - 1]
for y in [0..room.height - 1]
ctx.drawImage(images['floor'], x * 32, y * 32)
drawRotatedImage(images['player'], player.x * 32 + 16, player.y * 32 + 16, player.rotation)
for item in itemsBought
ctx.drawImage(images[item.item.image], item.x * 32, item.y * 32)
if DEBUG
frontTile =
x: player.x
y: player.y
if player.rotation == 0
frontTile.x += 1
else if player.rotation == 90
frontTile.y += 1
else if player.rotation == 180
frontTile.x -= 1
else if player.rotation == -90
frontTile.y -= 1
ctx.fillStyle = 'rgba(0, 200, 0, 0.5)'
ctx.fillRect(frontTile.x * 32, frontTile.y * 32, 32, 32)
if itemPlaced
tintedName = itemPlaced.item.image + '-tinted'
if not images[tintedName]
fg = images[itemPlaced.item.image]
buffer = document.createElement('canvas')
buffer.width = fg.width
buffer.height = fg.height
bx = buffer.getContext('2d')
bx.fillStyle = '#FF0000'
bx.fillRect(0, 0, buffer.width, buffer.height)
bx.globalCompositeOperation = "destination-atop"
bx.drawImage(fg, 0, 0)
images[tintedName] = buffer
placeValid = true
for x in [0..itemPlaced.item.width - 1]
for y in [0..itemPlaced.item.height - 1]
if not tileFree(itemPlaced.x + x, itemPlaced.y + y)
placeValid = false
img = if placeValid then itemPlaced.item.image else tintedName
ctx.drawImage(images[img], itemPlaced.x * 32, itemPlaced.y * 32)
for popup in popups
if popup.delay <= 0
ctx.font = '14px Visitor'
ctx.textAlign = 'center'
ctx.fillStyle = popup.color
ctx.fillText(popup.text, popup.x, popup.y)
ctx.restore()
if buyMenu
ctx.fillStyle = '#222222'
ctx.fillRect(0, 400, 800, 600)
ctx.textAlign = 'center'
ctx.fillStyle = '#ffffff'
for item, i in itemsAvailable
ctx.fillText('[' + (i + 1) + ']', 160 * (i + 1), 430)
ctx.drawImage(images[item.image], 160 * (i + 1) - 16, 460)
ctx.fillText(item.name, 160 * (i + 1), 550)
ctx.fillText('$' + item.price, 160 * (i + 1), 566)
ctx.font = '14px Visitor'
ctx.textAlign = 'right'
ctx.fillStyle = '#ffffff'
ctx.fillText('energy', 50, 20)
ctx.fillText('hunger', 50, 32)
ctx.fillText('sanity', 50, 44)
ctx.fillText('$' + player.money, 700, 44)
ctx.fillRect(54, 14, player.energy, 6)
ctx.fillRect(54, 26, player.hunger, 6)
ctx.fillRect(54, 38, player.sanity, 6)
if DEBUG
ctx.fillText(Math.round(player.energy), 180, 20)
ctx.fillText(Math.round(player.hunger), 180, 32)
ctx.fillText(Math.round(player.sanity), 180, 44)
ctx.textAlign = 'left'
if itemPlaced
ctx.fillText('[space] place', 340, 344)
else if buyMenu
ctx.fillText('[B] close', 340, 344)
else
ctx.fillText('[B] buy', 340, 344)
buttons = ['H', 'J', 'K', 'L']
if activeItem
for action, i in activeItem.item.actions
ctx.fillText('[' + buttons[i] + '] ' + action.name, 340, 360 + i * 16)
ctx.textAlign = 'right'
if fade.state != 'none'
ctx.fillStyle = 'rgba(0, 0, 0, ' + fade.step + ')'
ctx.fillRect(0, 0, 800, 600)
if ogre
ctx.fillStyle = '#000000'
ctx.fillRect(0, 0, 800, 600)
ctx.font = '180px Visitor'
ctx.textAlign = 'center'
ctx.textBaseline = 'middle'
ctx.fillStyle = '#ffffff'
ctx.fillText('THE END', 400, 300)
do ->
w = window
for vendor in ['ms', 'moz', 'webkit', 'o']
break if w.requestAnimationFrame
w.requestAnimationFrame = w["#{vendor}RequestAnimationFrame"]
if not w.requestAnimationFrame
targetTime = 0
w.requestAnimationFrame = (callback) ->
targetTime = Math.max targetTime + 16, currentTime = +new Date
w.setTimeout (-> callback +new Date), targetTime - currentTime
for img in ['floor', 'lamp', 'computer', 'bed', 'fridge', 'player', 'wall', 'wall_corner']
loadImage(img)
load = ->
if loading
console.log(loading)
window.requestAnimationFrame(load)
else
console.log('All loaded!')
window.requestAnimationFrame(tick)
load()
|
[
{
"context": "'Dishes (Used)', category: 'Garbage'},\n {name: 'Dishwasher', category: 'Large Item'},\n {name: 'Dough', ca",
"end": 3126,
"score": 0.5718340277671814,
"start": 3119,
"tag": "NAME",
"value": "ishwash"
}
] | app/assets/javascripts/services/waste_items.js.coffee | DouglasSherk/Swapp | 1 | App.factory 'WasteItems', -> [
{name: 'Aerosol Cans (empty)', category: 'Recyclable'},
{name: 'Aerosol Cans (not empty)', category: 'Hazardous Waste'},
{name: 'Aluminum Cans & Foil', category: 'Recyclable'},
{name: 'Aluminum Foil (soiled)', category: 'Garbage'},
{name: 'Auto Parts', category: 'Drop-off'},
{name: 'Bakery Trays', category: 'Recyclable'},
{name: 'Batteries', category: 'Hazardous Waste'},
{name: 'Batteries - Hearing Aid', category: 'Drop-off'},
{name: 'Beverage Cans', category: 'Recyclable'},
{name: 'Beverage Cartons', category: 'Recyclable'},
{name: 'Bicycle', category: 'Drop-off'},
{name: 'Blankets', category: 'Drop-off'},
{name: 'Bleach tubs/lids', category: 'Recyclable'},
{name: 'Bones', category: 'Yard Waste'},
{name: 'Books', category: 'Drop-off'},
{name: 'Bottles', category: 'Recyclable'},
{name: 'Boxboard', category: 'Recyclable'},
{name: 'Bread', category: 'Yard Waste'},
{name: 'Bread bags', category: 'Recyclable'},
{name: 'Broken Glass', category: 'Garbage'},
{name: 'Broken Toys', category: 'Garbage'},
{name: 'Brown paper bags', category: 'Yard Waste'},
{name: 'Building Supplies', category: 'Drop-off'},
{name: 'Cake', category: 'Yard Waste'},
{name: 'Candies', category: 'Yard Waste'},
{name: 'Candy Wrappers', category: 'Garbage'},
{name: 'Carbon Paper', category: 'Garbage'},
{name: 'Cardboard', category: 'Recyclable'},
{name: 'Cardboard with metal bottoms', category: 'Recyclable'},
{name: 'Cards', category: 'Garbage'},
{name: 'Carpet', category: 'Large Item'},
{name: 'Catalogues', category: 'Recyclable'},
{name: 'CDs, DVDs, Videos, Video Games', category: 'Drop-off'},
{name: 'Ceramics', category: 'Garbage'},
{name: 'Cereal boxes', category: 'Recyclable'},
{name: 'CFL Lightbulbs', category: 'Drop-off'},
{name: 'Chicken', category: 'Yard Waste'},
{name: 'Children\'s Items', category: 'Drop-off'},
{name: 'Chip Bags', category: 'Garbage'},
{name: 'Cleaning Liquids (Corrosive)', category: 'Hazardous Waste'},
{name: 'Cloth items', category: 'Garbage'},
{name: 'Clothes Hangers', category: 'Drop-off'},
{name: 'Clothing (reusable)', category: 'Drop-off'},
{name: 'Coffee Cups (paper only)', category: 'Yard Waste'},
{name: 'Coffee filters', category: 'Yard Waste'},
{name: 'Coffee grounds', category: 'Yard Waste'},
{name: 'Comforters', category: 'Drop-off'},
{name: 'Commercial Waste', category: 'Drop-off'},
{name: 'Compost', category: 'Yard Waste'},
{name: 'Computer', category: 'Drop-off'},
{name: 'Construction Materials', category: 'Drop-off'},
{name: 'Construction Paper', category: 'Yard Waste'},
{name: 'Cookie Boxes', category: 'Recyclable'},
{name: 'Cool fireplace ashes', category: 'Yard Waste'},
{name: 'Cutlery', category: 'Garbage'},
{name: 'Dairy products', category: 'Yard Waste'},
{name: 'Dehumidifier', category: 'Large Item'},
{name: 'Detergent tubs/lids', category: 'Recyclable'},
{name: 'Diapers', category: 'Garbage'},
{name: 'Dishes (broken)', category: 'Garbage'},
{name: 'Dishes (Used)', category: 'Garbage'},
{name: 'Dishwasher', category: 'Large Item'},
{name: 'Dough', category: 'Yard Waste'},
{name: 'Draperies', category: 'Drop-off'},
{name: 'Drink Boxes', category: 'Recyclable'},
{name: 'Dry Cleaning bags', category: 'Recyclable'},
{name: 'Dryer lint', category: 'Yard Waste'},
{name: 'Egg Cartons - Paper', category: 'Yard Waste'},
{name: 'Egg Shells', category: 'Yard Waste'},
{name: 'Eggs', category: 'Yard Waste'},
{name: 'Electronics', category: 'Drop-off'},
{name: 'Envelopes', category: 'Recyclable'},
{name: 'Eyeglasses', category: 'Drop-off'},
{name: 'Fatty Foods', category: 'Yard Waste'},
{name: 'Feces (Pet)', category: 'Yard Waste'},
{name: 'Fireworks (used)', category: 'Garbage'},
{name: 'Fish Scraps', category: 'Yard Waste'},
{name: 'Floor sweepings', category: 'Garbage'},
{name: 'Flour and grains', category: 'Yard Waste'},
{name: 'Flowers (cut)', category: 'Yard Waste'},
{name: 'Fluorescent light tubes', category: 'Hazardous Waste'},
{name: 'Flyers', category: 'Recyclable'},
{name: 'Foam Packaging', category: 'Recyclable'},
{name: 'Foil Containers', category: 'Recyclable'},
{name: 'Fridge / Freezer', category: 'Large Item'},
{name: 'Frozen juice cans', category: 'Recyclable'},
{name: 'Fruit scraps', category: 'Yard Waste'},
{name: 'Furniture', category: 'Drop-off'},
{name: 'Gasoline', category: 'Hazardous Waste'},
{name: 'Gift Boxes', category: 'Recyclable'},
{name: 'Gift Wrapping Paper', category: 'Garbage'},
{name: 'Glass Jars', category: 'Recyclable'},
{name: 'Glasses (drinking)', category: 'Garbage'},
{name: 'Glasses (prescription)', category: 'Drop-off'},
{name: 'Grass clippings', category: 'None'},
{name: 'Grocery bags', category: 'Recyclable'},
{name: 'Hair Clippings', category: 'Yard Waste'},
{name: 'Hardcover books (no cover)', category: 'Recyclable'},
{name: 'Hearing Aids', category: 'Drop-off'},
{name: 'Herbicides', category: 'Hazardous Waste'},
{name: 'Humidifier', category: 'Large Item'},
{name: 'Ice cream tubs/lids', category: 'Recyclable'},
{name: 'Infected Plants', category: 'Yard Waste'},
{name: 'Instruments', category: 'Drop-off'},
{name: 'Jewellery', category: 'Drop-off'},
{name: 'Juice bottles/tubs/lids', category: 'Recyclable'},
{name: 'Kitchen Appliances', category: 'Drop-off'},
{name: 'Kitchen Utensils', category: 'Drop-off'},
{name: 'Latex Gloves', category: 'Garbage'},
{name: 'Leaves', category: 'Yard Waste'},
{name: 'Legal Drugs', category: 'Hazardous Waste'},
{name: 'Lightbulbs', category: 'Garbage'},
{name: 'Lights, Fluorescent tubes', category: 'Hazardous Waste'},
{name: 'Liquid Paint', category: 'Hazardous Waste'},
{name: 'Magazines', category: 'Drop-off'},
{name: 'Margarine tubs/lids', category: 'Recyclable'},
{name: 'Meat', category: 'Yard Waste'},
{name: 'Microwave popcorn bags', category: 'Yard Waste'},
{name: 'Microwaves', category: 'Large Item'},
{name: 'Milk bags', category: 'Recyclable'},
{name: 'Milk Cartons', category: 'Recyclable'},
{name: 'Motor Oil', category: 'Hazardous Waste'},
{name: 'Motor Oil bottles/tubs/lids', category: 'Recyclable'},
{name: 'Muffins', category: 'Yard Waste'},
{name: 'Musical items', category: 'Drop-off'},
{name: 'Needles & Syringes', category: 'Hazardous Waste'},
{name: 'Newspapers', category: 'Recyclable'},
{name: 'Nuts and nutshells', category: 'Yard Waste'},
{name: 'Oil - Cooking', category: 'Yard Waste'},
{name: 'Oil - Motor', category: 'Hazardous Waste'},
{name: 'Organics', category: 'Yard Waste'},
{name: 'Paint Cans - empty', category: 'Recyclable'},
{name: 'Paint Cans - not empty', category: 'Hazardous Waste'},
{name: 'Paper', category: 'Recyclable'},
{name: 'Paper take-out trays/cups/plates', category: 'Yard Waste'},
{name: 'Paper Towel - not soiled', category: 'Yard Waste'},
{name: 'Paperbacks', category: 'Recyclable'},
{name: 'Pasta', category: 'Yard Waste'},
{name: 'Peelings', category: 'Yard Waste'},
{name: 'Pesticides', category: 'Hazardous Waste'},
{name: 'Pet hair and feathers', category: 'Yard Waste'},
{name: 'Pet waste and kitty litter bag', category: 'Yard Waste'},
{name: 'Pharmaceuticals', category: 'Hazardous Waste'},
{name: 'Phone Books', category: 'Recyclable'},
{name: 'Pies', category: 'Yard Waste'},
{name: 'Pizza boxes', category: 'Recyclable'},
{name: 'Plants (trimmings/house plants)', category: 'Yard Waste'},
{name: 'Plastic Bags', category: 'Recyclable'},
{name: 'Plastic Bottles', category: 'Recyclable'},
{name: 'Plastic Cups/lids', category: 'Recyclable'},
{name: 'Plastic Liners', category: 'Garbage'},
{name: 'Plastic Wrap (used)', category: 'Garbage'},
{name: 'Polystyrene Foam Plastic', category: 'Recyclable'},
{name: 'Pop cases', category: 'Recyclable'},
{name: 'Popsicle sticks', category: 'Yard Waste'},
{name: 'Potato Chip Bags', category: 'Garbage'},
{name: 'Pots and Pans', category: 'Drop-off'},
{name: 'Printer Cartridges', category: 'Drop-off'},
{name: 'Propane Tanks', category: 'Hazardous Waste'},
{name: 'Purses', category: 'Drop-off'},
{name: 'Pyrex', category: 'Garbage'},
{name: 'Rice', category: 'Yard Waste'},
{name: 'Rubber Gloves', category: 'Garbage'},
{name: 'Sauces, gravy and dressings', category: 'Yard Waste'},
{name: 'Saw dust', category: 'Yard Waste'},
{name: 'Scrap Metal', category: 'Drop-off'},
{name: 'Shampoo bottles/lids', category: 'Recyclable'},
{name: 'Sharps', category: 'Hazardous Waste'},
{name: 'Shellfish', category: 'Yard Waste'},
{name: 'Shoes', category: 'Drop-off'},
{name: 'Shredded Paper', category: 'Recyclable'},
{name: 'Smoke Detectors', category: 'Garbage'},
{name: 'Soiled newsprint', category: 'Yard Waste'},
{name: 'Solvents', category: 'Hazardous Waste'},
{name: 'Sport equipment', category: 'Drop-off'},
{name: 'Spray Paint (empty)', category: 'Recyclable'},
{name: 'Spray Paint (not empty)', category: 'Hazardous Waste'},
{name: 'Stove', category: 'Large Item'},
{name: 'Straw', category: 'Yard Waste'},
{name: 'Styrofoam', category: 'Recyclable'},
{name: 'Tea Bags', category: 'Yard Waste'},
{name: 'Thinners', category: 'Hazardous Waste'},
{name: 'Tire Rims', category: 'Drop-off'},
{name: 'Tires', category: 'Drop-off'},
{name: 'Toothpaste Tubes', category: 'Garbage'},
{name: 'Toothpicks', category: 'Yard Waste'},
{name: 'Toys', category: 'Drop-off'},
{name: 'Twigs', category: 'Yard Waste'},
{name: 'Vacuum Bags', category: 'Garbage'},
{name: 'Vacuums', category: 'Drop-off'},
{name: 'Vegetable scraps', category: 'Yard Waste'},
{name: 'Wallpaper', category: 'Garbage'},
{name: 'Water bottles/tubs/lids', category: 'Recyclable'},
{name: 'Waxed Paper', category: 'Garbage'},
{name: 'Weeds', category: 'Yard Waste'},
{name: 'Windshield washer bottles', category: 'Recyclable'},
{name: 'Wood Chips', category: 'Yard Waste'},
{name: 'Wooden Stir Sticks', category: 'Yard Waste'},
{name: 'Yogurt Lids (foil)', category: 'Garbage'},
{name: 'Yogurt Lids (plastic)', category: 'Recyclable'},
]
| 168951 | App.factory 'WasteItems', -> [
{name: 'Aerosol Cans (empty)', category: 'Recyclable'},
{name: 'Aerosol Cans (not empty)', category: 'Hazardous Waste'},
{name: 'Aluminum Cans & Foil', category: 'Recyclable'},
{name: 'Aluminum Foil (soiled)', category: 'Garbage'},
{name: 'Auto Parts', category: 'Drop-off'},
{name: 'Bakery Trays', category: 'Recyclable'},
{name: 'Batteries', category: 'Hazardous Waste'},
{name: 'Batteries - Hearing Aid', category: 'Drop-off'},
{name: 'Beverage Cans', category: 'Recyclable'},
{name: 'Beverage Cartons', category: 'Recyclable'},
{name: 'Bicycle', category: 'Drop-off'},
{name: 'Blankets', category: 'Drop-off'},
{name: 'Bleach tubs/lids', category: 'Recyclable'},
{name: 'Bones', category: 'Yard Waste'},
{name: 'Books', category: 'Drop-off'},
{name: 'Bottles', category: 'Recyclable'},
{name: 'Boxboard', category: 'Recyclable'},
{name: 'Bread', category: 'Yard Waste'},
{name: 'Bread bags', category: 'Recyclable'},
{name: 'Broken Glass', category: 'Garbage'},
{name: 'Broken Toys', category: 'Garbage'},
{name: 'Brown paper bags', category: 'Yard Waste'},
{name: 'Building Supplies', category: 'Drop-off'},
{name: 'Cake', category: 'Yard Waste'},
{name: 'Candies', category: 'Yard Waste'},
{name: 'Candy Wrappers', category: 'Garbage'},
{name: 'Carbon Paper', category: 'Garbage'},
{name: 'Cardboard', category: 'Recyclable'},
{name: 'Cardboard with metal bottoms', category: 'Recyclable'},
{name: 'Cards', category: 'Garbage'},
{name: 'Carpet', category: 'Large Item'},
{name: 'Catalogues', category: 'Recyclable'},
{name: 'CDs, DVDs, Videos, Video Games', category: 'Drop-off'},
{name: 'Ceramics', category: 'Garbage'},
{name: 'Cereal boxes', category: 'Recyclable'},
{name: 'CFL Lightbulbs', category: 'Drop-off'},
{name: 'Chicken', category: 'Yard Waste'},
{name: 'Children\'s Items', category: 'Drop-off'},
{name: 'Chip Bags', category: 'Garbage'},
{name: 'Cleaning Liquids (Corrosive)', category: 'Hazardous Waste'},
{name: 'Cloth items', category: 'Garbage'},
{name: 'Clothes Hangers', category: 'Drop-off'},
{name: 'Clothing (reusable)', category: 'Drop-off'},
{name: 'Coffee Cups (paper only)', category: 'Yard Waste'},
{name: 'Coffee filters', category: 'Yard Waste'},
{name: 'Coffee grounds', category: 'Yard Waste'},
{name: 'Comforters', category: 'Drop-off'},
{name: 'Commercial Waste', category: 'Drop-off'},
{name: 'Compost', category: 'Yard Waste'},
{name: 'Computer', category: 'Drop-off'},
{name: 'Construction Materials', category: 'Drop-off'},
{name: 'Construction Paper', category: 'Yard Waste'},
{name: 'Cookie Boxes', category: 'Recyclable'},
{name: 'Cool fireplace ashes', category: 'Yard Waste'},
{name: 'Cutlery', category: 'Garbage'},
{name: 'Dairy products', category: 'Yard Waste'},
{name: 'Dehumidifier', category: 'Large Item'},
{name: 'Detergent tubs/lids', category: 'Recyclable'},
{name: 'Diapers', category: 'Garbage'},
{name: 'Dishes (broken)', category: 'Garbage'},
{name: 'Dishes (Used)', category: 'Garbage'},
{name: 'D<NAME>er', category: 'Large Item'},
{name: 'Dough', category: 'Yard Waste'},
{name: 'Draperies', category: 'Drop-off'},
{name: 'Drink Boxes', category: 'Recyclable'},
{name: 'Dry Cleaning bags', category: 'Recyclable'},
{name: 'Dryer lint', category: 'Yard Waste'},
{name: 'Egg Cartons - Paper', category: 'Yard Waste'},
{name: 'Egg Shells', category: 'Yard Waste'},
{name: 'Eggs', category: 'Yard Waste'},
{name: 'Electronics', category: 'Drop-off'},
{name: 'Envelopes', category: 'Recyclable'},
{name: 'Eyeglasses', category: 'Drop-off'},
{name: 'Fatty Foods', category: 'Yard Waste'},
{name: 'Feces (Pet)', category: 'Yard Waste'},
{name: 'Fireworks (used)', category: 'Garbage'},
{name: 'Fish Scraps', category: 'Yard Waste'},
{name: 'Floor sweepings', category: 'Garbage'},
{name: 'Flour and grains', category: 'Yard Waste'},
{name: 'Flowers (cut)', category: 'Yard Waste'},
{name: 'Fluorescent light tubes', category: 'Hazardous Waste'},
{name: 'Flyers', category: 'Recyclable'},
{name: 'Foam Packaging', category: 'Recyclable'},
{name: 'Foil Containers', category: 'Recyclable'},
{name: 'Fridge / Freezer', category: 'Large Item'},
{name: 'Frozen juice cans', category: 'Recyclable'},
{name: 'Fruit scraps', category: 'Yard Waste'},
{name: 'Furniture', category: 'Drop-off'},
{name: 'Gasoline', category: 'Hazardous Waste'},
{name: 'Gift Boxes', category: 'Recyclable'},
{name: 'Gift Wrapping Paper', category: 'Garbage'},
{name: 'Glass Jars', category: 'Recyclable'},
{name: 'Glasses (drinking)', category: 'Garbage'},
{name: 'Glasses (prescription)', category: 'Drop-off'},
{name: 'Grass clippings', category: 'None'},
{name: 'Grocery bags', category: 'Recyclable'},
{name: 'Hair Clippings', category: 'Yard Waste'},
{name: 'Hardcover books (no cover)', category: 'Recyclable'},
{name: 'Hearing Aids', category: 'Drop-off'},
{name: 'Herbicides', category: 'Hazardous Waste'},
{name: 'Humidifier', category: 'Large Item'},
{name: 'Ice cream tubs/lids', category: 'Recyclable'},
{name: 'Infected Plants', category: 'Yard Waste'},
{name: 'Instruments', category: 'Drop-off'},
{name: 'Jewellery', category: 'Drop-off'},
{name: 'Juice bottles/tubs/lids', category: 'Recyclable'},
{name: 'Kitchen Appliances', category: 'Drop-off'},
{name: 'Kitchen Utensils', category: 'Drop-off'},
{name: 'Latex Gloves', category: 'Garbage'},
{name: 'Leaves', category: 'Yard Waste'},
{name: 'Legal Drugs', category: 'Hazardous Waste'},
{name: 'Lightbulbs', category: 'Garbage'},
{name: 'Lights, Fluorescent tubes', category: 'Hazardous Waste'},
{name: 'Liquid Paint', category: 'Hazardous Waste'},
{name: 'Magazines', category: 'Drop-off'},
{name: 'Margarine tubs/lids', category: 'Recyclable'},
{name: 'Meat', category: 'Yard Waste'},
{name: 'Microwave popcorn bags', category: 'Yard Waste'},
{name: 'Microwaves', category: 'Large Item'},
{name: 'Milk bags', category: 'Recyclable'},
{name: 'Milk Cartons', category: 'Recyclable'},
{name: 'Motor Oil', category: 'Hazardous Waste'},
{name: 'Motor Oil bottles/tubs/lids', category: 'Recyclable'},
{name: 'Muffins', category: 'Yard Waste'},
{name: 'Musical items', category: 'Drop-off'},
{name: 'Needles & Syringes', category: 'Hazardous Waste'},
{name: 'Newspapers', category: 'Recyclable'},
{name: 'Nuts and nutshells', category: 'Yard Waste'},
{name: 'Oil - Cooking', category: 'Yard Waste'},
{name: 'Oil - Motor', category: 'Hazardous Waste'},
{name: 'Organics', category: 'Yard Waste'},
{name: 'Paint Cans - empty', category: 'Recyclable'},
{name: 'Paint Cans - not empty', category: 'Hazardous Waste'},
{name: 'Paper', category: 'Recyclable'},
{name: 'Paper take-out trays/cups/plates', category: 'Yard Waste'},
{name: 'Paper Towel - not soiled', category: 'Yard Waste'},
{name: 'Paperbacks', category: 'Recyclable'},
{name: 'Pasta', category: 'Yard Waste'},
{name: 'Peelings', category: 'Yard Waste'},
{name: 'Pesticides', category: 'Hazardous Waste'},
{name: 'Pet hair and feathers', category: 'Yard Waste'},
{name: 'Pet waste and kitty litter bag', category: 'Yard Waste'},
{name: 'Pharmaceuticals', category: 'Hazardous Waste'},
{name: 'Phone Books', category: 'Recyclable'},
{name: 'Pies', category: 'Yard Waste'},
{name: 'Pizza boxes', category: 'Recyclable'},
{name: 'Plants (trimmings/house plants)', category: 'Yard Waste'},
{name: 'Plastic Bags', category: 'Recyclable'},
{name: 'Plastic Bottles', category: 'Recyclable'},
{name: 'Plastic Cups/lids', category: 'Recyclable'},
{name: 'Plastic Liners', category: 'Garbage'},
{name: 'Plastic Wrap (used)', category: 'Garbage'},
{name: 'Polystyrene Foam Plastic', category: 'Recyclable'},
{name: 'Pop cases', category: 'Recyclable'},
{name: 'Popsicle sticks', category: 'Yard Waste'},
{name: 'Potato Chip Bags', category: 'Garbage'},
{name: 'Pots and Pans', category: 'Drop-off'},
{name: 'Printer Cartridges', category: 'Drop-off'},
{name: 'Propane Tanks', category: 'Hazardous Waste'},
{name: 'Purses', category: 'Drop-off'},
{name: 'Pyrex', category: 'Garbage'},
{name: 'Rice', category: 'Yard Waste'},
{name: 'Rubber Gloves', category: 'Garbage'},
{name: 'Sauces, gravy and dressings', category: 'Yard Waste'},
{name: 'Saw dust', category: 'Yard Waste'},
{name: 'Scrap Metal', category: 'Drop-off'},
{name: 'Shampoo bottles/lids', category: 'Recyclable'},
{name: 'Sharps', category: 'Hazardous Waste'},
{name: 'Shellfish', category: 'Yard Waste'},
{name: 'Shoes', category: 'Drop-off'},
{name: 'Shredded Paper', category: 'Recyclable'},
{name: 'Smoke Detectors', category: 'Garbage'},
{name: 'Soiled newsprint', category: 'Yard Waste'},
{name: 'Solvents', category: 'Hazardous Waste'},
{name: 'Sport equipment', category: 'Drop-off'},
{name: 'Spray Paint (empty)', category: 'Recyclable'},
{name: 'Spray Paint (not empty)', category: 'Hazardous Waste'},
{name: 'Stove', category: 'Large Item'},
{name: 'Straw', category: 'Yard Waste'},
{name: 'Styrofoam', category: 'Recyclable'},
{name: 'Tea Bags', category: 'Yard Waste'},
{name: 'Thinners', category: 'Hazardous Waste'},
{name: 'Tire Rims', category: 'Drop-off'},
{name: 'Tires', category: 'Drop-off'},
{name: 'Toothpaste Tubes', category: 'Garbage'},
{name: 'Toothpicks', category: 'Yard Waste'},
{name: 'Toys', category: 'Drop-off'},
{name: 'Twigs', category: 'Yard Waste'},
{name: 'Vacuum Bags', category: 'Garbage'},
{name: 'Vacuums', category: 'Drop-off'},
{name: 'Vegetable scraps', category: 'Yard Waste'},
{name: 'Wallpaper', category: 'Garbage'},
{name: 'Water bottles/tubs/lids', category: 'Recyclable'},
{name: 'Waxed Paper', category: 'Garbage'},
{name: 'Weeds', category: 'Yard Waste'},
{name: 'Windshield washer bottles', category: 'Recyclable'},
{name: 'Wood Chips', category: 'Yard Waste'},
{name: 'Wooden Stir Sticks', category: 'Yard Waste'},
{name: 'Yogurt Lids (foil)', category: 'Garbage'},
{name: 'Yogurt Lids (plastic)', category: 'Recyclable'},
]
| true | App.factory 'WasteItems', -> [
{name: 'Aerosol Cans (empty)', category: 'Recyclable'},
{name: 'Aerosol Cans (not empty)', category: 'Hazardous Waste'},
{name: 'Aluminum Cans & Foil', category: 'Recyclable'},
{name: 'Aluminum Foil (soiled)', category: 'Garbage'},
{name: 'Auto Parts', category: 'Drop-off'},
{name: 'Bakery Trays', category: 'Recyclable'},
{name: 'Batteries', category: 'Hazardous Waste'},
{name: 'Batteries - Hearing Aid', category: 'Drop-off'},
{name: 'Beverage Cans', category: 'Recyclable'},
{name: 'Beverage Cartons', category: 'Recyclable'},
{name: 'Bicycle', category: 'Drop-off'},
{name: 'Blankets', category: 'Drop-off'},
{name: 'Bleach tubs/lids', category: 'Recyclable'},
{name: 'Bones', category: 'Yard Waste'},
{name: 'Books', category: 'Drop-off'},
{name: 'Bottles', category: 'Recyclable'},
{name: 'Boxboard', category: 'Recyclable'},
{name: 'Bread', category: 'Yard Waste'},
{name: 'Bread bags', category: 'Recyclable'},
{name: 'Broken Glass', category: 'Garbage'},
{name: 'Broken Toys', category: 'Garbage'},
{name: 'Brown paper bags', category: 'Yard Waste'},
{name: 'Building Supplies', category: 'Drop-off'},
{name: 'Cake', category: 'Yard Waste'},
{name: 'Candies', category: 'Yard Waste'},
{name: 'Candy Wrappers', category: 'Garbage'},
{name: 'Carbon Paper', category: 'Garbage'},
{name: 'Cardboard', category: 'Recyclable'},
{name: 'Cardboard with metal bottoms', category: 'Recyclable'},
{name: 'Cards', category: 'Garbage'},
{name: 'Carpet', category: 'Large Item'},
{name: 'Catalogues', category: 'Recyclable'},
{name: 'CDs, DVDs, Videos, Video Games', category: 'Drop-off'},
{name: 'Ceramics', category: 'Garbage'},
{name: 'Cereal boxes', category: 'Recyclable'},
{name: 'CFL Lightbulbs', category: 'Drop-off'},
{name: 'Chicken', category: 'Yard Waste'},
{name: 'Children\'s Items', category: 'Drop-off'},
{name: 'Chip Bags', category: 'Garbage'},
{name: 'Cleaning Liquids (Corrosive)', category: 'Hazardous Waste'},
{name: 'Cloth items', category: 'Garbage'},
{name: 'Clothes Hangers', category: 'Drop-off'},
{name: 'Clothing (reusable)', category: 'Drop-off'},
{name: 'Coffee Cups (paper only)', category: 'Yard Waste'},
{name: 'Coffee filters', category: 'Yard Waste'},
{name: 'Coffee grounds', category: 'Yard Waste'},
{name: 'Comforters', category: 'Drop-off'},
{name: 'Commercial Waste', category: 'Drop-off'},
{name: 'Compost', category: 'Yard Waste'},
{name: 'Computer', category: 'Drop-off'},
{name: 'Construction Materials', category: 'Drop-off'},
{name: 'Construction Paper', category: 'Yard Waste'},
{name: 'Cookie Boxes', category: 'Recyclable'},
{name: 'Cool fireplace ashes', category: 'Yard Waste'},
{name: 'Cutlery', category: 'Garbage'},
{name: 'Dairy products', category: 'Yard Waste'},
{name: 'Dehumidifier', category: 'Large Item'},
{name: 'Detergent tubs/lids', category: 'Recyclable'},
{name: 'Diapers', category: 'Garbage'},
{name: 'Dishes (broken)', category: 'Garbage'},
{name: 'Dishes (Used)', category: 'Garbage'},
{name: 'DPI:NAME:<NAME>END_PIer', category: 'Large Item'},
{name: 'Dough', category: 'Yard Waste'},
{name: 'Draperies', category: 'Drop-off'},
{name: 'Drink Boxes', category: 'Recyclable'},
{name: 'Dry Cleaning bags', category: 'Recyclable'},
{name: 'Dryer lint', category: 'Yard Waste'},
{name: 'Egg Cartons - Paper', category: 'Yard Waste'},
{name: 'Egg Shells', category: 'Yard Waste'},
{name: 'Eggs', category: 'Yard Waste'},
{name: 'Electronics', category: 'Drop-off'},
{name: 'Envelopes', category: 'Recyclable'},
{name: 'Eyeglasses', category: 'Drop-off'},
{name: 'Fatty Foods', category: 'Yard Waste'},
{name: 'Feces (Pet)', category: 'Yard Waste'},
{name: 'Fireworks (used)', category: 'Garbage'},
{name: 'Fish Scraps', category: 'Yard Waste'},
{name: 'Floor sweepings', category: 'Garbage'},
{name: 'Flour and grains', category: 'Yard Waste'},
{name: 'Flowers (cut)', category: 'Yard Waste'},
{name: 'Fluorescent light tubes', category: 'Hazardous Waste'},
{name: 'Flyers', category: 'Recyclable'},
{name: 'Foam Packaging', category: 'Recyclable'},
{name: 'Foil Containers', category: 'Recyclable'},
{name: 'Fridge / Freezer', category: 'Large Item'},
{name: 'Frozen juice cans', category: 'Recyclable'},
{name: 'Fruit scraps', category: 'Yard Waste'},
{name: 'Furniture', category: 'Drop-off'},
{name: 'Gasoline', category: 'Hazardous Waste'},
{name: 'Gift Boxes', category: 'Recyclable'},
{name: 'Gift Wrapping Paper', category: 'Garbage'},
{name: 'Glass Jars', category: 'Recyclable'},
{name: 'Glasses (drinking)', category: 'Garbage'},
{name: 'Glasses (prescription)', category: 'Drop-off'},
{name: 'Grass clippings', category: 'None'},
{name: 'Grocery bags', category: 'Recyclable'},
{name: 'Hair Clippings', category: 'Yard Waste'},
{name: 'Hardcover books (no cover)', category: 'Recyclable'},
{name: 'Hearing Aids', category: 'Drop-off'},
{name: 'Herbicides', category: 'Hazardous Waste'},
{name: 'Humidifier', category: 'Large Item'},
{name: 'Ice cream tubs/lids', category: 'Recyclable'},
{name: 'Infected Plants', category: 'Yard Waste'},
{name: 'Instruments', category: 'Drop-off'},
{name: 'Jewellery', category: 'Drop-off'},
{name: 'Juice bottles/tubs/lids', category: 'Recyclable'},
{name: 'Kitchen Appliances', category: 'Drop-off'},
{name: 'Kitchen Utensils', category: 'Drop-off'},
{name: 'Latex Gloves', category: 'Garbage'},
{name: 'Leaves', category: 'Yard Waste'},
{name: 'Legal Drugs', category: 'Hazardous Waste'},
{name: 'Lightbulbs', category: 'Garbage'},
{name: 'Lights, Fluorescent tubes', category: 'Hazardous Waste'},
{name: 'Liquid Paint', category: 'Hazardous Waste'},
{name: 'Magazines', category: 'Drop-off'},
{name: 'Margarine tubs/lids', category: 'Recyclable'},
{name: 'Meat', category: 'Yard Waste'},
{name: 'Microwave popcorn bags', category: 'Yard Waste'},
{name: 'Microwaves', category: 'Large Item'},
{name: 'Milk bags', category: 'Recyclable'},
{name: 'Milk Cartons', category: 'Recyclable'},
{name: 'Motor Oil', category: 'Hazardous Waste'},
{name: 'Motor Oil bottles/tubs/lids', category: 'Recyclable'},
{name: 'Muffins', category: 'Yard Waste'},
{name: 'Musical items', category: 'Drop-off'},
{name: 'Needles & Syringes', category: 'Hazardous Waste'},
{name: 'Newspapers', category: 'Recyclable'},
{name: 'Nuts and nutshells', category: 'Yard Waste'},
{name: 'Oil - Cooking', category: 'Yard Waste'},
{name: 'Oil - Motor', category: 'Hazardous Waste'},
{name: 'Organics', category: 'Yard Waste'},
{name: 'Paint Cans - empty', category: 'Recyclable'},
{name: 'Paint Cans - not empty', category: 'Hazardous Waste'},
{name: 'Paper', category: 'Recyclable'},
{name: 'Paper take-out trays/cups/plates', category: 'Yard Waste'},
{name: 'Paper Towel - not soiled', category: 'Yard Waste'},
{name: 'Paperbacks', category: 'Recyclable'},
{name: 'Pasta', category: 'Yard Waste'},
{name: 'Peelings', category: 'Yard Waste'},
{name: 'Pesticides', category: 'Hazardous Waste'},
{name: 'Pet hair and feathers', category: 'Yard Waste'},
{name: 'Pet waste and kitty litter bag', category: 'Yard Waste'},
{name: 'Pharmaceuticals', category: 'Hazardous Waste'},
{name: 'Phone Books', category: 'Recyclable'},
{name: 'Pies', category: 'Yard Waste'},
{name: 'Pizza boxes', category: 'Recyclable'},
{name: 'Plants (trimmings/house plants)', category: 'Yard Waste'},
{name: 'Plastic Bags', category: 'Recyclable'},
{name: 'Plastic Bottles', category: 'Recyclable'},
{name: 'Plastic Cups/lids', category: 'Recyclable'},
{name: 'Plastic Liners', category: 'Garbage'},
{name: 'Plastic Wrap (used)', category: 'Garbage'},
{name: 'Polystyrene Foam Plastic', category: 'Recyclable'},
{name: 'Pop cases', category: 'Recyclable'},
{name: 'Popsicle sticks', category: 'Yard Waste'},
{name: 'Potato Chip Bags', category: 'Garbage'},
{name: 'Pots and Pans', category: 'Drop-off'},
{name: 'Printer Cartridges', category: 'Drop-off'},
{name: 'Propane Tanks', category: 'Hazardous Waste'},
{name: 'Purses', category: 'Drop-off'},
{name: 'Pyrex', category: 'Garbage'},
{name: 'Rice', category: 'Yard Waste'},
{name: 'Rubber Gloves', category: 'Garbage'},
{name: 'Sauces, gravy and dressings', category: 'Yard Waste'},
{name: 'Saw dust', category: 'Yard Waste'},
{name: 'Scrap Metal', category: 'Drop-off'},
{name: 'Shampoo bottles/lids', category: 'Recyclable'},
{name: 'Sharps', category: 'Hazardous Waste'},
{name: 'Shellfish', category: 'Yard Waste'},
{name: 'Shoes', category: 'Drop-off'},
{name: 'Shredded Paper', category: 'Recyclable'},
{name: 'Smoke Detectors', category: 'Garbage'},
{name: 'Soiled newsprint', category: 'Yard Waste'},
{name: 'Solvents', category: 'Hazardous Waste'},
{name: 'Sport equipment', category: 'Drop-off'},
{name: 'Spray Paint (empty)', category: 'Recyclable'},
{name: 'Spray Paint (not empty)', category: 'Hazardous Waste'},
{name: 'Stove', category: 'Large Item'},
{name: 'Straw', category: 'Yard Waste'},
{name: 'Styrofoam', category: 'Recyclable'},
{name: 'Tea Bags', category: 'Yard Waste'},
{name: 'Thinners', category: 'Hazardous Waste'},
{name: 'Tire Rims', category: 'Drop-off'},
{name: 'Tires', category: 'Drop-off'},
{name: 'Toothpaste Tubes', category: 'Garbage'},
{name: 'Toothpicks', category: 'Yard Waste'},
{name: 'Toys', category: 'Drop-off'},
{name: 'Twigs', category: 'Yard Waste'},
{name: 'Vacuum Bags', category: 'Garbage'},
{name: 'Vacuums', category: 'Drop-off'},
{name: 'Vegetable scraps', category: 'Yard Waste'},
{name: 'Wallpaper', category: 'Garbage'},
{name: 'Water bottles/tubs/lids', category: 'Recyclable'},
{name: 'Waxed Paper', category: 'Garbage'},
{name: 'Weeds', category: 'Yard Waste'},
{name: 'Windshield washer bottles', category: 'Recyclable'},
{name: 'Wood Chips', category: 'Yard Waste'},
{name: 'Wooden Stir Sticks', category: 'Yard Waste'},
{name: 'Yogurt Lids (foil)', category: 'Garbage'},
{name: 'Yogurt Lids (plastic)', category: 'Recyclable'},
]
|
[
{
"context": " key_arr.push arr[pos]\n key = key_arr.join('.')\n token_value_map[key] ?= []\n ",
"end": 3892,
"score": 0.8159177899360657,
"start": 3889,
"tag": "KEY",
"value": "arr"
},
{
"context": " key_arr.push arr[pos]\n key = key_arr.join('... | src/gram_cache.coffee | hu2prod/gram | 0 | fs = require 'fs'
module = @
class @Super_serializer
ref_obj_root : [] # temp for build
obj_root : []
constructor:()->
@ref_obj_root = []
@obj_root = []
class_serialize : (t)->
if t instanceof RegExp
ret =
_class : 'RegExp'
toString: t.toString()
else if t instanceof Array
ret = {} # Внезапно
ret._class = 'Array'
for v,k in t
throw new Error("_class exists") if k == '_class'
throw new Error("can't serialize function in array (no way for ressurect correctly)") if typeof v == 'function'
ret[k] = v
else
ret = {}
_class = t.constructor.name
ret._class = _class if _class != 'Object' and _class != false # Array
for k,v of t
throw new Error("_class exists") if k == '_class'
continue if typeof v == 'function'
ret[k] = v
ret
class_deserialize : (t)->
if t._class?
if t._class == 'RegExp'
[skip, body, tail] = /^\/(.*)\/([a-z]*)$/.exec t.toString
return new RegExp body, tail
ret = eval "new #{t._class}"
else
ret = {}
for k,v of t
continue if k == '_class'
ret[k] = v
ret
serialize : (t)->
@ref_obj_root = []
@obj_root = []
if typeof t != 'object' or t == null
@obj_root.push t
else
@_serialize t
@obj_root
ref : (t)->
for v,k in @ref_obj_root
return "Sref_#{k}" if v == t
return null
unref : (id)->@obj_root[id]
_serialize : (t)->
return t if typeof t != 'object' or t == null
return r if r = @ref t
ret = @ref_obj_root.length
@ref_obj_root.push t
cs_t = @class_serialize t
for k,v of cs_t # по-любому of
cs_t[k] = @_serialize v
@obj_root[ret] = cs_t
"Sref_#{ret}"
deserialize : (obj)->
@obj_root = obj
@ref_obj_root = new Array @obj_root.length # some mem optimize
for v,k in @obj_root
if typeof v == 'object' and v != null
@ref_obj_root[k] = @class_deserialize v
else
@ref_obj_root[k] = v
for v,k in @obj_root
for k2,v2 of v
if ret = /^Sref_(\d+)$/.exec v2
@ref_obj_root[k][k2] = @ref_obj_root[ret[1]] # parseInt не обязателен
@ref_obj_root[0]
Super_serializer = new @Super_serializer
class @Gram_cache
file : ''
gram_crc : ''
gram_hash : {}
constructor:()->
token_serialize : (t)->
res = {
value_array : []
rule : null
a : t.a
b : t.b
}
for v in t.value_array
if v.serialize_uid?
res.value_array.push v.serialize_uid
else if v.serialize_ab_uid?
res.value_array.push v.serialize_ab_uid
else
puts v
throw new Error("missing token.serialize_uid and token.serialize_ab_uid")
if !t.rule.serialize_uid?
puts t.rule
throw new Error("missing t.rule.serialize_uid")
res.rule = {
len: t.rule.serialize_length
uid: t.rule.serialize_uid
}
res
get_cache_serialize : ()->
(gram, list)=>
@gram_hash = {
token_list : []
merge_history : []
}
gram.rule_enumerate()
gram.token_enumerate()
for v in gram.merge_history
@gram_hash.merge_history.push @token_serialize v
for v in gram.token_list
@gram_hash.token_list.push v[0].value
return
get_cache_deserialize : ()->
(gram, list)=>
return if !@gram_hash.token_list?
# build 3-token value map
cache_token_value_length = 3
token_value_map = {} # {[]}
arr = @gram_hash.token_list
(()-># защитим отечество, защитим scope
for i in [0 .. arr.length-cache_token_value_length]
key_arr = []
for pos in [i ... i+cache_token_value_length] by 1
puts pos if !arr[pos]?
key_arr.push arr[pos]
key = key_arr.join('.')
token_value_map[key] ?= []
token_value_map[key].push i
)()
# find first in list
find_first_position = 0
find_first_offset = 0
find_first = ()=>
arr = list
find_first_offset = 0
# puts "find_first_position=#{find_first_position}"
# puts "from=#{find_first_position}"
# puts "to =#{arr.length-cache_token_value_length}"
for i in [find_first_position .. arr.length-cache_token_value_length] by 1 # COPYPASTE
# puts "i=#{i}"
key_arr = []
for pos in [i ... i+cache_token_value_length] by 1
key_arr.push arr[pos][0].value
key = key_arr.join('.')
# puts "search key=#{key}"
# puts "search tvm=#{token_value_map[key]}"
break if cache_position_array = token_value_map[key]
find_first_offset++
# puts "find_first_offset =#{find_first_offset}"
# puts "cache_position_array =#{JSON.stringify cache_position_array}"
# puts "key =#{key}"
# puts "token_value_map[key] =#{token_value_map[key]}"
return cache_position_array
# make group longer than 3
longer = (list_position, cache_position_array)=>
# puts "longer(#{list_position}, #{cache_position_array})"
res = [-1,-1]
for orig_cache_position in cache_position_array
cache_position = orig_cache_position
count = 0
FAtoken_list = @gram_hash.token_list
loop
break if !list[list_position]? # end of token list
p_list = list[list_position++]
c_list = FAtoken_list[cache_position++]
# puts "'#{p_list[0].value}' != '#{c_list[0]}'"
# break if p_list[0].value != c_list[0]
if p_list[0].value != c_list[0]
# puts "break p_list[0].value != c_list[0]"
break
count++
res = [count, orig_cache_position] if res[0] < count
# puts "longer count =#{res[0]}"
# puts "longer orig_cache_position=#{res[1]}" # TRUE
res
get_list_hint = (count)->
(list[i][0].value for i in [last_position ... last_position+count]).join(",")
last_position = 0
fill_list = (count)->
# puts "fill_list #{last_position}..#{last_position+count} #{get_list_hint(count)}"
# nothing to do (see gram rule)
last_position += count
fill_cache = (count, cache_position)=>
# puts "fill_cache #{last_position}..#{last_position+count} #{get_list_hint(count)}"
# adjust a,b positions and insert @ list_position
offset = last_position-cache_position
c2r = (t)-> # cache2real
t+offset
# r2c = (t)-> # real2cache
# t-offset
re_merge_history = {}
for merge_token,merge_token_position in @gram_hash.merge_history
# select only in range
# TODO check edge cases
continue if !(merge_token.a >= cache_position and merge_token.b <= cache_position+count)
value_array = []
for v in merge_token.value_array
if typeof v == 'number'
v = re_merge_history[v]
if !v?
throw new Error("cache fail: bad merge_history position")
else if reg_ret = /^ab(\d+)_(\d+)\[(\d+)\]$/.exec v # text "ab#{ab_k}[#{k}]"
[skip, a, b, k] = reg_ret
a = c2r parseInt a
b = c2r parseInt b
if !gram.token_a_b_list["#{a}_#{b}"]?
throw new Error("cache fail: bad gram.token_a_b_list[#{a}_#{b}] position")
v = gram.token_a_b_list["#{a}_#{b}"][k]
if !v?
puts gram.token_a_b_list["#{a}_#{b}"]
throw new Error("cache fail: bad gram.token_a_b_list[#{a}_#{b}][#{k}] position")
value_array.push v
throw new Error("cache fail: check_and_mix arg count != 2 != 1") if value_array.length != 2 and value_array.length != 1
rule = gram.rule_by_length_list[merge_token.rule.len][merge_token.rule.uid]
# COPYPASTE check_and_mix
ret = rule.check_and_mix value_array #, {no_strict_check:true}
if ret?
if re_merge_history[merge_token_position]?
throw new Error("cache fail: re_merge_history rewrite")
re_merge_history[merge_token_position] = ret
ret.a = c2r merge_token.a
ret.b = c2r merge_token.b
# puts "#{ret.a}_#{ret.b}", ret.mx_hash.hash_key, ret.value
gram.merge_history.push ret
gram.merge_register ret, {no_new:true}
gram.stat_merge_pair++
gram.stat_merge_pair_mix++
gram.stat_merge_pair_mix_hit++
else
puts value_array
throw new Error("cache fail: check_and_mix returns null")
# clear new in cache positions
for a in [last_position ... last_position+count]
gram.token_a_list_new[a] = []
gram.token_b_list_new[a+1] = []
for b in [a+1 .. last_position+count] by 1
gram.token_a_b_list_new["#{a}_#{b}"] = []
last_position += count
while cache_position_array = find_first()
# puts "find_first_offset=#{find_first_offset}"
fill_list find_first_offset if find_first_offset > 0
find_first_position += find_first_offset
# puts "longer_find_first_position=#{find_first_position}"
[ins_count, cache_position] = longer(find_first_position, cache_position_array)
if ins_count
fill_cache ins_count, cache_position
else
fill_list 1
# puts "last_position=#{last_position}"
find_first_position = last_position
# puts "find_first_position=#{find_first_position}"
fill_list list.length - find_first_position if list.length - find_first_position > 0
# puts gram.token_a_b_list_new
# gram.stat()
return
# ###################################################################################################
# real interface
# ###################################################################################################
toFile : ()->
@gram_pack()
blob = {}
for v in @export_list
blob[v] = @[v]
fs.writeFileSync @file, JSON.stringify blob
fromFile : ()->
blob = JSON.parse (fs.readFileSync @file).toString()
for v in @export_list
@[v] = blob[v]
return
bind_opt : (opt={})->
opt.cache_serialize = @get_cache_serialize()
opt.cache_deserialize = @get_cache_deserialize()
opt
| 187875 | fs = require 'fs'
module = @
class @Super_serializer
ref_obj_root : [] # temp for build
obj_root : []
constructor:()->
@ref_obj_root = []
@obj_root = []
class_serialize : (t)->
if t instanceof RegExp
ret =
_class : 'RegExp'
toString: t.toString()
else if t instanceof Array
ret = {} # Внезапно
ret._class = 'Array'
for v,k in t
throw new Error("_class exists") if k == '_class'
throw new Error("can't serialize function in array (no way for ressurect correctly)") if typeof v == 'function'
ret[k] = v
else
ret = {}
_class = t.constructor.name
ret._class = _class if _class != 'Object' and _class != false # Array
for k,v of t
throw new Error("_class exists") if k == '_class'
continue if typeof v == 'function'
ret[k] = v
ret
class_deserialize : (t)->
if t._class?
if t._class == 'RegExp'
[skip, body, tail] = /^\/(.*)\/([a-z]*)$/.exec t.toString
return new RegExp body, tail
ret = eval "new #{t._class}"
else
ret = {}
for k,v of t
continue if k == '_class'
ret[k] = v
ret
serialize : (t)->
@ref_obj_root = []
@obj_root = []
if typeof t != 'object' or t == null
@obj_root.push t
else
@_serialize t
@obj_root
ref : (t)->
for v,k in @ref_obj_root
return "Sref_#{k}" if v == t
return null
unref : (id)->@obj_root[id]
_serialize : (t)->
return t if typeof t != 'object' or t == null
return r if r = @ref t
ret = @ref_obj_root.length
@ref_obj_root.push t
cs_t = @class_serialize t
for k,v of cs_t # по-любому of
cs_t[k] = @_serialize v
@obj_root[ret] = cs_t
"Sref_#{ret}"
deserialize : (obj)->
@obj_root = obj
@ref_obj_root = new Array @obj_root.length # some mem optimize
for v,k in @obj_root
if typeof v == 'object' and v != null
@ref_obj_root[k] = @class_deserialize v
else
@ref_obj_root[k] = v
for v,k in @obj_root
for k2,v2 of v
if ret = /^Sref_(\d+)$/.exec v2
@ref_obj_root[k][k2] = @ref_obj_root[ret[1]] # parseInt не обязателен
@ref_obj_root[0]
Super_serializer = new @Super_serializer
class @Gram_cache
file : ''
gram_crc : ''
gram_hash : {}
constructor:()->
token_serialize : (t)->
res = {
value_array : []
rule : null
a : t.a
b : t.b
}
for v in t.value_array
if v.serialize_uid?
res.value_array.push v.serialize_uid
else if v.serialize_ab_uid?
res.value_array.push v.serialize_ab_uid
else
puts v
throw new Error("missing token.serialize_uid and token.serialize_ab_uid")
if !t.rule.serialize_uid?
puts t.rule
throw new Error("missing t.rule.serialize_uid")
res.rule = {
len: t.rule.serialize_length
uid: t.rule.serialize_uid
}
res
get_cache_serialize : ()->
(gram, list)=>
@gram_hash = {
token_list : []
merge_history : []
}
gram.rule_enumerate()
gram.token_enumerate()
for v in gram.merge_history
@gram_hash.merge_history.push @token_serialize v
for v in gram.token_list
@gram_hash.token_list.push v[0].value
return
get_cache_deserialize : ()->
(gram, list)=>
return if !@gram_hash.token_list?
# build 3-token value map
cache_token_value_length = 3
token_value_map = {} # {[]}
arr = @gram_hash.token_list
(()-># защитим отечество, защитим scope
for i in [0 .. arr.length-cache_token_value_length]
key_arr = []
for pos in [i ... i+cache_token_value_length] by 1
puts pos if !arr[pos]?
key_arr.push arr[pos]
key = key_<KEY>.<KEY>
token_value_map[key] ?= []
token_value_map[key].push i
)()
# find first in list
find_first_position = 0
find_first_offset = 0
find_first = ()=>
arr = list
find_first_offset = 0
# puts "find_first_position=#{find_first_position}"
# puts "from=#{find_first_position}"
# puts "to =#{arr.length-cache_token_value_length}"
for i in [find_first_position .. arr.length-cache_token_value_length] by 1 # COPYPASTE
# puts "i=#{i}"
key_arr = []
for pos in [i ... i+cache_token_value_length] by 1
key_arr.push arr[pos][0].value
key = key_<KEY>.<KEY>
# puts "search key=#{key}"
# puts "search tvm=#{token_value_map[key]}"
break if cache_position_array = token_value_map[key]
find_first_offset++
# puts "find_first_offset =#{find_first_offset}"
# puts "cache_position_array =#{JSON.stringify cache_position_array}"
# puts "key =#{key}"
# puts "token_value_map[key] =#{token_value_map[key]}"
return cache_position_array
# make group longer than 3
longer = (list_position, cache_position_array)=>
# puts "longer(#{list_position}, #{cache_position_array})"
res = [-1,-1]
for orig_cache_position in cache_position_array
cache_position = orig_cache_position
count = 0
FAtoken_list = @gram_hash.token_list
loop
break if !list[list_position]? # end of token list
p_list = list[list_position++]
c_list = FAtoken_list[cache_position++]
# puts "'#{p_list[0].value}' != '#{c_list[0]}'"
# break if p_list[0].value != c_list[0]
if p_list[0].value != c_list[0]
# puts "break p_list[0].value != c_list[0]"
break
count++
res = [count, orig_cache_position] if res[0] < count
# puts "longer count =#{res[0]}"
# puts "longer orig_cache_position=#{res[1]}" # TRUE
res
get_list_hint = (count)->
(list[i][0].value for i in [last_position ... last_position+count]).join(",")
last_position = 0
fill_list = (count)->
# puts "fill_list #{last_position}..#{last_position+count} #{get_list_hint(count)}"
# nothing to do (see gram rule)
last_position += count
fill_cache = (count, cache_position)=>
# puts "fill_cache #{last_position}..#{last_position+count} #{get_list_hint(count)}"
# adjust a,b positions and insert @ list_position
offset = last_position-cache_position
c2r = (t)-> # cache2real
t+offset
# r2c = (t)-> # real2cache
# t-offset
re_merge_history = {}
for merge_token,merge_token_position in @gram_hash.merge_history
# select only in range
# TODO check edge cases
continue if !(merge_token.a >= cache_position and merge_token.b <= cache_position+count)
value_array = []
for v in merge_token.value_array
if typeof v == 'number'
v = re_merge_history[v]
if !v?
throw new Error("cache fail: bad merge_history position")
else if reg_ret = /^ab(\d+)_(\d+)\[(\d+)\]$/.exec v # text "ab#{ab_k}[#{k}]"
[skip, a, b, k] = reg_ret
a = c2r parseInt a
b = c2r parseInt b
if !gram.token_a_b_list["#{a}_#{b}"]?
throw new Error("cache fail: bad gram.token_a_b_list[#{a}_#{b}] position")
v = gram.token_a_b_list["#{a}_#{b}"][k]
if !v?
puts gram.token_a_b_list["#{a}_#{b}"]
throw new Error("cache fail: bad gram.token_a_b_list[#{a}_#{b}][#{k}] position")
value_array.push v
throw new Error("cache fail: check_and_mix arg count != 2 != 1") if value_array.length != 2 and value_array.length != 1
rule = gram.rule_by_length_list[merge_token.rule.len][merge_token.rule.uid]
# COPYPASTE check_and_mix
ret = rule.check_and_mix value_array #, {no_strict_check:true}
if ret?
if re_merge_history[merge_token_position]?
throw new Error("cache fail: re_merge_history rewrite")
re_merge_history[merge_token_position] = ret
ret.a = c2r merge_token.a
ret.b = c2r merge_token.b
# puts "#{ret.a}_#{ret.b}", ret.mx_hash.hash_key, ret.value
gram.merge_history.push ret
gram.merge_register ret, {no_new:true}
gram.stat_merge_pair++
gram.stat_merge_pair_mix++
gram.stat_merge_pair_mix_hit++
else
puts value_array
throw new Error("cache fail: check_and_mix returns null")
# clear new in cache positions
for a in [last_position ... last_position+count]
gram.token_a_list_new[a] = []
gram.token_b_list_new[a+1] = []
for b in [a+1 .. last_position+count] by 1
gram.token_a_b_list_new["#{a}_#{b}"] = []
last_position += count
while cache_position_array = find_first()
# puts "find_first_offset=#{find_first_offset}"
fill_list find_first_offset if find_first_offset > 0
find_first_position += find_first_offset
# puts "longer_find_first_position=#{find_first_position}"
[ins_count, cache_position] = longer(find_first_position, cache_position_array)
if ins_count
fill_cache ins_count, cache_position
else
fill_list 1
# puts "last_position=#{last_position}"
find_first_position = last_position
# puts "find_first_position=#{find_first_position}"
fill_list list.length - find_first_position if list.length - find_first_position > 0
# puts gram.token_a_b_list_new
# gram.stat()
return
# ###################################################################################################
# real interface
# ###################################################################################################
toFile : ()->
@gram_pack()
blob = {}
for v in @export_list
blob[v] = @[v]
fs.writeFileSync @file, JSON.stringify blob
fromFile : ()->
blob = JSON.parse (fs.readFileSync @file).toString()
for v in @export_list
@[v] = blob[v]
return
bind_opt : (opt={})->
opt.cache_serialize = @get_cache_serialize()
opt.cache_deserialize = @get_cache_deserialize()
opt
| true | fs = require 'fs'
module = @
class @Super_serializer
ref_obj_root : [] # temp for build
obj_root : []
constructor:()->
@ref_obj_root = []
@obj_root = []
class_serialize : (t)->
if t instanceof RegExp
ret =
_class : 'RegExp'
toString: t.toString()
else if t instanceof Array
ret = {} # Внезапно
ret._class = 'Array'
for v,k in t
throw new Error("_class exists") if k == '_class'
throw new Error("can't serialize function in array (no way for ressurect correctly)") if typeof v == 'function'
ret[k] = v
else
ret = {}
_class = t.constructor.name
ret._class = _class if _class != 'Object' and _class != false # Array
for k,v of t
throw new Error("_class exists") if k == '_class'
continue if typeof v == 'function'
ret[k] = v
ret
class_deserialize : (t)->
if t._class?
if t._class == 'RegExp'
[skip, body, tail] = /^\/(.*)\/([a-z]*)$/.exec t.toString
return new RegExp body, tail
ret = eval "new #{t._class}"
else
ret = {}
for k,v of t
continue if k == '_class'
ret[k] = v
ret
serialize : (t)->
@ref_obj_root = []
@obj_root = []
if typeof t != 'object' or t == null
@obj_root.push t
else
@_serialize t
@obj_root
ref : (t)->
for v,k in @ref_obj_root
return "Sref_#{k}" if v == t
return null
unref : (id)->@obj_root[id]
_serialize : (t)->
return t if typeof t != 'object' or t == null
return r if r = @ref t
ret = @ref_obj_root.length
@ref_obj_root.push t
cs_t = @class_serialize t
for k,v of cs_t # по-любому of
cs_t[k] = @_serialize v
@obj_root[ret] = cs_t
"Sref_#{ret}"
deserialize : (obj)->
@obj_root = obj
@ref_obj_root = new Array @obj_root.length # some mem optimize
for v,k in @obj_root
if typeof v == 'object' and v != null
@ref_obj_root[k] = @class_deserialize v
else
@ref_obj_root[k] = v
for v,k in @obj_root
for k2,v2 of v
if ret = /^Sref_(\d+)$/.exec v2
@ref_obj_root[k][k2] = @ref_obj_root[ret[1]] # parseInt не обязателен
@ref_obj_root[0]
Super_serializer = new @Super_serializer
class @Gram_cache
file : ''
gram_crc : ''
gram_hash : {}
constructor:()->
token_serialize : (t)->
res = {
value_array : []
rule : null
a : t.a
b : t.b
}
for v in t.value_array
if v.serialize_uid?
res.value_array.push v.serialize_uid
else if v.serialize_ab_uid?
res.value_array.push v.serialize_ab_uid
else
puts v
throw new Error("missing token.serialize_uid and token.serialize_ab_uid")
if !t.rule.serialize_uid?
puts t.rule
throw new Error("missing t.rule.serialize_uid")
res.rule = {
len: t.rule.serialize_length
uid: t.rule.serialize_uid
}
res
get_cache_serialize : ()->
(gram, list)=>
@gram_hash = {
token_list : []
merge_history : []
}
gram.rule_enumerate()
gram.token_enumerate()
for v in gram.merge_history
@gram_hash.merge_history.push @token_serialize v
for v in gram.token_list
@gram_hash.token_list.push v[0].value
return
get_cache_deserialize : ()->
(gram, list)=>
return if !@gram_hash.token_list?
# build 3-token value map
cache_token_value_length = 3
token_value_map = {} # {[]}
arr = @gram_hash.token_list
(()-># защитим отечество, защитим scope
for i in [0 .. arr.length-cache_token_value_length]
key_arr = []
for pos in [i ... i+cache_token_value_length] by 1
puts pos if !arr[pos]?
key_arr.push arr[pos]
key = key_PI:KEY:<KEY>END_PI.PI:KEY:<KEY>END_PI
token_value_map[key] ?= []
token_value_map[key].push i
)()
# find first in list
find_first_position = 0
find_first_offset = 0
find_first = ()=>
arr = list
find_first_offset = 0
# puts "find_first_position=#{find_first_position}"
# puts "from=#{find_first_position}"
# puts "to =#{arr.length-cache_token_value_length}"
for i in [find_first_position .. arr.length-cache_token_value_length] by 1 # COPYPASTE
# puts "i=#{i}"
key_arr = []
for pos in [i ... i+cache_token_value_length] by 1
key_arr.push arr[pos][0].value
key = key_PI:KEY:<KEY>END_PI.PI:KEY:<KEY>END_PI
# puts "search key=#{key}"
# puts "search tvm=#{token_value_map[key]}"
break if cache_position_array = token_value_map[key]
find_first_offset++
# puts "find_first_offset =#{find_first_offset}"
# puts "cache_position_array =#{JSON.stringify cache_position_array}"
# puts "key =#{key}"
# puts "token_value_map[key] =#{token_value_map[key]}"
return cache_position_array
# make group longer than 3
longer = (list_position, cache_position_array)=>
# puts "longer(#{list_position}, #{cache_position_array})"
res = [-1,-1]
for orig_cache_position in cache_position_array
cache_position = orig_cache_position
count = 0
FAtoken_list = @gram_hash.token_list
loop
break if !list[list_position]? # end of token list
p_list = list[list_position++]
c_list = FAtoken_list[cache_position++]
# puts "'#{p_list[0].value}' != '#{c_list[0]}'"
# break if p_list[0].value != c_list[0]
if p_list[0].value != c_list[0]
# puts "break p_list[0].value != c_list[0]"
break
count++
res = [count, orig_cache_position] if res[0] < count
# puts "longer count =#{res[0]}"
# puts "longer orig_cache_position=#{res[1]}" # TRUE
res
get_list_hint = (count)->
(list[i][0].value for i in [last_position ... last_position+count]).join(",")
last_position = 0
fill_list = (count)->
# puts "fill_list #{last_position}..#{last_position+count} #{get_list_hint(count)}"
# nothing to do (see gram rule)
last_position += count
fill_cache = (count, cache_position)=>
# puts "fill_cache #{last_position}..#{last_position+count} #{get_list_hint(count)}"
# adjust a,b positions and insert @ list_position
offset = last_position-cache_position
c2r = (t)-> # cache2real
t+offset
# r2c = (t)-> # real2cache
# t-offset
re_merge_history = {}
for merge_token,merge_token_position in @gram_hash.merge_history
# select only in range
# TODO check edge cases
continue if !(merge_token.a >= cache_position and merge_token.b <= cache_position+count)
value_array = []
for v in merge_token.value_array
if typeof v == 'number'
v = re_merge_history[v]
if !v?
throw new Error("cache fail: bad merge_history position")
else if reg_ret = /^ab(\d+)_(\d+)\[(\d+)\]$/.exec v # text "ab#{ab_k}[#{k}]"
[skip, a, b, k] = reg_ret
a = c2r parseInt a
b = c2r parseInt b
if !gram.token_a_b_list["#{a}_#{b}"]?
throw new Error("cache fail: bad gram.token_a_b_list[#{a}_#{b}] position")
v = gram.token_a_b_list["#{a}_#{b}"][k]
if !v?
puts gram.token_a_b_list["#{a}_#{b}"]
throw new Error("cache fail: bad gram.token_a_b_list[#{a}_#{b}][#{k}] position")
value_array.push v
throw new Error("cache fail: check_and_mix arg count != 2 != 1") if value_array.length != 2 and value_array.length != 1
rule = gram.rule_by_length_list[merge_token.rule.len][merge_token.rule.uid]
# COPYPASTE check_and_mix
ret = rule.check_and_mix value_array #, {no_strict_check:true}
if ret?
if re_merge_history[merge_token_position]?
throw new Error("cache fail: re_merge_history rewrite")
re_merge_history[merge_token_position] = ret
ret.a = c2r merge_token.a
ret.b = c2r merge_token.b
# puts "#{ret.a}_#{ret.b}", ret.mx_hash.hash_key, ret.value
gram.merge_history.push ret
gram.merge_register ret, {no_new:true}
gram.stat_merge_pair++
gram.stat_merge_pair_mix++
gram.stat_merge_pair_mix_hit++
else
puts value_array
throw new Error("cache fail: check_and_mix returns null")
# clear new in cache positions
for a in [last_position ... last_position+count]
gram.token_a_list_new[a] = []
gram.token_b_list_new[a+1] = []
for b in [a+1 .. last_position+count] by 1
gram.token_a_b_list_new["#{a}_#{b}"] = []
last_position += count
while cache_position_array = find_first()
# puts "find_first_offset=#{find_first_offset}"
fill_list find_first_offset if find_first_offset > 0
find_first_position += find_first_offset
# puts "longer_find_first_position=#{find_first_position}"
[ins_count, cache_position] = longer(find_first_position, cache_position_array)
if ins_count
fill_cache ins_count, cache_position
else
fill_list 1
# puts "last_position=#{last_position}"
find_first_position = last_position
# puts "find_first_position=#{find_first_position}"
fill_list list.length - find_first_position if list.length - find_first_position > 0
# puts gram.token_a_b_list_new
# gram.stat()
return
# ###################################################################################################
# real interface
# ###################################################################################################
toFile : ()->
@gram_pack()
blob = {}
for v in @export_list
blob[v] = @[v]
fs.writeFileSync @file, JSON.stringify blob
fromFile : ()->
blob = JSON.parse (fs.readFileSync @file).toString()
for v in @export_list
@[v] = blob[v]
return
bind_opt : (opt={})->
opt.cache_serialize = @get_cache_serialize()
opt.cache_deserialize = @get_cache_deserialize()
opt
|
[
{
"context": "d feels like. Design is how it works.'\r\n '-Steve Jobs'\r\n 'I look forward to helping you with your ",
"end": 160,
"score": 0.998181164264679,
"start": 150,
"tag": "NAME",
"value": "Steve Jobs"
}
] | app/assets/javascripts/typed-implementation.coffee | shownola/shownola-portfolio | 0 | ready = ->
Typed.new '.element',
strings: [
'Design is not just what it looks like and feels like. Design is how it works.'
'-Steve Jobs'
'I look forward to helping you with your projects!'
]
typeSpeed: 0
return
$(document).ready ready
$(document).on 'turbolinks:load', ready
| 165067 | ready = ->
Typed.new '.element',
strings: [
'Design is not just what it looks like and feels like. Design is how it works.'
'-<NAME>'
'I look forward to helping you with your projects!'
]
typeSpeed: 0
return
$(document).ready ready
$(document).on 'turbolinks:load', ready
| true | ready = ->
Typed.new '.element',
strings: [
'Design is not just what it looks like and feels like. Design is how it works.'
'-PI:NAME:<NAME>END_PI'
'I look forward to helping you with your projects!'
]
typeSpeed: 0
return
$(document).ready ready
$(document).on 'turbolinks:load', ready
|
[
{
"context": " server:\n port: 8080\n passwords:\n user : 'pass'\n zones:\n \"askldhasjkldh==\" :\n hostname ",
"end": 71,
"score": 0.9957558512687683,
"start": 67,
"tag": "PASSWORD",
"value": "pass"
},
{
"context": ": '60'\n type : 'A'\n admins : [... | src/config.sample.coffee | eerrecart/http-to-nsupdate | 1 | module.exports =
server:
port: 8080
passwords:
user : 'pass'
zones:
"askldhasjkldh==" :
hostname : 'subdomain.example.com'
server : 'ns1.example.com'
zone : 'example.com.'
ttl : '60'
type : 'A'
admins : [ 'user'] | 72369 | module.exports =
server:
port: 8080
passwords:
user : '<PASSWORD>'
zones:
"askldhasjkldh==" :
hostname : 'subdomain.example.com'
server : 'ns1.example.com'
zone : 'example.com.'
ttl : '60'
type : 'A'
admins : [ 'user'] | true | module.exports =
server:
port: 8080
passwords:
user : 'PI:PASSWORD:<PASSWORD>END_PI'
zones:
"askldhasjkldh==" :
hostname : 'subdomain.example.com'
server : 'ns1.example.com'
zone : 'example.com.'
ttl : '60'
type : 'A'
admins : [ 'user'] |
[
{
"context": " = @owner\n name = @name\n className = owner.className()",
"end": 1944,
"score": 0.8740230798721313,
"start": 1944,
"tag": "NAME",
"value": ""
},
{
"context": " = @owner\n name = @name\n ... | node_modules/tower/packages/tower-model/shared/relation.coffee | MagicPower2/Power | 1 | _ = Tower._
class Tower.ModelRelation extends Tower.Class
@reopen
isCollection: false
# Construct a new relation.
#
# @param [Function] owner Tower.Model class this relation is defined on.
# @param [String] name name of the relation.
# @param [Object] options options hash.
#
# @option options [String] type name of the associated class.
# @option options [Boolean] readonly (false)
# @option options [Boolean] validate (false)
# @option options [Boolean] autosave (false)
# @option options [Boolean] touch (false)
# @option options [Boolean] dependent (false) if true, relationship records
# will be destroyed if the owner record is destroyed.
# @option options [String] inverseOf (undefined)
# @option options [Boolean] polymorphic (false)
# @option options [String] foreignKey Defaults to "#{as}Id" if polymorphic, else "#{singularName}Id"
# @option options [String] foreignType Defaults to "#{as}Type" if polymorphic, otherwise it's undefined
# @option options [Boolean|String] idCache (false)
# @option options [String] idCacheKey Set to the value of the `idCache` option if it's a string,
# otherwise it's `"#{singularTargetName}Ids"`.
# @option options [Boolean] counterCache (false) if true, will increment `relationshipCount` variable
# when relationship is created/destroyed.
# @option options [String] counterCacheKey Set to the value of the `counterCache` option if it's a string,
# otherwise it's `"#{singularTargetName}Count"`.
# @option options [Boolean] autobuild (false)
#
# @see Tower.ModelRelations.#hasMany
init: (owner, name, options = {}) ->
@_super()
@[key] = value for key, value of options
@owner = owner
@name = name
@initialize(options)
initialize: (options) ->
owner = @owner
name = @name
className = owner.className()
# @type = Tower.namespaced(options.type || _.camelize(_.singularize(name)))
@type = Tower.namespaced(options.type || _.camelize(_.singularize(name)))
@ownerType = Tower.namespaced(className)
@dependent ||= false
@counterCache ||= false
@idCache = false unless @hasOwnProperty('idCache')
@readonly = false unless @hasOwnProperty('readonly')
# from rails' autosave_association and reflection.rb validate?
@validate = @autosave == true unless @hasOwnProperty('validate')
# @autosave is undefined has a different meaning that true/false
# @autosave = false unless @hasOwnProperty('autosave')
@touch = false unless @hasOwnProperty('touch')
@inverseOf ||= undefined
@polymorphic = options.hasOwnProperty('as') || !!options.polymorphic
@default = false unless @hasOwnProperty('default')
@singularName = _.camelize(className, true)
@pluralName = _.pluralize(className) # collectionName?
@singularTargetName = _.singularize(name)
@pluralTargetName = _.pluralize(name)
@targetType = @type
@primaryKey = 'id'
# @todo
@autobuild = false unless @hasOwnProperty('autobuild')
# hasMany "posts", foreignKey: "postId", idCacheKey: "postIds"
unless @foreignKey
if @as
@foreignKey = "#{@as}Id"
else
if @className().match 'BelongsTo'
@foreignKey = "#{@singularTargetName}Id"
else
@foreignKey = "#{@singularName}Id"
@foreignType ||= "#{@as}Type" if @polymorphic
if @idCache
if typeof @idCache == 'string'
@idCacheKey = @idCache
@idCache = true
else
@idCacheKey = "#{@singularTargetName}Ids"
@owner.field @idCacheKey, type: 'Array', default: []
if @counterCache
if typeof @counterCache == 'string'
@counterCacheKey = @counterCache
@counterCache = true
else
@counterCacheKey = "#{@singularTargetName}Count"
@owner.field @counterCacheKey, type: 'Integer', default: 0
@_defineRelation(name)
#if @autosave
@owner._addAutosaveAssociationCallbacks(@)
# @todo refactor!
# http://stackoverflow.com/questions/4255379/dirty-tracking-of-embedded-document-on-the-parent-doc-in-mongoid
_defineRelation: (name) ->
object = {}
isHasMany = !@className().match(/HasOne|BelongsTo/)
@relationType = if isHasMany then 'collection' else 'singular'
object[name + 'AssociationScope'] = Ember.computed((key) ->
@constructor.relation(name).scoped(@)
).cacheable()
association = @
if isHasMany
# you can "set" collections directly, but whenever you "get" them
# you're going to get a Tower.ModelScope. To get the actual records call `.all`
object[name] = Ember.computed((key, value) ->
if arguments.length == 2
@_setHasManyAssociation(key, value, association)
else
@_getHasManyAssociation(name)
).property('data').cacheable()
else
if @className().match 'BelongsTo'
object[name] = Ember.computed((key, value) ->
if arguments.length is 2
@_setBelongsToAssociation(key, value, association)
else
@_getBelongsToAssociation(key)
).property('data', "#{name}Id").cacheable()
else # HasOne
object[name] = Ember.computed((key, value) ->
if arguments.length is 2
@_setHasOneAssociation(key, value, association)
else
@_getHasOneAssociation(key)
).property('data').cacheable()
@owner.reopen(object)
# @return [Tower.ModelRelationScope]
scoped: (record) ->
cursor = Tower[@constructor.className() + 'Cursor'].make()
#cursor.make(model: @klass(), owner: record, relation: @)
attributes = owner: record, relation: @
polymorphicBelongsTo = @polymorphic && @className().match(/BelongsTo/)
unless polymorphicBelongsTo
attributes.model = @klass()# unless @polymorphic
cursor.make(attributes)
klass = try @targetKlass()
if polymorphicBelongsTo
#id = record.get(@foreignKey)
type = record.get(@foreignType)
if type?
cursor.model = Tower.constant(type)
cursor.store = cursor.model.store()
else
cursor.where(type: klass.className()) if klass && klass.shouldIncludeTypeInScope()
new Tower.ModelScope(cursor)
# @return [Function]
targetKlass: ->
Tower.constant(@targetType)
# Class for model on the other side of this relationship.
#
# @return [Function]
klass: ->
Tower.constant(@type)
# Relation on the associated object that maps back to this relation.
#
# @return [Tower.ModelRelation]
inverse: (type) ->
return @_inverse if @_inverse
relations = @targetKlass().relations()
if @inverseOf
return relations[@inverseOf]
else
for name, relation of relations
# need a way to check if class extends another class in coffeescript...
return relation if relation.inverseOf == @name
for name, relation of relations
return relation if relation.targetType == @ownerType
null
_setForeignKey: ->
_setForeignType: ->
Tower.ModelRelationCursorMixin = Ember.Mixin.create
isConstructable: ->
!!!@relation.polymorphic
isLoaded: false
clone: (cloneContent = true) ->
#if Ember.EXTEND_PROTOTYPES
# clone = @clonePrototype()
#else
clone = @constructor.make()
if cloneContent
content = Ember.get(@, 'content') || Ember.A([])
clone.setProperties(content: content) if content
unless content
clone.setProperties(content: Ember.A([]))
clone.make(model: @model, owner: @owner, relation: @relation, instantiate: @instantiate)
clone.merge(@)
clone
clonePrototype: ->
clone = @concat()
clone.isCursor = true
Tower.ModelRelationCursorMixin.apply(clone)
load: (records) ->
owner = @owner
relation = @relation.inverse()
if !relation
throw new Error("Inverse relation has not been defined for `#{@relation.owner.className()}.#{_.camelize(@relation.className(), true)}('#{@relation.name}')`")
for record in records
record.set(relation.name, owner)
@_super(records)
reset: ->
owner = @owner
relation = @relation.inverse()
records = Ember.get(@, 'content')#if Ember.EXTEND_PROTOTYPES then @ else Ember.get(@, 'content')
# this + ember computed cacheable() is causing issues with run loop, not sure this needs to be here.
#for record in records
# record.set(relation.name, undefined)
@_super()
setInverseInstance: (record) ->
if record && @invertibleFor(record)
inverse = record.relation(@inverseReflectionFor(record).name)
inverse.target = owner
invertibleFor: (record) ->
true
inverse: (record) ->
_teardown: ->
_.teardown(@, 'relation', 'records', 'owner', 'model', 'criteria')
addToTarget: (record) ->
# Adds record to array of items to remove later in the persistence lifecycle.
removeFromTarget: (record) ->
@removed().push(record)
removed: ->
@_removed ||= []
class Tower.ModelRelationCursor extends Tower.ModelCursor
@reopenClass
makeOld: ->
array = []
array.isCursor = true
Tower.ModelRelationCursorMixin.apply(array)
@include Tower.ModelRelationCursorMixin
require './relation/belongsTo'
require './relation/hasMany'
require './relation/hasManyThrough'
require './relation/hasOne'
module.exports = Tower.ModelRelation
| 60708 | _ = Tower._
class Tower.ModelRelation extends Tower.Class
@reopen
isCollection: false
# Construct a new relation.
#
# @param [Function] owner Tower.Model class this relation is defined on.
# @param [String] name name of the relation.
# @param [Object] options options hash.
#
# @option options [String] type name of the associated class.
# @option options [Boolean] readonly (false)
# @option options [Boolean] validate (false)
# @option options [Boolean] autosave (false)
# @option options [Boolean] touch (false)
# @option options [Boolean] dependent (false) if true, relationship records
# will be destroyed if the owner record is destroyed.
# @option options [String] inverseOf (undefined)
# @option options [Boolean] polymorphic (false)
# @option options [String] foreignKey Defaults to "#{as}Id" if polymorphic, else "#{singularName}Id"
# @option options [String] foreignType Defaults to "#{as}Type" if polymorphic, otherwise it's undefined
# @option options [Boolean|String] idCache (false)
# @option options [String] idCacheKey Set to the value of the `idCache` option if it's a string,
# otherwise it's `"#{singularTargetName}Ids"`.
# @option options [Boolean] counterCache (false) if true, will increment `relationshipCount` variable
# when relationship is created/destroyed.
# @option options [String] counterCacheKey Set to the value of the `counterCache` option if it's a string,
# otherwise it's `"#{singularTargetName}Count"`.
# @option options [Boolean] autobuild (false)
#
# @see Tower.ModelRelations.#hasMany
init: (owner, name, options = {}) ->
@_super()
@[key] = value for key, value of options
@owner = owner
@name = name
@initialize(options)
initialize: (options) ->
owner = @owner
name =<NAME> @name
className = owner.className()
# @type = Tower.namespaced(options.type || _.camelize(_.singularize(name)))
@type = Tower.namespaced(options.type || _.camelize(_.singularize(name)))
@ownerType = Tower.namespaced(className)
@dependent ||= false
@counterCache ||= false
@idCache = false unless @hasOwnProperty('idCache')
@readonly = false unless @hasOwnProperty('readonly')
# from rails' autosave_association and reflection.rb validate?
@validate = @autosave == true unless @hasOwnProperty('validate')
# @autosave is undefined has a different meaning that true/false
# @autosave = false unless @hasOwnProperty('autosave')
@touch = false unless @hasOwnProperty('touch')
@inverseOf ||= undefined
@polymorphic = options.hasOwnProperty('as') || !!options.polymorphic
@default = false unless @hasOwnProperty('default')
@singularName = _.camelize(className, true)
@pluralName = _.pluralize(className) # collectionName?
@singularTargetName = _.singularize(name)
@pluralTargetName = _.pluralize(name)
@targetType = @type
@primaryKey = 'id'
# @todo
@autobuild = false unless @hasOwnProperty('autobuild')
# hasMany "posts", foreignKey: "postId", idCacheKey: "postIds"
unless @foreignKey
if @as
@foreignKey = "#{@as}Id"
else
if @className().match 'BelongsTo'
@foreignKey = "#{@singularTargetName}Id"
else
@foreignKey = "#{@singularName}Id"
@foreignType ||= "#{@as}Type" if @polymorphic
if @idCache
if typeof @idCache == 'string'
@idCacheKey = @idCache
@idCache = true
else
@idCacheKey = "#{@singularTargetName}Ids"
@owner.field @idCacheKey, type: 'Array', default: []
if @counterCache
if typeof @counterCache == 'string'
@counterCacheKey = @counterCache
@counterCache = true
else
@counterCacheKey = "#{@singularTargetName}Count"
@owner.field @counterCacheKey, type: 'Integer', default: 0
@_defineRelation(name)
#if @autosave
@owner._addAutosaveAssociationCallbacks(@)
# @todo refactor!
# http://stackoverflow.com/questions/4255379/dirty-tracking-of-embedded-document-on-the-parent-doc-in-mongoid
_defineRelation: (name) ->
object = {}
isHasMany = !@className().match(/HasOne|BelongsTo/)
@relationType = if isHasMany then 'collection' else 'singular'
object[name + 'AssociationScope'] = Ember.computed((key) ->
@constructor.relation(name).scoped(@)
).cacheable()
association = @
if isHasMany
# you can "set" collections directly, but whenever you "get" them
# you're going to get a Tower.ModelScope. To get the actual records call `.all`
object[name] = Ember.computed((key, value) ->
if arguments.length == 2
@_setHasManyAssociation(key, value, association)
else
@_getHasManyAssociation(name)
).property('data').cacheable()
else
if @className().match 'BelongsTo'
object[name] = Ember.computed((key, value) ->
if arguments.length is 2
@_setBelongsToAssociation(key, value, association)
else
@_getBelongsToAssociation(key)
).property('data', "#{name}Id").cacheable()
else # HasOne
object[name] = Ember.computed((key, value) ->
if arguments.length is 2
@_setHasOneAssociation(key, value, association)
else
@_getHasOneAssociation(key)
).property('data').cacheable()
@owner.reopen(object)
# @return [Tower.ModelRelationScope]
scoped: (record) ->
cursor = Tower[@constructor.className() + 'Cursor'].make()
#cursor.make(model: @klass(), owner: record, relation: @)
attributes = owner: record, relation: @
polymorphicBelongsTo = @polymorphic && @className().match(/BelongsTo/)
unless polymorphicBelongsTo
attributes.model = @klass()# unless @polymorphic
cursor.make(attributes)
klass = try @targetKlass()
if polymorphicBelongsTo
#id = record.get(@foreignKey)
type = record.get(@foreignType)
if type?
cursor.model = Tower.constant(type)
cursor.store = cursor.model.store()
else
cursor.where(type: klass.className()) if klass && klass.shouldIncludeTypeInScope()
new Tower.ModelScope(cursor)
# @return [Function]
targetKlass: ->
Tower.constant(@targetType)
# Class for model on the other side of this relationship.
#
# @return [Function]
klass: ->
Tower.constant(@type)
# Relation on the associated object that maps back to this relation.
#
# @return [Tower.ModelRelation]
inverse: (type) ->
return @_inverse if @_inverse
relations = @targetKlass().relations()
if @inverseOf
return relations[@inverseOf]
else
for name, relation of relations
# need a way to check if class extends another class in coffeescript...
return relation if relation.inverseOf == @name
for name, relation of relations
return relation if relation.targetType == @ownerType
null
_setForeignKey: ->
_setForeignType: ->
Tower.ModelRelationCursorMixin = Ember.Mixin.create
isConstructable: ->
!!!@relation.polymorphic
isLoaded: false
clone: (cloneContent = true) ->
#if Ember.EXTEND_PROTOTYPES
# clone = @clonePrototype()
#else
clone = @constructor.make()
if cloneContent
content = Ember.get(@, 'content') || Ember.A([])
clone.setProperties(content: content) if content
unless content
clone.setProperties(content: Ember.A([]))
clone.make(model: @model, owner: @owner, relation: @relation, instantiate: @instantiate)
clone.merge(@)
clone
clonePrototype: ->
clone = @concat()
clone.isCursor = true
Tower.ModelRelationCursorMixin.apply(clone)
load: (records) ->
owner = @owner
relation = @relation.inverse()
if !relation
throw new Error("Inverse relation has not been defined for `#{@relation.owner.className()}.#{_.camelize(@relation.className(), true)}('#{@relation.name}')`")
for record in records
record.set(relation.name, owner)
@_super(records)
reset: ->
owner = @owner
relation = @relation.inverse()
records = Ember.get(@, 'content')#if Ember.EXTEND_PROTOTYPES then @ else Ember.get(@, 'content')
# this + ember computed cacheable() is causing issues with run loop, not sure this needs to be here.
#for record in records
# record.set(relation.name, undefined)
@_super()
setInverseInstance: (record) ->
if record && @invertibleFor(record)
inverse = record.relation(@inverseReflectionFor(record).name)
inverse.target = owner
invertibleFor: (record) ->
true
inverse: (record) ->
_teardown: ->
_.teardown(@, 'relation', 'records', 'owner', 'model', 'criteria')
addToTarget: (record) ->
# Adds record to array of items to remove later in the persistence lifecycle.
removeFromTarget: (record) ->
@removed().push(record)
removed: ->
@_removed ||= []
class Tower.ModelRelationCursor extends Tower.ModelCursor
@reopenClass
makeOld: ->
array = []
array.isCursor = true
Tower.ModelRelationCursorMixin.apply(array)
@include Tower.ModelRelationCursorMixin
require './relation/belongsTo'
require './relation/hasMany'
require './relation/hasManyThrough'
require './relation/hasOne'
module.exports = Tower.ModelRelation
| true | _ = Tower._
class Tower.ModelRelation extends Tower.Class
@reopen
isCollection: false
# Construct a new relation.
#
# @param [Function] owner Tower.Model class this relation is defined on.
# @param [String] name name of the relation.
# @param [Object] options options hash.
#
# @option options [String] type name of the associated class.
# @option options [Boolean] readonly (false)
# @option options [Boolean] validate (false)
# @option options [Boolean] autosave (false)
# @option options [Boolean] touch (false)
# @option options [Boolean] dependent (false) if true, relationship records
# will be destroyed if the owner record is destroyed.
# @option options [String] inverseOf (undefined)
# @option options [Boolean] polymorphic (false)
# @option options [String] foreignKey Defaults to "#{as}Id" if polymorphic, else "#{singularName}Id"
# @option options [String] foreignType Defaults to "#{as}Type" if polymorphic, otherwise it's undefined
# @option options [Boolean|String] idCache (false)
# @option options [String] idCacheKey Set to the value of the `idCache` option if it's a string,
# otherwise it's `"#{singularTargetName}Ids"`.
# @option options [Boolean] counterCache (false) if true, will increment `relationshipCount` variable
# when relationship is created/destroyed.
# @option options [String] counterCacheKey Set to the value of the `counterCache` option if it's a string,
# otherwise it's `"#{singularTargetName}Count"`.
# @option options [Boolean] autobuild (false)
#
# @see Tower.ModelRelations.#hasMany
init: (owner, name, options = {}) ->
@_super()
@[key] = value for key, value of options
@owner = owner
@name = name
@initialize(options)
initialize: (options) ->
owner = @owner
name =PI:NAME:<NAME>END_PI @name
className = owner.className()
# @type = Tower.namespaced(options.type || _.camelize(_.singularize(name)))
@type = Tower.namespaced(options.type || _.camelize(_.singularize(name)))
@ownerType = Tower.namespaced(className)
@dependent ||= false
@counterCache ||= false
@idCache = false unless @hasOwnProperty('idCache')
@readonly = false unless @hasOwnProperty('readonly')
# from rails' autosave_association and reflection.rb validate?
@validate = @autosave == true unless @hasOwnProperty('validate')
# @autosave is undefined has a different meaning that true/false
# @autosave = false unless @hasOwnProperty('autosave')
@touch = false unless @hasOwnProperty('touch')
@inverseOf ||= undefined
@polymorphic = options.hasOwnProperty('as') || !!options.polymorphic
@default = false unless @hasOwnProperty('default')
@singularName = _.camelize(className, true)
@pluralName = _.pluralize(className) # collectionName?
@singularTargetName = _.singularize(name)
@pluralTargetName = _.pluralize(name)
@targetType = @type
@primaryKey = 'id'
# @todo
@autobuild = false unless @hasOwnProperty('autobuild')
# hasMany "posts", foreignKey: "postId", idCacheKey: "postIds"
unless @foreignKey
if @as
@foreignKey = "#{@as}Id"
else
if @className().match 'BelongsTo'
@foreignKey = "#{@singularTargetName}Id"
else
@foreignKey = "#{@singularName}Id"
@foreignType ||= "#{@as}Type" if @polymorphic
if @idCache
if typeof @idCache == 'string'
@idCacheKey = @idCache
@idCache = true
else
@idCacheKey = "#{@singularTargetName}Ids"
@owner.field @idCacheKey, type: 'Array', default: []
if @counterCache
if typeof @counterCache == 'string'
@counterCacheKey = @counterCache
@counterCache = true
else
@counterCacheKey = "#{@singularTargetName}Count"
@owner.field @counterCacheKey, type: 'Integer', default: 0
@_defineRelation(name)
#if @autosave
@owner._addAutosaveAssociationCallbacks(@)
# @todo refactor!
# http://stackoverflow.com/questions/4255379/dirty-tracking-of-embedded-document-on-the-parent-doc-in-mongoid
_defineRelation: (name) ->
object = {}
isHasMany = !@className().match(/HasOne|BelongsTo/)
@relationType = if isHasMany then 'collection' else 'singular'
object[name + 'AssociationScope'] = Ember.computed((key) ->
@constructor.relation(name).scoped(@)
).cacheable()
association = @
if isHasMany
# you can "set" collections directly, but whenever you "get" them
# you're going to get a Tower.ModelScope. To get the actual records call `.all`
object[name] = Ember.computed((key, value) ->
if arguments.length == 2
@_setHasManyAssociation(key, value, association)
else
@_getHasManyAssociation(name)
).property('data').cacheable()
else
if @className().match 'BelongsTo'
object[name] = Ember.computed((key, value) ->
if arguments.length is 2
@_setBelongsToAssociation(key, value, association)
else
@_getBelongsToAssociation(key)
).property('data', "#{name}Id").cacheable()
else # HasOne
object[name] = Ember.computed((key, value) ->
if arguments.length is 2
@_setHasOneAssociation(key, value, association)
else
@_getHasOneAssociation(key)
).property('data').cacheable()
@owner.reopen(object)
# @return [Tower.ModelRelationScope]
scoped: (record) ->
cursor = Tower[@constructor.className() + 'Cursor'].make()
#cursor.make(model: @klass(), owner: record, relation: @)
attributes = owner: record, relation: @
polymorphicBelongsTo = @polymorphic && @className().match(/BelongsTo/)
unless polymorphicBelongsTo
attributes.model = @klass()# unless @polymorphic
cursor.make(attributes)
klass = try @targetKlass()
if polymorphicBelongsTo
#id = record.get(@foreignKey)
type = record.get(@foreignType)
if type?
cursor.model = Tower.constant(type)
cursor.store = cursor.model.store()
else
cursor.where(type: klass.className()) if klass && klass.shouldIncludeTypeInScope()
new Tower.ModelScope(cursor)
# @return [Function]
targetKlass: ->
Tower.constant(@targetType)
# Class for model on the other side of this relationship.
#
# @return [Function]
klass: ->
Tower.constant(@type)
# Relation on the associated object that maps back to this relation.
#
# @return [Tower.ModelRelation]
inverse: (type) ->
return @_inverse if @_inverse
relations = @targetKlass().relations()
if @inverseOf
return relations[@inverseOf]
else
for name, relation of relations
# need a way to check if class extends another class in coffeescript...
return relation if relation.inverseOf == @name
for name, relation of relations
return relation if relation.targetType == @ownerType
null
_setForeignKey: ->
_setForeignType: ->
Tower.ModelRelationCursorMixin = Ember.Mixin.create
isConstructable: ->
!!!@relation.polymorphic
isLoaded: false
clone: (cloneContent = true) ->
#if Ember.EXTEND_PROTOTYPES
# clone = @clonePrototype()
#else
clone = @constructor.make()
if cloneContent
content = Ember.get(@, 'content') || Ember.A([])
clone.setProperties(content: content) if content
unless content
clone.setProperties(content: Ember.A([]))
clone.make(model: @model, owner: @owner, relation: @relation, instantiate: @instantiate)
clone.merge(@)
clone
clonePrototype: ->
clone = @concat()
clone.isCursor = true
Tower.ModelRelationCursorMixin.apply(clone)
load: (records) ->
owner = @owner
relation = @relation.inverse()
if !relation
throw new Error("Inverse relation has not been defined for `#{@relation.owner.className()}.#{_.camelize(@relation.className(), true)}('#{@relation.name}')`")
for record in records
record.set(relation.name, owner)
@_super(records)
reset: ->
owner = @owner
relation = @relation.inverse()
records = Ember.get(@, 'content')#if Ember.EXTEND_PROTOTYPES then @ else Ember.get(@, 'content')
# this + ember computed cacheable() is causing issues with run loop, not sure this needs to be here.
#for record in records
# record.set(relation.name, undefined)
@_super()
setInverseInstance: (record) ->
if record && @invertibleFor(record)
inverse = record.relation(@inverseReflectionFor(record).name)
inverse.target = owner
invertibleFor: (record) ->
true
inverse: (record) ->
_teardown: ->
_.teardown(@, 'relation', 'records', 'owner', 'model', 'criteria')
addToTarget: (record) ->
# Adds record to array of items to remove later in the persistence lifecycle.
removeFromTarget: (record) ->
@removed().push(record)
removed: ->
@_removed ||= []
class Tower.ModelRelationCursor extends Tower.ModelCursor
@reopenClass
makeOld: ->
array = []
array.isCursor = true
Tower.ModelRelationCursorMixin.apply(array)
@include Tower.ModelRelationCursorMixin
require './relation/belongsTo'
require './relation/hasMany'
require './relation/hasManyThrough'
require './relation/hasOne'
module.exports = Tower.ModelRelation
|
[
{
"context": "emList.poe = { meta: {\n\t\tActive: false\n\t\tAuthor: 'Edgar Allan Poe'\n\t\tTitle: 'The Raven'\n\t\tYear: '1845'\n\t\tLanguage: ",
"end": 246,
"score": 0.999889075756073,
"start": 231,
"tag": "NAME",
"value": "Edgar Allan Poe"
},
{
"context": "mList.vrch = { meta: {\n... | source/tests/services/filter.js.coffee | tasuki/side-by-side | 29 | poems = {}
filter = {}
poemList = {}
module "filter", {
setup: ->
injector = getInjector()
poems = injector.get('poems')
filter = injector.get('filter')
}
setPoems = ->
poemList.poe = { meta: {
Active: false
Author: 'Edgar Allan Poe'
Title: 'The Raven'
Year: '1845'
Language: 'English'
Code: 'poe'
}}
poemList.vrch = { meta: {
Active: false
Author: 'Jaroslav Vrchlický'
Title: 'Havran'
Year: '1845'
Language: 'Czech'
Code: 'vrch'
}}
poemList.muz = { meta: {
Active: false
Author: 'Augustin Eugen Mužík'
Title: 'Havran'
Year: '1885'
Language: 'Czech'
Code: 'muz'
}}
poemList.dolu = { meta: {
Active: false
Author: 'Karel Dostál Lutinov'
Title: 'Havran'
Year: '1918'
Language: 'Czech'
Code: 'dolu'
}}
poems.all = [
poemList.poe
poemList.vrch
poemList.muz
poemList.dolu
]
test "get filter for poems that share a common property", ->
setPoems()
poemList.dolu.meta.Active = true
poemList.vrch.meta.Active = true
poemList.muz.meta.Active = true
filter.update()
deepEqual filter.getFilter(), {
'Language': [ 'Czech' ]
}
test "get filter for arbitrary poems using shortest key", ->
setPoems()
poemList.vrch.meta.Active = true
poemList.muz.meta.Active = true
filter.update()
deepEqual filter.getFilter(), {
'Code': [ 'vrch', 'muz' ]
}
test "set filter", ->
setPoems()
poemList.vrch.meta.Active = true
filter.update()
filter.setFilter {
'Language': [ 'English' ]
}
equal poemList.vrch.meta.Active, false
equal poemList.poe.meta.Active, true
deepEqual filter.getFilter(), {
'Code': [ 'poe' ]
}
| 10570 | poems = {}
filter = {}
poemList = {}
module "filter", {
setup: ->
injector = getInjector()
poems = injector.get('poems')
filter = injector.get('filter')
}
setPoems = ->
poemList.poe = { meta: {
Active: false
Author: '<NAME>'
Title: 'The Raven'
Year: '1845'
Language: 'English'
Code: 'poe'
}}
poemList.vrch = { meta: {
Active: false
Author: '<NAME>'
Title: 'Havran'
Year: '1845'
Language: 'Czech'
Code: 'vrch'
}}
poemList.muz = { meta: {
Active: false
Author: '<NAME>'
Title: 'Havran'
Year: '1885'
Language: 'Czech'
Code: 'muz'
}}
poemList.dolu = { meta: {
Active: false
Author: '<NAME>'
Title: 'Havran'
Year: '1918'
Language: 'Czech'
Code: 'dolu'
}}
poems.all = [
poemList.poe
poemList.vrch
poemList.muz
poemList.dolu
]
test "get filter for poems that share a common property", ->
setPoems()
poemList.dolu.meta.Active = true
poemList.vrch.meta.Active = true
poemList.muz.meta.Active = true
filter.update()
deepEqual filter.getFilter(), {
'Language': [ 'Czech' ]
}
test "get filter for arbitrary poems using shortest key", ->
setPoems()
poemList.vrch.meta.Active = true
poemList.muz.meta.Active = true
filter.update()
deepEqual filter.getFilter(), {
'Code': [ 'vrch', 'muz' ]
}
test "set filter", ->
setPoems()
poemList.vrch.meta.Active = true
filter.update()
filter.setFilter {
'Language': [ 'English' ]
}
equal poemList.vrch.meta.Active, false
equal poemList.poe.meta.Active, true
deepEqual filter.getFilter(), {
'Code': [ 'poe' ]
}
| true | poems = {}
filter = {}
poemList = {}
module "filter", {
setup: ->
injector = getInjector()
poems = injector.get('poems')
filter = injector.get('filter')
}
setPoems = ->
poemList.poe = { meta: {
Active: false
Author: 'PI:NAME:<NAME>END_PI'
Title: 'The Raven'
Year: '1845'
Language: 'English'
Code: 'poe'
}}
poemList.vrch = { meta: {
Active: false
Author: 'PI:NAME:<NAME>END_PI'
Title: 'Havran'
Year: '1845'
Language: 'Czech'
Code: 'vrch'
}}
poemList.muz = { meta: {
Active: false
Author: 'PI:NAME:<NAME>END_PI'
Title: 'Havran'
Year: '1885'
Language: 'Czech'
Code: 'muz'
}}
poemList.dolu = { meta: {
Active: false
Author: 'PI:NAME:<NAME>END_PI'
Title: 'Havran'
Year: '1918'
Language: 'Czech'
Code: 'dolu'
}}
poems.all = [
poemList.poe
poemList.vrch
poemList.muz
poemList.dolu
]
test "get filter for poems that share a common property", ->
setPoems()
poemList.dolu.meta.Active = true
poemList.vrch.meta.Active = true
poemList.muz.meta.Active = true
filter.update()
deepEqual filter.getFilter(), {
'Language': [ 'Czech' ]
}
test "get filter for arbitrary poems using shortest key", ->
setPoems()
poemList.vrch.meta.Active = true
poemList.muz.meta.Active = true
filter.update()
deepEqual filter.getFilter(), {
'Code': [ 'vrch', 'muz' ]
}
test "set filter", ->
setPoems()
poemList.vrch.meta.Active = true
filter.update()
filter.setFilter {
'Language': [ 'English' ]
}
equal poemList.vrch.meta.Active, false
equal poemList.poe.meta.Active, true
deepEqual filter.getFilter(), {
'Code': [ 'poe' ]
}
|
[
{
"context": " SPACE (U+200B) have a column width of 0.\n# * - Hangul Jamo medial vowels and final consonants (U+1160-U",
"end": 616,
"score": 0.7176671624183655,
"start": 610,
"tag": "NAME",
"value": "Hangul"
}
] | deps/npm/node_modules/columnify/node_modules/wcwidth/index.coffee | lxe/io.coffee | 0 |
#
# * The following functions define the column width of an ISO 10646
# * character as follows:
# * - The null character (U+0000) has a column width of 0.
# * - Other C0/C1 control characters and DEL will lead to a return value
# * of -1.
# * - Non-spacing and enclosing combining characters (general category
# * code Mn or Me in the
# * Unicode database) have a column width of 0.
# * - SOFT HYPHEN (U+00AD) has a column width of 1.
# * - Other format characters (general category code Cf in the Unicode
# * database) and ZERO WIDTH
# * SPACE (U+200B) have a column width of 0.
# * - Hangul Jamo medial vowels and final consonants (U+1160-U+11FF)
# * have a column width of 0.
# * - Spacing characters in the East Asian Wide (W) or East Asian
# * Full-width (F) category as
# * defined in Unicode Technical Report #11 have a column width of 2.
# * - All remaining characters (including all printable ISO 8859-1 and
# * WGL4 characters, Unicode control characters, etc.) have a column
# * width of 1.
# * This implementation assumes that characters are encoded in ISO 10646.
#
wcswidth = (str, opts) ->
return wcwidth(str, opts) if typeof str isnt "string"
s = 0
i = 0
while i < str.length
n = wcwidth(str.charCodeAt(i), opts)
return -1 if n < 0
s += n
i++
s
wcwidth = (ucs, opts) ->
# test for 8-bit control characters
return opts.nul if ucs is 0
return opts.control if ucs < 32 or (ucs >= 0x7f and ucs < 0xa0)
# binary search in table of non-spacing characters
return 0 if bisearch(ucs)
# if we arrive here, ucs is not a combining or C0/C1 control character
# Hangul Jamo init. consonants
# CJK ... Yi
# Hangul Syllables
# CJK Compatibility Ideographs
# Vertical forms
# CJK Compatibility Forms
# Fullwidth Forms
1 + (ucs >= 0x1100 and (ucs <= 0x115f or ucs is 0x2329 or ucs is 0x232a or (ucs >= 0x2e80 and ucs <= 0xa4cf and ucs isnt 0x303f) or (ucs >= 0xac00 and ucs <= 0xd7a3) or (ucs >= 0xf900 and ucs <= 0xfaff) or (ucs >= 0xfe10 and ucs <= 0xfe19) or (ucs >= 0xfe30 and ucs <= 0xfe6f) or (ucs >= 0xff00 and ucs <= 0xff60) or (ucs >= 0xffe0 and ucs <= 0xffe6) or (ucs >= 0x20000 and ucs <= 0x2fffd) or (ucs >= 0x30000 and ucs <= 0x3fffd)))
bisearch = (ucs) ->
min = 0
max = combining.length - 1
mid = undefined
return false if ucs < combining[0][0] or ucs > combining[max][1]
while max >= min
mid = Math.floor((min + max) / 2)
if ucs > combining[mid][1]
min = mid + 1
else if ucs < combining[mid][0]
max = mid - 1
else
return true
false
"use strict"
defaults = require("defaults")
combining = require("./combining")
DEFAULTS =
nul: 0
control: 0
module.exports = wcwidth = (str) ->
wcswidth str, DEFAULTS
module.exports.config = (opts) ->
opts = defaults(opts or {}, DEFAULTS)
wcwidth = (str) ->
wcswidth str, opts
| 27649 |
#
# * The following functions define the column width of an ISO 10646
# * character as follows:
# * - The null character (U+0000) has a column width of 0.
# * - Other C0/C1 control characters and DEL will lead to a return value
# * of -1.
# * - Non-spacing and enclosing combining characters (general category
# * code Mn or Me in the
# * Unicode database) have a column width of 0.
# * - SOFT HYPHEN (U+00AD) has a column width of 1.
# * - Other format characters (general category code Cf in the Unicode
# * database) and ZERO WIDTH
# * SPACE (U+200B) have a column width of 0.
# * - <NAME> Jamo medial vowels and final consonants (U+1160-U+11FF)
# * have a column width of 0.
# * - Spacing characters in the East Asian Wide (W) or East Asian
# * Full-width (F) category as
# * defined in Unicode Technical Report #11 have a column width of 2.
# * - All remaining characters (including all printable ISO 8859-1 and
# * WGL4 characters, Unicode control characters, etc.) have a column
# * width of 1.
# * This implementation assumes that characters are encoded in ISO 10646.
#
wcswidth = (str, opts) ->
return wcwidth(str, opts) if typeof str isnt "string"
s = 0
i = 0
while i < str.length
n = wcwidth(str.charCodeAt(i), opts)
return -1 if n < 0
s += n
i++
s
wcwidth = (ucs, opts) ->
# test for 8-bit control characters
return opts.nul if ucs is 0
return opts.control if ucs < 32 or (ucs >= 0x7f and ucs < 0xa0)
# binary search in table of non-spacing characters
return 0 if bisearch(ucs)
# if we arrive here, ucs is not a combining or C0/C1 control character
# Hangul Jamo init. consonants
# CJK ... Yi
# Hangul Syllables
# CJK Compatibility Ideographs
# Vertical forms
# CJK Compatibility Forms
# Fullwidth Forms
1 + (ucs >= 0x1100 and (ucs <= 0x115f or ucs is 0x2329 or ucs is 0x232a or (ucs >= 0x2e80 and ucs <= 0xa4cf and ucs isnt 0x303f) or (ucs >= 0xac00 and ucs <= 0xd7a3) or (ucs >= 0xf900 and ucs <= 0xfaff) or (ucs >= 0xfe10 and ucs <= 0xfe19) or (ucs >= 0xfe30 and ucs <= 0xfe6f) or (ucs >= 0xff00 and ucs <= 0xff60) or (ucs >= 0xffe0 and ucs <= 0xffe6) or (ucs >= 0x20000 and ucs <= 0x2fffd) or (ucs >= 0x30000 and ucs <= 0x3fffd)))
bisearch = (ucs) ->
min = 0
max = combining.length - 1
mid = undefined
return false if ucs < combining[0][0] or ucs > combining[max][1]
while max >= min
mid = Math.floor((min + max) / 2)
if ucs > combining[mid][1]
min = mid + 1
else if ucs < combining[mid][0]
max = mid - 1
else
return true
false
"use strict"
defaults = require("defaults")
combining = require("./combining")
DEFAULTS =
nul: 0
control: 0
module.exports = wcwidth = (str) ->
wcswidth str, DEFAULTS
module.exports.config = (opts) ->
opts = defaults(opts or {}, DEFAULTS)
wcwidth = (str) ->
wcswidth str, opts
| true |
#
# * The following functions define the column width of an ISO 10646
# * character as follows:
# * - The null character (U+0000) has a column width of 0.
# * - Other C0/C1 control characters and DEL will lead to a return value
# * of -1.
# * - Non-spacing and enclosing combining characters (general category
# * code Mn or Me in the
# * Unicode database) have a column width of 0.
# * - SOFT HYPHEN (U+00AD) has a column width of 1.
# * - Other format characters (general category code Cf in the Unicode
# * database) and ZERO WIDTH
# * SPACE (U+200B) have a column width of 0.
# * - PI:NAME:<NAME>END_PI Jamo medial vowels and final consonants (U+1160-U+11FF)
# * have a column width of 0.
# * - Spacing characters in the East Asian Wide (W) or East Asian
# * Full-width (F) category as
# * defined in Unicode Technical Report #11 have a column width of 2.
# * - All remaining characters (including all printable ISO 8859-1 and
# * WGL4 characters, Unicode control characters, etc.) have a column
# * width of 1.
# * This implementation assumes that characters are encoded in ISO 10646.
#
wcswidth = (str, opts) ->
return wcwidth(str, opts) if typeof str isnt "string"
s = 0
i = 0
while i < str.length
n = wcwidth(str.charCodeAt(i), opts)
return -1 if n < 0
s += n
i++
s
wcwidth = (ucs, opts) ->
# test for 8-bit control characters
return opts.nul if ucs is 0
return opts.control if ucs < 32 or (ucs >= 0x7f and ucs < 0xa0)
# binary search in table of non-spacing characters
return 0 if bisearch(ucs)
# if we arrive here, ucs is not a combining or C0/C1 control character
# Hangul Jamo init. consonants
# CJK ... Yi
# Hangul Syllables
# CJK Compatibility Ideographs
# Vertical forms
# CJK Compatibility Forms
# Fullwidth Forms
1 + (ucs >= 0x1100 and (ucs <= 0x115f or ucs is 0x2329 or ucs is 0x232a or (ucs >= 0x2e80 and ucs <= 0xa4cf and ucs isnt 0x303f) or (ucs >= 0xac00 and ucs <= 0xd7a3) or (ucs >= 0xf900 and ucs <= 0xfaff) or (ucs >= 0xfe10 and ucs <= 0xfe19) or (ucs >= 0xfe30 and ucs <= 0xfe6f) or (ucs >= 0xff00 and ucs <= 0xff60) or (ucs >= 0xffe0 and ucs <= 0xffe6) or (ucs >= 0x20000 and ucs <= 0x2fffd) or (ucs >= 0x30000 and ucs <= 0x3fffd)))
bisearch = (ucs) ->
min = 0
max = combining.length - 1
mid = undefined
return false if ucs < combining[0][0] or ucs > combining[max][1]
while max >= min
mid = Math.floor((min + max) / 2)
if ucs > combining[mid][1]
min = mid + 1
else if ucs < combining[mid][0]
max = mid - 1
else
return true
false
"use strict"
defaults = require("defaults")
combining = require("./combining")
DEFAULTS =
nul: 0
control: 0
module.exports = wcwidth = (str) ->
wcswidth str, DEFAULTS
module.exports.config = (opts) ->
opts = defaults(opts or {}, DEFAULTS)
wcwidth = (str) ->
wcswidth str, opts
|
[
{
"context": "e 'user',\n username: Sequelize.STRING\n password: Sequelize.STRING\n\nVote.hasMany(Option)\nOption.belongsTo(Vote)\n\nOpt",
"end": 772,
"score": 0.9986947178840637,
"start": 756,
"tag": "PASSWORD",
"value": "Sequelize.STRING"
}
] | lib/models.coffee | liuxiong332/node-vote-server | 0 | Sequelize = require 'sequelize'
DBConfig = require('../config/database.js').DBConfig
config = if process.env.NODE_ENV is 'test' then DBConfig.test else DBConfig.development
sequelize = new Sequelize(config.dbName, config.user, config.password, config.config)
Vote = sequelize.define 'vote',
stage: Sequelize.STRING
actionSubject: Sequelize.STRING
actionContent: Sequelize.STRING
actionPromotor: Sequelize.STRING
Option = sequelize.define 'option',
value: Sequelize.STRING
Receiver = sequelize.define 'receiver',
email: Sequelize.STRING
lastAction: Sequelize.STRING
Promotor = sequelize.define 'promotor',
email: Sequelize.STRING
lastAction: Sequelize.STRING
User = sequelize.define 'user',
username: Sequelize.STRING
password: Sequelize.STRING
Vote.hasMany(Option)
Option.belongsTo(Vote)
Option.hasMany(Receiver)
Receiver.belongsTo(Option)
Vote.hasMany(Receiver)
Receiver.belongsTo(Vote)
Vote.hasOne(Promotor)
Promotor.belongsTo(Vote)
User.hasMany(Receiver)
Receiver.belongsTo(User)
User.hasMany(Promotor)
Promotor.belongsTo(User)
# sync = require('q').all([Vote.sync(), Option.sync(), Receiver.sync()])
module.exports = {Vote, Receiver, Option}
| 34377 | Sequelize = require 'sequelize'
DBConfig = require('../config/database.js').DBConfig
config = if process.env.NODE_ENV is 'test' then DBConfig.test else DBConfig.development
sequelize = new Sequelize(config.dbName, config.user, config.password, config.config)
Vote = sequelize.define 'vote',
stage: Sequelize.STRING
actionSubject: Sequelize.STRING
actionContent: Sequelize.STRING
actionPromotor: Sequelize.STRING
Option = sequelize.define 'option',
value: Sequelize.STRING
Receiver = sequelize.define 'receiver',
email: Sequelize.STRING
lastAction: Sequelize.STRING
Promotor = sequelize.define 'promotor',
email: Sequelize.STRING
lastAction: Sequelize.STRING
User = sequelize.define 'user',
username: Sequelize.STRING
password: <PASSWORD>
Vote.hasMany(Option)
Option.belongsTo(Vote)
Option.hasMany(Receiver)
Receiver.belongsTo(Option)
Vote.hasMany(Receiver)
Receiver.belongsTo(Vote)
Vote.hasOne(Promotor)
Promotor.belongsTo(Vote)
User.hasMany(Receiver)
Receiver.belongsTo(User)
User.hasMany(Promotor)
Promotor.belongsTo(User)
# sync = require('q').all([Vote.sync(), Option.sync(), Receiver.sync()])
module.exports = {Vote, Receiver, Option}
| true | Sequelize = require 'sequelize'
DBConfig = require('../config/database.js').DBConfig
config = if process.env.NODE_ENV is 'test' then DBConfig.test else DBConfig.development
sequelize = new Sequelize(config.dbName, config.user, config.password, config.config)
Vote = sequelize.define 'vote',
stage: Sequelize.STRING
actionSubject: Sequelize.STRING
actionContent: Sequelize.STRING
actionPromotor: Sequelize.STRING
Option = sequelize.define 'option',
value: Sequelize.STRING
Receiver = sequelize.define 'receiver',
email: Sequelize.STRING
lastAction: Sequelize.STRING
Promotor = sequelize.define 'promotor',
email: Sequelize.STRING
lastAction: Sequelize.STRING
User = sequelize.define 'user',
username: Sequelize.STRING
password: PI:PASSWORD:<PASSWORD>END_PI
Vote.hasMany(Option)
Option.belongsTo(Vote)
Option.hasMany(Receiver)
Receiver.belongsTo(Option)
Vote.hasMany(Receiver)
Receiver.belongsTo(Vote)
Vote.hasOne(Promotor)
Promotor.belongsTo(Vote)
User.hasMany(Receiver)
Receiver.belongsTo(User)
User.hasMany(Promotor)
Promotor.belongsTo(User)
# sync = require('q').all([Vote.sync(), Option.sync(), Receiver.sync()])
module.exports = {Vote, Receiver, Option}
|
[
{
"context": "multiple sources\", (t) ->\n original =\n name: 'akonwi'\n another =\n age: 22\n\n t.deepEqual {name: 'a",
"end": 731,
"score": 0.9990964531898499,
"start": 725,
"tag": "NAME",
"value": "akonwi"
},
{
"context": "i'\n another =\n age: 22\n\n t.deepEqual {na... | src/spec/deepAssign-spec.coffee | akonwi/flow | 3 | test = require 'tape'
deepAssign = require '../deepAssign'
test "deep assign", (t) ->
original =
foo: 'bar'
baz:
boom: 'flip'
nested:
array: [1]
copy = deepAssign {}, original
t.deepEqual original, copy, "objects are equal without changes"
original.nested.array = [2]
t.notDeepEqual original, copy, "arrays are different after changes"
copy = deepAssign {}, original
original.foo = 'baz'
t.notDeepEqual original, copy, "foo values are different after changes"
copy = deepAssign {}, original
original.baz.boom = 'flop'
t.notDeepEqual original, copy, "baz values are different after changes"
t.end()
test "deep assign with multiple sources", (t) ->
original =
name: 'akonwi'
another =
age: 22
t.deepEqual {name: 'akonwi', age: 22}, deepAssign({}, original, another)
t.end()
| 45603 | test = require 'tape'
deepAssign = require '../deepAssign'
test "deep assign", (t) ->
original =
foo: 'bar'
baz:
boom: 'flip'
nested:
array: [1]
copy = deepAssign {}, original
t.deepEqual original, copy, "objects are equal without changes"
original.nested.array = [2]
t.notDeepEqual original, copy, "arrays are different after changes"
copy = deepAssign {}, original
original.foo = 'baz'
t.notDeepEqual original, copy, "foo values are different after changes"
copy = deepAssign {}, original
original.baz.boom = 'flop'
t.notDeepEqual original, copy, "baz values are different after changes"
t.end()
test "deep assign with multiple sources", (t) ->
original =
name: '<NAME>'
another =
age: 22
t.deepEqual {name: '<NAME>', age: 22}, deepAssign({}, original, another)
t.end()
| true | test = require 'tape'
deepAssign = require '../deepAssign'
test "deep assign", (t) ->
original =
foo: 'bar'
baz:
boom: 'flip'
nested:
array: [1]
copy = deepAssign {}, original
t.deepEqual original, copy, "objects are equal without changes"
original.nested.array = [2]
t.notDeepEqual original, copy, "arrays are different after changes"
copy = deepAssign {}, original
original.foo = 'baz'
t.notDeepEqual original, copy, "foo values are different after changes"
copy = deepAssign {}, original
original.baz.boom = 'flop'
t.notDeepEqual original, copy, "baz values are different after changes"
t.end()
test "deep assign with multiple sources", (t) ->
original =
name: 'PI:NAME:<NAME>END_PI'
another =
age: 22
t.deepEqual {name: 'PI:NAME:<NAME>END_PI', age: 22}, deepAssign({}, original, another)
t.end()
|
[
{
"context": "#\n# Utility functions for recipes\n#\n# @author Torstein Thune\n# @copyright 2016 Microbrew.it\nangular.module('Mi",
"end": 60,
"score": 0.9998695850372314,
"start": 46,
"tag": "NAME",
"value": "Torstein Thune"
}
] | app/services/RecipeUtilityService.coffee | Microbrewit/microbrewit-recipe-calculator | 0 | #
# Utility functions for recipes
#
# @author Torstein Thune
# @copyright 2016 Microbrew.it
angular.module('Microbrewit').factory('mbit/services/RecipeUtilityService', [
() ->
# Get previous step with type in recipe
# @note recursive
# @return [Step, undefined]
_getStepBefore = (recipe, index) ->
if index is 0
return undefined
stepBefore = recipe.steps[index - 1]
if stepBefore.type
return stepBefore
else
return _getStepBefore(recipe, index - 1)
# Get valid step types
# @return [Array<String>]
getValidStepTypes = (recipe, step, index) ->
index ?= recipe.steps.indexOf(step)
stepBefore = _getStepBefore(recipe, index)
unless stepBefore
return ['mash']
switch stepBefore.type
when 'mash'
return ['mash', 'sparge', 'boil']
when 'sparge'
return ['boil']
when 'boil'
return ['boil', 'fermentation']
when 'fermentation'
return ['fermentation']
getMeasureSettings = (type) ->
switch type
when 'imperial'
return {
liquid:
unit: 'oz'
fermentable:
unit: 'lbs'
hop:
unit: 'lbs'
other:
unit: 'lbs'
yeast:
unit: 'lbs'
}
when 'metric'
return {
liquid:
unit: 'liters'
fermentable:
unit: 'grams'
hop:
unit: 'grams'
other:
unit: 'grams'
yeast:
unit: 'grams'
}
getRandomArbitrary = (min, max) ->
return Math.random() * (max - min) + min
calcFermentableValues = (fermentable, recipe) ->
mcu = mbFormulas.colour.mcu(mbFormulas.convert.convert(fermentable.amount, 'grams', 'kg'), fermentable.lovibond, recipe.volume)
console.log fermentable
srm = {}
for formula in mbFormulas.colour.available()
if formula isnt 'mcu'
srm[formula] = mbFormulas.colour[formula]({mcu})
srm.standard = srm.morey
gp = mbFormulas.gravity.gravityPoints
type: fermentable.subType
efficiency: recipe.efficiency
amount: fermentable.amount
volume: recipe.volume
ppg: fermentable.ppg
rgb = mbFormulas.convert.convert fermentable.lovibond, 'lovibond', 'rgb'
return {
mcu, srm, gp, rgb
}
calcHopValues = (hop, step, recipe) ->
calcObj =
boilGravity: recipe.og
utilisation: step.length
boilVolume: step.volume
aa: hop.aaValue
amount: hop.amount
boilTime: step.length
# @todo remove once steps are properly reimplemented
calcObj.boilVolume = recipe.volume if calcObj.boilVolume is 0
calculated =
bitterness: {}
for formula in mbFormulas.bitterness.available()
calculated.bitterness[formula] = mbFormulas.bitterness[formula](calcObj)
return calculated
calcYeastValues = (yeast, step, recipe) ->
return {}
# Calculate OG for a recipe
# @param [Recipe] recipe
# @return [Float] OG
calcOG = (recipe) ->
totalGP = 0
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'fermentable'
totalGP += ingredient.calculated.gp
return mbFormulas.gravity.og(totalGP)
# Calculate FG for a recipe
# @param [Recipe] recipe
# @return [Float fg]
# @todo Send in Yeast attenuation to get more exact result
calcFG = (recipe) ->
return mbFormulas.gravity.fg(recipe.og)
calcAbv = (recipe) ->
abv = {}
for formula in mbFormulas.abv.available()
console.log formula
abv[formula] = mbFormulas.abv[formula]?(recipe.og, recipe.fg)
abv.standard = abv.microbrewit
return abv
# Calculate the IBU value for the recipe
# i.e gather all the IBU values from hops
# @param [Recipe] recipe
# @return [Object] ibu All the different formulas
calcIbu = (recipe) ->
ibu = {}
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'hop'
for key, val of ingredient.calculated?.bitterness
unless ibu[key]
ibu[key] = val.ibu
else
ibu[key] += val.ibu
ibu.standard = ibu.tinseth
return ibu
# Calculate the SRM value for the recipe
# i.e gather all MCU values and use formulas to calculate
# @param [Recipe] recipe
# @return [Object] srm
calcSrm = (recipe) ->
mcu = 0
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'fermentable'
mcu += ingredient.calculated.mcu
srm = {}
for formula in mbFormulas.colour.available()
if formula isnt 'mcu'
srm[formula] = mbFormulas.colour[formula]({mcu})
srm.standard = srm.morey
return srm
return {
calcOG, calcFG, calcSrm, calcIbu, calcAbv, calcFermentableValues, calcHopValues, getMeasureSettings, getValidStepTypes
}
])
| 173516 | #
# Utility functions for recipes
#
# @author <NAME>
# @copyright 2016 Microbrew.it
angular.module('Microbrewit').factory('mbit/services/RecipeUtilityService', [
() ->
# Get previous step with type in recipe
# @note recursive
# @return [Step, undefined]
_getStepBefore = (recipe, index) ->
if index is 0
return undefined
stepBefore = recipe.steps[index - 1]
if stepBefore.type
return stepBefore
else
return _getStepBefore(recipe, index - 1)
# Get valid step types
# @return [Array<String>]
getValidStepTypes = (recipe, step, index) ->
index ?= recipe.steps.indexOf(step)
stepBefore = _getStepBefore(recipe, index)
unless stepBefore
return ['mash']
switch stepBefore.type
when 'mash'
return ['mash', 'sparge', 'boil']
when 'sparge'
return ['boil']
when 'boil'
return ['boil', 'fermentation']
when 'fermentation'
return ['fermentation']
getMeasureSettings = (type) ->
switch type
when 'imperial'
return {
liquid:
unit: 'oz'
fermentable:
unit: 'lbs'
hop:
unit: 'lbs'
other:
unit: 'lbs'
yeast:
unit: 'lbs'
}
when 'metric'
return {
liquid:
unit: 'liters'
fermentable:
unit: 'grams'
hop:
unit: 'grams'
other:
unit: 'grams'
yeast:
unit: 'grams'
}
getRandomArbitrary = (min, max) ->
return Math.random() * (max - min) + min
calcFermentableValues = (fermentable, recipe) ->
mcu = mbFormulas.colour.mcu(mbFormulas.convert.convert(fermentable.amount, 'grams', 'kg'), fermentable.lovibond, recipe.volume)
console.log fermentable
srm = {}
for formula in mbFormulas.colour.available()
if formula isnt 'mcu'
srm[formula] = mbFormulas.colour[formula]({mcu})
srm.standard = srm.morey
gp = mbFormulas.gravity.gravityPoints
type: fermentable.subType
efficiency: recipe.efficiency
amount: fermentable.amount
volume: recipe.volume
ppg: fermentable.ppg
rgb = mbFormulas.convert.convert fermentable.lovibond, 'lovibond', 'rgb'
return {
mcu, srm, gp, rgb
}
calcHopValues = (hop, step, recipe) ->
calcObj =
boilGravity: recipe.og
utilisation: step.length
boilVolume: step.volume
aa: hop.aaValue
amount: hop.amount
boilTime: step.length
# @todo remove once steps are properly reimplemented
calcObj.boilVolume = recipe.volume if calcObj.boilVolume is 0
calculated =
bitterness: {}
for formula in mbFormulas.bitterness.available()
calculated.bitterness[formula] = mbFormulas.bitterness[formula](calcObj)
return calculated
calcYeastValues = (yeast, step, recipe) ->
return {}
# Calculate OG for a recipe
# @param [Recipe] recipe
# @return [Float] OG
calcOG = (recipe) ->
totalGP = 0
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'fermentable'
totalGP += ingredient.calculated.gp
return mbFormulas.gravity.og(totalGP)
# Calculate FG for a recipe
# @param [Recipe] recipe
# @return [Float fg]
# @todo Send in Yeast attenuation to get more exact result
calcFG = (recipe) ->
return mbFormulas.gravity.fg(recipe.og)
calcAbv = (recipe) ->
abv = {}
for formula in mbFormulas.abv.available()
console.log formula
abv[formula] = mbFormulas.abv[formula]?(recipe.og, recipe.fg)
abv.standard = abv.microbrewit
return abv
# Calculate the IBU value for the recipe
# i.e gather all the IBU values from hops
# @param [Recipe] recipe
# @return [Object] ibu All the different formulas
calcIbu = (recipe) ->
ibu = {}
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'hop'
for key, val of ingredient.calculated?.bitterness
unless ibu[key]
ibu[key] = val.ibu
else
ibu[key] += val.ibu
ibu.standard = ibu.tinseth
return ibu
# Calculate the SRM value for the recipe
# i.e gather all MCU values and use formulas to calculate
# @param [Recipe] recipe
# @return [Object] srm
calcSrm = (recipe) ->
mcu = 0
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'fermentable'
mcu += ingredient.calculated.mcu
srm = {}
for formula in mbFormulas.colour.available()
if formula isnt 'mcu'
srm[formula] = mbFormulas.colour[formula]({mcu})
srm.standard = srm.morey
return srm
return {
calcOG, calcFG, calcSrm, calcIbu, calcAbv, calcFermentableValues, calcHopValues, getMeasureSettings, getValidStepTypes
}
])
| true | #
# Utility functions for recipes
#
# @author PI:NAME:<NAME>END_PI
# @copyright 2016 Microbrew.it
angular.module('Microbrewit').factory('mbit/services/RecipeUtilityService', [
() ->
# Get previous step with type in recipe
# @note recursive
# @return [Step, undefined]
_getStepBefore = (recipe, index) ->
if index is 0
return undefined
stepBefore = recipe.steps[index - 1]
if stepBefore.type
return stepBefore
else
return _getStepBefore(recipe, index - 1)
# Get valid step types
# @return [Array<String>]
getValidStepTypes = (recipe, step, index) ->
index ?= recipe.steps.indexOf(step)
stepBefore = _getStepBefore(recipe, index)
unless stepBefore
return ['mash']
switch stepBefore.type
when 'mash'
return ['mash', 'sparge', 'boil']
when 'sparge'
return ['boil']
when 'boil'
return ['boil', 'fermentation']
when 'fermentation'
return ['fermentation']
getMeasureSettings = (type) ->
switch type
when 'imperial'
return {
liquid:
unit: 'oz'
fermentable:
unit: 'lbs'
hop:
unit: 'lbs'
other:
unit: 'lbs'
yeast:
unit: 'lbs'
}
when 'metric'
return {
liquid:
unit: 'liters'
fermentable:
unit: 'grams'
hop:
unit: 'grams'
other:
unit: 'grams'
yeast:
unit: 'grams'
}
getRandomArbitrary = (min, max) ->
return Math.random() * (max - min) + min
calcFermentableValues = (fermentable, recipe) ->
mcu = mbFormulas.colour.mcu(mbFormulas.convert.convert(fermentable.amount, 'grams', 'kg'), fermentable.lovibond, recipe.volume)
console.log fermentable
srm = {}
for formula in mbFormulas.colour.available()
if formula isnt 'mcu'
srm[formula] = mbFormulas.colour[formula]({mcu})
srm.standard = srm.morey
gp = mbFormulas.gravity.gravityPoints
type: fermentable.subType
efficiency: recipe.efficiency
amount: fermentable.amount
volume: recipe.volume
ppg: fermentable.ppg
rgb = mbFormulas.convert.convert fermentable.lovibond, 'lovibond', 'rgb'
return {
mcu, srm, gp, rgb
}
calcHopValues = (hop, step, recipe) ->
calcObj =
boilGravity: recipe.og
utilisation: step.length
boilVolume: step.volume
aa: hop.aaValue
amount: hop.amount
boilTime: step.length
# @todo remove once steps are properly reimplemented
calcObj.boilVolume = recipe.volume if calcObj.boilVolume is 0
calculated =
bitterness: {}
for formula in mbFormulas.bitterness.available()
calculated.bitterness[formula] = mbFormulas.bitterness[formula](calcObj)
return calculated
calcYeastValues = (yeast, step, recipe) ->
return {}
# Calculate OG for a recipe
# @param [Recipe] recipe
# @return [Float] OG
calcOG = (recipe) ->
totalGP = 0
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'fermentable'
totalGP += ingredient.calculated.gp
return mbFormulas.gravity.og(totalGP)
# Calculate FG for a recipe
# @param [Recipe] recipe
# @return [Float fg]
# @todo Send in Yeast attenuation to get more exact result
calcFG = (recipe) ->
return mbFormulas.gravity.fg(recipe.og)
calcAbv = (recipe) ->
abv = {}
for formula in mbFormulas.abv.available()
console.log formula
abv[formula] = mbFormulas.abv[formula]?(recipe.og, recipe.fg)
abv.standard = abv.microbrewit
return abv
# Calculate the IBU value for the recipe
# i.e gather all the IBU values from hops
# @param [Recipe] recipe
# @return [Object] ibu All the different formulas
calcIbu = (recipe) ->
ibu = {}
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'hop'
for key, val of ingredient.calculated?.bitterness
unless ibu[key]
ibu[key] = val.ibu
else
ibu[key] += val.ibu
ibu.standard = ibu.tinseth
return ibu
# Calculate the SRM value for the recipe
# i.e gather all MCU values and use formulas to calculate
# @param [Recipe] recipe
# @return [Object] srm
calcSrm = (recipe) ->
mcu = 0
for step in recipe.steps
for ingredient in step.ingredients
if ingredient.type is 'fermentable'
mcu += ingredient.calculated.mcu
srm = {}
for formula in mbFormulas.colour.available()
if formula isnt 'mcu'
srm[formula] = mbFormulas.colour[formula]({mcu})
srm.standard = srm.morey
return srm
return {
calcOG, calcFG, calcSrm, calcIbu, calcAbv, calcFermentableValues, calcHopValues, getMeasureSettings, getValidStepTypes
}
])
|
[
{
"context": "# Copyright (c) 2008-2013 Michael Dvorkin and contributors.\n#\n# Fat Free CRM is freely dist",
"end": 41,
"score": 0.99986732006073,
"start": 26,
"tag": "NAME",
"value": "Michael Dvorkin"
}
] | app/assets/javascripts/crm_title_tools.js.coffee | bongbot/mozart | 0 | # Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
(($) ->
window.crm ||= {}
) jQuery
| 134994 | # Copyright (c) 2008-2013 <NAME> and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
(($) ->
window.crm ||= {}
) jQuery
| true | # Copyright (c) 2008-2013 PI:NAME:<NAME>END_PI and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
(($) ->
window.crm ||= {}
) jQuery
|
[
{
"context": ", vec2, vec3, quat} from 'gl-matrix'`\n\n###\nAuthor: Jens G. Magnus\n###\n\ncanvas = null\ndrawFunction = null\n\ncameraMou",
"end": 78,
"score": 0.9974858164787292,
"start": 64,
"tag": "NAME",
"value": "Jens G. Magnus"
}
] | gl-camera.coffee | JensGM/gl-camera | 0 | `import {mat4, vec2, vec3, quat} from 'gl-matrix'`
###
Author: Jens G. Magnus
###
canvas = null
drawFunction = null
cameraMouseCapture = off
smoothingThreshold = 0.0001
###
Distance
###
distance_springiness = 50
distance_sensitivity = 0.005
current_distance = 600
target_distance = 600
###
Translation
###
translation_springiness = 15
translation_sensitivity = 0.005
current_position = vec3.create()
target_position = vec3.create()
###
Rotation
###
rotation_springiness = 15
rotation_sensitivity = 0.015
limitPitch = true
min_pitch = -Math.PI / 2.0
max_pitch = Math.PI / 2.0
current_pitch = 0.35
current_yaw = -0.35
current_roll = 0.0
target_pitch = 0.35
target_yaw = -0.35
target_roll = 0.0
rotation_matrix = (() =>
qy = quat.create()
qp = quat.create()
quat.rotateZ qy, qy, -current_yaw
quat.rotateX qp, qp, current_pitch
qc = quat.multiply quat.create(), qy, qp
return mat4.fromQuat mat4.create(), qc
)()
###
Mouse
###
LEFT_MOUSE_BUTTON = 0
MIDDLE_MOUSE_BUTTON = 1
RIGHT_MOUSE_BUTTON = 2
lastMouseX = 0.0
lastMouseY = 0.0
currentMouseX = 0.0
currentMouseY = 0.0
###
Touch
###
lastTouch1 = vec2.create()
lastTouch2 = vec2.create()
currentTouch1 = vec2.create()
currentTouch2 = vec2.create()
updateCameraInterval = null
bindMouseEvents = (element) ->
canvas = element
canvas.onmousedown = onMouseDown
canvas.onmouseup = onMouseUp
canvas.onmouseleave = onMouseUp
canvas.onmousemove = onMouseMove
canvas.onwheel = onWheel
canvas.ontouchstart = onTouchStart
canvas.ontouchend = onTouchEnd
canvas.ontouchmove = onTouchMove
canvas.oncontextmenu = (ev) ->
ev.preventDefault()
false
setDrawCallback = (cb) -> drawFunction = cb
getCanvasSizeAndRelativeMouseLocation = (ev) ->
rect = canvas.getBoundingClientRect()
left = rect.left + window.pageXOffset
right = rect.right + window.pageXOffset
top = rect.top + window.pageYOffset
bottom = rect.bottom + window.pageYOffset
width = right - left
height = bottom - top
x = ev.clientX - left
y = ev.clientY - top
{ width: width, height: height, x: x, y: y }
onTouchStart = (ev) ->
ev.preventDefault()
if ev.touches.length == 1
M = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
vec2.set lastTouch1, M.x, M.y
vec2.copy currentTouch1, lastTouch1
else
M1 = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
M2 = getCanvasSizeAndRelativeMouseLocation ev.touches[1]
vec2.set lastTouch1, M1.x, M1.y
vec2.set lastTouch2, M2.x, M2.y
vec2.copy currentTouch1, lastTouch1
vec2.copy currentTouch2, lastTouch2
onTouchEnd = (ev) -> ev.preventDefault()
onTouchMove = (ev) ->
ev.preventDefault()
if ev.touches.length == 1
M = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
vec2.set currentTouch1, M.x, M.y
deltaTouch = vec2.subtract vec2.create(), currentTouch1, lastTouch1
addRotationInput deltaTouch[0], deltaTouch[1]
vec2.copy lastTouch1, currentTouch1
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
if ev.touches.length >= 2
M1 = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
M2 = getCanvasSizeAndRelativeMouseLocation ev.touches[1]
vec2.set currentTouch1, M1.x, M1.y
vec2.set currentTouch2, M2.x, M2.y
# Distane
deltaTouchDistance = vec2.distance(lastTouch1, lastTouch2) - vec2.distance(currentTouch1, currentTouch2)
target_distance += deltaTouchDistance * distance_sensitivity * current_distance * 2.0
target_distance = Math.max target_distance, 0.0
# Translation
averageLastTouch = vec2.add vec2.create(), lastTouch1, lastTouch2
averageCurrentTouch = vec2.add vec2.create(), currentTouch1, currentTouch2
vec2.scale averageLastTouch, averageLastTouch, 0.5
vec2.scale averageCurrentTouch, averageCurrentTouch, 0.5
averageMovement = vec2.subtract vec2.create(), averageCurrentTouch, averageLastTouch
addTranslationInput averageMovement[0], averageMovement[1]
vec2.copy lastTouch1, currentTouch1
vec2.copy lastTouch2, currentTouch2
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
onWheel = (ev) ->
ev.preventDefault()
target_distance += ev.deltaY * distance_sensitivity * Math.max(current_distance, 5.0)
target_distance = Math.max target_distance, 0.0
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
onMouseUp = (ev) -> cameraMouseCapture = off
onMouseDown = (ev) ->
ev.preventDefault()
cameraMouseCapture = on
M = getCanvasSizeAndRelativeMouseLocation ev
lastMouseX = M.x
lastMouseY = M.y
currentMouseX = M.x
currentMouseY = M.y
onMouseMove = (ev) ->
ev.preventDefault()
unless cameraMouseCapture is on then return
M = getCanvasSizeAndRelativeMouseLocation ev
currentMouseX = M.x
currentMouseY = M.y
x = currentMouseX - lastMouseX
y = currentMouseY - lastMouseY
switch ev.button
when LEFT_MOUSE_BUTTON then addRotationInput x, y
when RIGHT_MOUSE_BUTTON then addTranslationInput x, y
lastMouseX = currentMouseX
lastMouseY = currentMouseY
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
addRotationInput = (x, y) ->
target_yaw += x * rotation_sensitivity
target_pitch += y * rotation_sensitivity
if limitPitch
target_pitch = Math.min(Math.max(target_pitch, min_pitch), max_pitch)
addTranslationInput = (x, y) ->
deltaPosition = vec3.fromValues x * translation_sensitivity * current_distance, 0.0, y * translation_sensitivity * current_distance
inverse_rotation_matrix = mat4.invert mat4.create(), rotation_matrix
deltaPosition = vec3.transformMat4 vec3.create(), deltaPosition, rotation_matrix
vec3.add target_position, target_position, deltaPosition
updateCamera = (deltaTime) ->
deltaTime = 0.015
# Rotation
updateRotation = false
updateRotation |= Math.abs(target_pitch - current_pitch) > smoothingThreshold
updateRotation |= Math.abs(target_yaw - current_yaw) > smoothingThreshold
updateRotation |= Math.abs(target_roll - current_roll) > smoothingThreshold
if updateRotation
rotation_step = 1 - Math.exp(Math.log(0.5) * rotation_springiness * deltaTime)
current_pitch += (target_pitch - current_pitch) * rotation_step
current_yaw += (target_yaw - current_yaw) * rotation_step
current_roll += (target_roll - current_roll) * rotation_step
qy = quat.create()
qp = quat.create()
quat.rotateZ qy, qy, -current_yaw
quat.rotateX qp, qp, current_pitch
qc = quat.multiply quat.create(), qy, qp
mat4.fromQuat rotation_matrix, qc
# Translation
updateTranslation = vec3.squaredDistance(target_position, current_position) > smoothingThreshold
if updateTranslation
translation_step = 1 - Math.exp(Math.log(0.5) * translation_springiness * deltaTime)
delta_position = vec3.subtract vec3.create(), target_position, current_position
vec3.scaleAndAdd current_position, current_position, delta_position, translation_step
# Distance
updateDistance = Math.abs(target_distance - current_distance) > smoothingThreshold
if updateDistance
distance_step = 1 - Math.exp(Math.log(0.5) * distance_springiness * deltaTime)
current_distance += (target_distance - current_distance) * distance_step
done = !updateRotation && !updateTranslation && !updateDistance
if done && updateCameraInterval
clearInterval updateCameraInterval
updateCameraInterval = null
if drawFunction then drawFunction()
done
getCameraMatrix = () ->
aspectRatio = canvas.width / canvas.height
eye = vec3.transformMat4(vec3.create(), vec3.fromValues(0,current_distance,0), rotation_matrix)
vec3.add eye, eye, current_position
center = vec3.fromValues(0,0,0)
vec3.add center, center, current_position
up = vec3.transformMat4(vec3.create(), vec3.fromValues(0,0,1), rotation_matrix)
V = mat4.lookAt mat4.create(), eye, center, up
P = mat4.perspective mat4.create(), 70, aspectRatio, 0.01, 512.0 + current_distance * 2
mat4.multiply mat4.create(), P, V
getViewMatrix = () ->
eye = vec3.transformMat4(vec3.create(), vec3.fromValues(0,current_distance,0), rotation_matrix)
vec3.add eye, eye, current_position
center = vec3.fromValues(0,0,0)
vec3.add center, center, current_position
up = vec3.transformMat4(vec3.create(), vec3.fromValues(0,0,1), rotation_matrix)
V = mat4.lookAt mat4.create(), eye, center, up
V
glCamera =
bindMouseEvents: bindMouseEvents
setDrawCallback: setDrawCallback
getCameraMatrix: getCameraMatrix
getViewMatrix: getViewMatrix
`export default glCamera`
| 71693 | `import {mat4, vec2, vec3, quat} from 'gl-matrix'`
###
Author: <NAME>
###
canvas = null
drawFunction = null
cameraMouseCapture = off
smoothingThreshold = 0.0001
###
Distance
###
distance_springiness = 50
distance_sensitivity = 0.005
current_distance = 600
target_distance = 600
###
Translation
###
translation_springiness = 15
translation_sensitivity = 0.005
current_position = vec3.create()
target_position = vec3.create()
###
Rotation
###
rotation_springiness = 15
rotation_sensitivity = 0.015
limitPitch = true
min_pitch = -Math.PI / 2.0
max_pitch = Math.PI / 2.0
current_pitch = 0.35
current_yaw = -0.35
current_roll = 0.0
target_pitch = 0.35
target_yaw = -0.35
target_roll = 0.0
rotation_matrix = (() =>
qy = quat.create()
qp = quat.create()
quat.rotateZ qy, qy, -current_yaw
quat.rotateX qp, qp, current_pitch
qc = quat.multiply quat.create(), qy, qp
return mat4.fromQuat mat4.create(), qc
)()
###
Mouse
###
LEFT_MOUSE_BUTTON = 0
MIDDLE_MOUSE_BUTTON = 1
RIGHT_MOUSE_BUTTON = 2
lastMouseX = 0.0
lastMouseY = 0.0
currentMouseX = 0.0
currentMouseY = 0.0
###
Touch
###
lastTouch1 = vec2.create()
lastTouch2 = vec2.create()
currentTouch1 = vec2.create()
currentTouch2 = vec2.create()
updateCameraInterval = null
bindMouseEvents = (element) ->
canvas = element
canvas.onmousedown = onMouseDown
canvas.onmouseup = onMouseUp
canvas.onmouseleave = onMouseUp
canvas.onmousemove = onMouseMove
canvas.onwheel = onWheel
canvas.ontouchstart = onTouchStart
canvas.ontouchend = onTouchEnd
canvas.ontouchmove = onTouchMove
canvas.oncontextmenu = (ev) ->
ev.preventDefault()
false
setDrawCallback = (cb) -> drawFunction = cb
getCanvasSizeAndRelativeMouseLocation = (ev) ->
rect = canvas.getBoundingClientRect()
left = rect.left + window.pageXOffset
right = rect.right + window.pageXOffset
top = rect.top + window.pageYOffset
bottom = rect.bottom + window.pageYOffset
width = right - left
height = bottom - top
x = ev.clientX - left
y = ev.clientY - top
{ width: width, height: height, x: x, y: y }
onTouchStart = (ev) ->
ev.preventDefault()
if ev.touches.length == 1
M = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
vec2.set lastTouch1, M.x, M.y
vec2.copy currentTouch1, lastTouch1
else
M1 = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
M2 = getCanvasSizeAndRelativeMouseLocation ev.touches[1]
vec2.set lastTouch1, M1.x, M1.y
vec2.set lastTouch2, M2.x, M2.y
vec2.copy currentTouch1, lastTouch1
vec2.copy currentTouch2, lastTouch2
onTouchEnd = (ev) -> ev.preventDefault()
onTouchMove = (ev) ->
ev.preventDefault()
if ev.touches.length == 1
M = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
vec2.set currentTouch1, M.x, M.y
deltaTouch = vec2.subtract vec2.create(), currentTouch1, lastTouch1
addRotationInput deltaTouch[0], deltaTouch[1]
vec2.copy lastTouch1, currentTouch1
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
if ev.touches.length >= 2
M1 = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
M2 = getCanvasSizeAndRelativeMouseLocation ev.touches[1]
vec2.set currentTouch1, M1.x, M1.y
vec2.set currentTouch2, M2.x, M2.y
# Distane
deltaTouchDistance = vec2.distance(lastTouch1, lastTouch2) - vec2.distance(currentTouch1, currentTouch2)
target_distance += deltaTouchDistance * distance_sensitivity * current_distance * 2.0
target_distance = Math.max target_distance, 0.0
# Translation
averageLastTouch = vec2.add vec2.create(), lastTouch1, lastTouch2
averageCurrentTouch = vec2.add vec2.create(), currentTouch1, currentTouch2
vec2.scale averageLastTouch, averageLastTouch, 0.5
vec2.scale averageCurrentTouch, averageCurrentTouch, 0.5
averageMovement = vec2.subtract vec2.create(), averageCurrentTouch, averageLastTouch
addTranslationInput averageMovement[0], averageMovement[1]
vec2.copy lastTouch1, currentTouch1
vec2.copy lastTouch2, currentTouch2
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
onWheel = (ev) ->
ev.preventDefault()
target_distance += ev.deltaY * distance_sensitivity * Math.max(current_distance, 5.0)
target_distance = Math.max target_distance, 0.0
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
onMouseUp = (ev) -> cameraMouseCapture = off
onMouseDown = (ev) ->
ev.preventDefault()
cameraMouseCapture = on
M = getCanvasSizeAndRelativeMouseLocation ev
lastMouseX = M.x
lastMouseY = M.y
currentMouseX = M.x
currentMouseY = M.y
onMouseMove = (ev) ->
ev.preventDefault()
unless cameraMouseCapture is on then return
M = getCanvasSizeAndRelativeMouseLocation ev
currentMouseX = M.x
currentMouseY = M.y
x = currentMouseX - lastMouseX
y = currentMouseY - lastMouseY
switch ev.button
when LEFT_MOUSE_BUTTON then addRotationInput x, y
when RIGHT_MOUSE_BUTTON then addTranslationInput x, y
lastMouseX = currentMouseX
lastMouseY = currentMouseY
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
addRotationInput = (x, y) ->
target_yaw += x * rotation_sensitivity
target_pitch += y * rotation_sensitivity
if limitPitch
target_pitch = Math.min(Math.max(target_pitch, min_pitch), max_pitch)
addTranslationInput = (x, y) ->
deltaPosition = vec3.fromValues x * translation_sensitivity * current_distance, 0.0, y * translation_sensitivity * current_distance
inverse_rotation_matrix = mat4.invert mat4.create(), rotation_matrix
deltaPosition = vec3.transformMat4 vec3.create(), deltaPosition, rotation_matrix
vec3.add target_position, target_position, deltaPosition
updateCamera = (deltaTime) ->
deltaTime = 0.015
# Rotation
updateRotation = false
updateRotation |= Math.abs(target_pitch - current_pitch) > smoothingThreshold
updateRotation |= Math.abs(target_yaw - current_yaw) > smoothingThreshold
updateRotation |= Math.abs(target_roll - current_roll) > smoothingThreshold
if updateRotation
rotation_step = 1 - Math.exp(Math.log(0.5) * rotation_springiness * deltaTime)
current_pitch += (target_pitch - current_pitch) * rotation_step
current_yaw += (target_yaw - current_yaw) * rotation_step
current_roll += (target_roll - current_roll) * rotation_step
qy = quat.create()
qp = quat.create()
quat.rotateZ qy, qy, -current_yaw
quat.rotateX qp, qp, current_pitch
qc = quat.multiply quat.create(), qy, qp
mat4.fromQuat rotation_matrix, qc
# Translation
updateTranslation = vec3.squaredDistance(target_position, current_position) > smoothingThreshold
if updateTranslation
translation_step = 1 - Math.exp(Math.log(0.5) * translation_springiness * deltaTime)
delta_position = vec3.subtract vec3.create(), target_position, current_position
vec3.scaleAndAdd current_position, current_position, delta_position, translation_step
# Distance
updateDistance = Math.abs(target_distance - current_distance) > smoothingThreshold
if updateDistance
distance_step = 1 - Math.exp(Math.log(0.5) * distance_springiness * deltaTime)
current_distance += (target_distance - current_distance) * distance_step
done = !updateRotation && !updateTranslation && !updateDistance
if done && updateCameraInterval
clearInterval updateCameraInterval
updateCameraInterval = null
if drawFunction then drawFunction()
done
getCameraMatrix = () ->
aspectRatio = canvas.width / canvas.height
eye = vec3.transformMat4(vec3.create(), vec3.fromValues(0,current_distance,0), rotation_matrix)
vec3.add eye, eye, current_position
center = vec3.fromValues(0,0,0)
vec3.add center, center, current_position
up = vec3.transformMat4(vec3.create(), vec3.fromValues(0,0,1), rotation_matrix)
V = mat4.lookAt mat4.create(), eye, center, up
P = mat4.perspective mat4.create(), 70, aspectRatio, 0.01, 512.0 + current_distance * 2
mat4.multiply mat4.create(), P, V
getViewMatrix = () ->
eye = vec3.transformMat4(vec3.create(), vec3.fromValues(0,current_distance,0), rotation_matrix)
vec3.add eye, eye, current_position
center = vec3.fromValues(0,0,0)
vec3.add center, center, current_position
up = vec3.transformMat4(vec3.create(), vec3.fromValues(0,0,1), rotation_matrix)
V = mat4.lookAt mat4.create(), eye, center, up
V
glCamera =
bindMouseEvents: bindMouseEvents
setDrawCallback: setDrawCallback
getCameraMatrix: getCameraMatrix
getViewMatrix: getViewMatrix
`export default glCamera`
| true | `import {mat4, vec2, vec3, quat} from 'gl-matrix'`
###
Author: PI:NAME:<NAME>END_PI
###
canvas = null
drawFunction = null
cameraMouseCapture = off
smoothingThreshold = 0.0001
###
Distance
###
distance_springiness = 50
distance_sensitivity = 0.005
current_distance = 600
target_distance = 600
###
Translation
###
translation_springiness = 15
translation_sensitivity = 0.005
current_position = vec3.create()
target_position = vec3.create()
###
Rotation
###
rotation_springiness = 15
rotation_sensitivity = 0.015
limitPitch = true
min_pitch = -Math.PI / 2.0
max_pitch = Math.PI / 2.0
current_pitch = 0.35
current_yaw = -0.35
current_roll = 0.0
target_pitch = 0.35
target_yaw = -0.35
target_roll = 0.0
rotation_matrix = (() =>
qy = quat.create()
qp = quat.create()
quat.rotateZ qy, qy, -current_yaw
quat.rotateX qp, qp, current_pitch
qc = quat.multiply quat.create(), qy, qp
return mat4.fromQuat mat4.create(), qc
)()
###
Mouse
###
LEFT_MOUSE_BUTTON = 0
MIDDLE_MOUSE_BUTTON = 1
RIGHT_MOUSE_BUTTON = 2
lastMouseX = 0.0
lastMouseY = 0.0
currentMouseX = 0.0
currentMouseY = 0.0
###
Touch
###
lastTouch1 = vec2.create()
lastTouch2 = vec2.create()
currentTouch1 = vec2.create()
currentTouch2 = vec2.create()
updateCameraInterval = null
bindMouseEvents = (element) ->
canvas = element
canvas.onmousedown = onMouseDown
canvas.onmouseup = onMouseUp
canvas.onmouseleave = onMouseUp
canvas.onmousemove = onMouseMove
canvas.onwheel = onWheel
canvas.ontouchstart = onTouchStart
canvas.ontouchend = onTouchEnd
canvas.ontouchmove = onTouchMove
canvas.oncontextmenu = (ev) ->
ev.preventDefault()
false
setDrawCallback = (cb) -> drawFunction = cb
getCanvasSizeAndRelativeMouseLocation = (ev) ->
rect = canvas.getBoundingClientRect()
left = rect.left + window.pageXOffset
right = rect.right + window.pageXOffset
top = rect.top + window.pageYOffset
bottom = rect.bottom + window.pageYOffset
width = right - left
height = bottom - top
x = ev.clientX - left
y = ev.clientY - top
{ width: width, height: height, x: x, y: y }
onTouchStart = (ev) ->
ev.preventDefault()
if ev.touches.length == 1
M = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
vec2.set lastTouch1, M.x, M.y
vec2.copy currentTouch1, lastTouch1
else
M1 = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
M2 = getCanvasSizeAndRelativeMouseLocation ev.touches[1]
vec2.set lastTouch1, M1.x, M1.y
vec2.set lastTouch2, M2.x, M2.y
vec2.copy currentTouch1, lastTouch1
vec2.copy currentTouch2, lastTouch2
onTouchEnd = (ev) -> ev.preventDefault()
onTouchMove = (ev) ->
ev.preventDefault()
if ev.touches.length == 1
M = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
vec2.set currentTouch1, M.x, M.y
deltaTouch = vec2.subtract vec2.create(), currentTouch1, lastTouch1
addRotationInput deltaTouch[0], deltaTouch[1]
vec2.copy lastTouch1, currentTouch1
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
if ev.touches.length >= 2
M1 = getCanvasSizeAndRelativeMouseLocation ev.touches[0]
M2 = getCanvasSizeAndRelativeMouseLocation ev.touches[1]
vec2.set currentTouch1, M1.x, M1.y
vec2.set currentTouch2, M2.x, M2.y
# Distane
deltaTouchDistance = vec2.distance(lastTouch1, lastTouch2) - vec2.distance(currentTouch1, currentTouch2)
target_distance += deltaTouchDistance * distance_sensitivity * current_distance * 2.0
target_distance = Math.max target_distance, 0.0
# Translation
averageLastTouch = vec2.add vec2.create(), lastTouch1, lastTouch2
averageCurrentTouch = vec2.add vec2.create(), currentTouch1, currentTouch2
vec2.scale averageLastTouch, averageLastTouch, 0.5
vec2.scale averageCurrentTouch, averageCurrentTouch, 0.5
averageMovement = vec2.subtract vec2.create(), averageCurrentTouch, averageLastTouch
addTranslationInput averageMovement[0], averageMovement[1]
vec2.copy lastTouch1, currentTouch1
vec2.copy lastTouch2, currentTouch2
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
onWheel = (ev) ->
ev.preventDefault()
target_distance += ev.deltaY * distance_sensitivity * Math.max(current_distance, 5.0)
target_distance = Math.max target_distance, 0.0
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
onMouseUp = (ev) -> cameraMouseCapture = off
onMouseDown = (ev) ->
ev.preventDefault()
cameraMouseCapture = on
M = getCanvasSizeAndRelativeMouseLocation ev
lastMouseX = M.x
lastMouseY = M.y
currentMouseX = M.x
currentMouseY = M.y
onMouseMove = (ev) ->
ev.preventDefault()
unless cameraMouseCapture is on then return
M = getCanvasSizeAndRelativeMouseLocation ev
currentMouseX = M.x
currentMouseY = M.y
x = currentMouseX - lastMouseX
y = currentMouseY - lastMouseY
switch ev.button
when LEFT_MOUSE_BUTTON then addRotationInput x, y
when RIGHT_MOUSE_BUTTON then addTranslationInput x, y
lastMouseX = currentMouseX
lastMouseY = currentMouseY
unless updateCameraInterval
updateCameraInterval = setInterval updateCamera, 15
addRotationInput = (x, y) ->
target_yaw += x * rotation_sensitivity
target_pitch += y * rotation_sensitivity
if limitPitch
target_pitch = Math.min(Math.max(target_pitch, min_pitch), max_pitch)
addTranslationInput = (x, y) ->
deltaPosition = vec3.fromValues x * translation_sensitivity * current_distance, 0.0, y * translation_sensitivity * current_distance
inverse_rotation_matrix = mat4.invert mat4.create(), rotation_matrix
deltaPosition = vec3.transformMat4 vec3.create(), deltaPosition, rotation_matrix
vec3.add target_position, target_position, deltaPosition
updateCamera = (deltaTime) ->
deltaTime = 0.015
# Rotation
updateRotation = false
updateRotation |= Math.abs(target_pitch - current_pitch) > smoothingThreshold
updateRotation |= Math.abs(target_yaw - current_yaw) > smoothingThreshold
updateRotation |= Math.abs(target_roll - current_roll) > smoothingThreshold
if updateRotation
rotation_step = 1 - Math.exp(Math.log(0.5) * rotation_springiness * deltaTime)
current_pitch += (target_pitch - current_pitch) * rotation_step
current_yaw += (target_yaw - current_yaw) * rotation_step
current_roll += (target_roll - current_roll) * rotation_step
qy = quat.create()
qp = quat.create()
quat.rotateZ qy, qy, -current_yaw
quat.rotateX qp, qp, current_pitch
qc = quat.multiply quat.create(), qy, qp
mat4.fromQuat rotation_matrix, qc
# Translation
updateTranslation = vec3.squaredDistance(target_position, current_position) > smoothingThreshold
if updateTranslation
translation_step = 1 - Math.exp(Math.log(0.5) * translation_springiness * deltaTime)
delta_position = vec3.subtract vec3.create(), target_position, current_position
vec3.scaleAndAdd current_position, current_position, delta_position, translation_step
# Distance
updateDistance = Math.abs(target_distance - current_distance) > smoothingThreshold
if updateDistance
distance_step = 1 - Math.exp(Math.log(0.5) * distance_springiness * deltaTime)
current_distance += (target_distance - current_distance) * distance_step
done = !updateRotation && !updateTranslation && !updateDistance
if done && updateCameraInterval
clearInterval updateCameraInterval
updateCameraInterval = null
if drawFunction then drawFunction()
done
getCameraMatrix = () ->
aspectRatio = canvas.width / canvas.height
eye = vec3.transformMat4(vec3.create(), vec3.fromValues(0,current_distance,0), rotation_matrix)
vec3.add eye, eye, current_position
center = vec3.fromValues(0,0,0)
vec3.add center, center, current_position
up = vec3.transformMat4(vec3.create(), vec3.fromValues(0,0,1), rotation_matrix)
V = mat4.lookAt mat4.create(), eye, center, up
P = mat4.perspective mat4.create(), 70, aspectRatio, 0.01, 512.0 + current_distance * 2
mat4.multiply mat4.create(), P, V
getViewMatrix = () ->
eye = vec3.transformMat4(vec3.create(), vec3.fromValues(0,current_distance,0), rotation_matrix)
vec3.add eye, eye, current_position
center = vec3.fromValues(0,0,0)
vec3.add center, center, current_position
up = vec3.transformMat4(vec3.create(), vec3.fromValues(0,0,1), rotation_matrix)
V = mat4.lookAt mat4.create(), eye, center, up
V
glCamera =
bindMouseEvents: bindMouseEvents
setDrawCallback: setDrawCallback
getCameraMatrix: getCameraMatrix
getViewMatrix: getViewMatrix
`export default glCamera`
|
[
{
"context": "umber(ticketBlockID)\n customerName: customerName\n customerEmail: customerEmail\n ",
"end": 1861,
"score": 0.6793017983436584,
"start": 1849,
"tag": "NAME",
"value": "customerName"
}
] | bin/assets/javascripts/event_list_entry.coffee | boich249/EventTicketSim | 3 | define [
'react'
], (React) ->
EventListEntry = React.createClass
getInitialState: ->
expanded: false
gatherTicketBlocks: ->
ticketBlocksApi = jsRoutes.controllers.Events.ticketBlockForEvent(
@props.event.id
)
ticketBlocksApi.ajax()
.done (result) =>
if @isMounted()
availBlocks = (tb for tb in \
result.response when tb.availability > 0)
@setState
ticketBlocks: availBlocks
.fail (jqXHR, textStatus, errorThrown) =>
resultCode = jqXHR.status
if @isMounted()
@setState
ticketBlocks: null
toggleExpanded: ->
if @state.ticketBlocks == undefined
@gatherTicketBlocks()
@setState
ticketBlocks: null
@setState
expanded: !@state.expanded
placeOrder: ->
ticketBlockID = @refs.selectedTicketBlock.getDOMNode().value
ticketQuantity = @refs.ticketQuantity.getDOMNode().value
customerName = @refs.customerName.getDOMNode().value
customerEmail = @refs.customerEmail.getDOMNode().value
# This is pretty lame validation, but better than nothing
if customerName.length == 0
alert "Your name is required"
return
if customerEmail.length == 0
alert "Your email is required"
return
order =
ticketBlockID: Number(ticketBlockID)
customerName: customerName
customerEmail: customerEmail
ticketQuantity: Number(ticketQuantity)
ticketBlocksApi = jsRoutes.controllers.Orders.create()
ticketBlocksApi.ajax(
data: JSON.stringify order
contentType: 'application/json'
)
.done (result) =>
if @isMounted()
alert "Order placed. REF #{result.response.id}"
@setState
expanded: false
.fail (jqXHR, textStatus, errorThrown) =>
resultCode = jqXHR.status
result = jqXHR.responseJSON
if @isMounted()
alert "Error placing the order: #{result.error.message}"
renderEntryBlocks: ->
{ div, span, option, label, select, input, button } = React.DOM
eid = @props.event.id
if @state.ticketBlocks?
if @state.ticketBlocks.length > 0
options = @state.ticketBlocks.map (tb) ->
priceFormat = parseFloat(Math.round(tb.price * 100) / 100).toFixed(2)
option {
key: tb.id
ref: "selectedTicketBlock"
value: tb.id
}, "#{ tb.name } - $#{ priceFormat }"
blockChoice = select {
key: 'tbo'
id: "tbo#{eid}"
}, options
div { key: 'opnl' }, [
div { key: 'q'}, [
label {
key: 'lt'
htmlFor: "tbo#{eid}"
}, "Tickets:"
blockChoice
label {
key: 'lq'
htmlFor: "qty#{eid}"
}, "Quantity:"
input {
key: 'qty'
ref: "ticketQuantity"
id: "qty#{eid}"
type: "number"
max: 9999
min: 1
defaultValue: 1
}
],
div { key: 'n' }, [
label {
key: 'ln'
htmlFor: "name#{eid}"
}, "Name:"
input {
key: 'name'
ref: "customerName"
id: "name#{eid}"
}
label {
key: 'le'
htmlFor: "email#{eid}"
}, "Email:"
input {
key: 'email'
ref: "customerEmail"
id: "email#{eid}"
}
button {
key: 'o'
onClick: @placeOrder
}, "Place Order"
]
]
else
div { key: 'so' }, "No tickets available"
else
null
render: ->
{ div, span, button } = React.DOM
if @props.event?
eid = @props.event.id
eventDate = new Date(@props.event.start)
readableDate = eventDate.toDateString()
orderText = if @state.expanded then "Cancel" else "Order"
orderButton = button {
key: 'o'
onClick: @toggleExpanded
}, orderText
baseRow = div {
key: "er-#{ eid }"
className: "eventEntry"
}, [
span { key: 'evn' }, @props.event.name
span { key: 'evc' }, @props.event.city
span { key: 'evd' }, readableDate
span { key: 'order' }, orderButton
]
contents = [baseRow]
if @state.expanded
contents.push @renderEntryBlocks()
div {}, contents
else
null
EventListEntry | 183037 | define [
'react'
], (React) ->
EventListEntry = React.createClass
getInitialState: ->
expanded: false
gatherTicketBlocks: ->
ticketBlocksApi = jsRoutes.controllers.Events.ticketBlockForEvent(
@props.event.id
)
ticketBlocksApi.ajax()
.done (result) =>
if @isMounted()
availBlocks = (tb for tb in \
result.response when tb.availability > 0)
@setState
ticketBlocks: availBlocks
.fail (jqXHR, textStatus, errorThrown) =>
resultCode = jqXHR.status
if @isMounted()
@setState
ticketBlocks: null
toggleExpanded: ->
if @state.ticketBlocks == undefined
@gatherTicketBlocks()
@setState
ticketBlocks: null
@setState
expanded: !@state.expanded
placeOrder: ->
ticketBlockID = @refs.selectedTicketBlock.getDOMNode().value
ticketQuantity = @refs.ticketQuantity.getDOMNode().value
customerName = @refs.customerName.getDOMNode().value
customerEmail = @refs.customerEmail.getDOMNode().value
# This is pretty lame validation, but better than nothing
if customerName.length == 0
alert "Your name is required"
return
if customerEmail.length == 0
alert "Your email is required"
return
order =
ticketBlockID: Number(ticketBlockID)
customerName: <NAME>
customerEmail: customerEmail
ticketQuantity: Number(ticketQuantity)
ticketBlocksApi = jsRoutes.controllers.Orders.create()
ticketBlocksApi.ajax(
data: JSON.stringify order
contentType: 'application/json'
)
.done (result) =>
if @isMounted()
alert "Order placed. REF #{result.response.id}"
@setState
expanded: false
.fail (jqXHR, textStatus, errorThrown) =>
resultCode = jqXHR.status
result = jqXHR.responseJSON
if @isMounted()
alert "Error placing the order: #{result.error.message}"
renderEntryBlocks: ->
{ div, span, option, label, select, input, button } = React.DOM
eid = @props.event.id
if @state.ticketBlocks?
if @state.ticketBlocks.length > 0
options = @state.ticketBlocks.map (tb) ->
priceFormat = parseFloat(Math.round(tb.price * 100) / 100).toFixed(2)
option {
key: tb.id
ref: "selectedTicketBlock"
value: tb.id
}, "#{ tb.name } - $#{ priceFormat }"
blockChoice = select {
key: 'tbo'
id: "tbo#{eid}"
}, options
div { key: 'opnl' }, [
div { key: 'q'}, [
label {
key: 'lt'
htmlFor: "tbo#{eid}"
}, "Tickets:"
blockChoice
label {
key: 'lq'
htmlFor: "qty#{eid}"
}, "Quantity:"
input {
key: 'qty'
ref: "ticketQuantity"
id: "qty#{eid}"
type: "number"
max: 9999
min: 1
defaultValue: 1
}
],
div { key: 'n' }, [
label {
key: 'ln'
htmlFor: "name#{eid}"
}, "Name:"
input {
key: 'name'
ref: "customerName"
id: "name#{eid}"
}
label {
key: 'le'
htmlFor: "email#{eid}"
}, "Email:"
input {
key: 'email'
ref: "customerEmail"
id: "email#{eid}"
}
button {
key: 'o'
onClick: @placeOrder
}, "Place Order"
]
]
else
div { key: 'so' }, "No tickets available"
else
null
render: ->
{ div, span, button } = React.DOM
if @props.event?
eid = @props.event.id
eventDate = new Date(@props.event.start)
readableDate = eventDate.toDateString()
orderText = if @state.expanded then "Cancel" else "Order"
orderButton = button {
key: 'o'
onClick: @toggleExpanded
}, orderText
baseRow = div {
key: "er-#{ eid }"
className: "eventEntry"
}, [
span { key: 'evn' }, @props.event.name
span { key: 'evc' }, @props.event.city
span { key: 'evd' }, readableDate
span { key: 'order' }, orderButton
]
contents = [baseRow]
if @state.expanded
contents.push @renderEntryBlocks()
div {}, contents
else
null
EventListEntry | true | define [
'react'
], (React) ->
EventListEntry = React.createClass
getInitialState: ->
expanded: false
gatherTicketBlocks: ->
ticketBlocksApi = jsRoutes.controllers.Events.ticketBlockForEvent(
@props.event.id
)
ticketBlocksApi.ajax()
.done (result) =>
if @isMounted()
availBlocks = (tb for tb in \
result.response when tb.availability > 0)
@setState
ticketBlocks: availBlocks
.fail (jqXHR, textStatus, errorThrown) =>
resultCode = jqXHR.status
if @isMounted()
@setState
ticketBlocks: null
toggleExpanded: ->
if @state.ticketBlocks == undefined
@gatherTicketBlocks()
@setState
ticketBlocks: null
@setState
expanded: !@state.expanded
placeOrder: ->
ticketBlockID = @refs.selectedTicketBlock.getDOMNode().value
ticketQuantity = @refs.ticketQuantity.getDOMNode().value
customerName = @refs.customerName.getDOMNode().value
customerEmail = @refs.customerEmail.getDOMNode().value
# This is pretty lame validation, but better than nothing
if customerName.length == 0
alert "Your name is required"
return
if customerEmail.length == 0
alert "Your email is required"
return
order =
ticketBlockID: Number(ticketBlockID)
customerName: PI:NAME:<NAME>END_PI
customerEmail: customerEmail
ticketQuantity: Number(ticketQuantity)
ticketBlocksApi = jsRoutes.controllers.Orders.create()
ticketBlocksApi.ajax(
data: JSON.stringify order
contentType: 'application/json'
)
.done (result) =>
if @isMounted()
alert "Order placed. REF #{result.response.id}"
@setState
expanded: false
.fail (jqXHR, textStatus, errorThrown) =>
resultCode = jqXHR.status
result = jqXHR.responseJSON
if @isMounted()
alert "Error placing the order: #{result.error.message}"
renderEntryBlocks: ->
{ div, span, option, label, select, input, button } = React.DOM
eid = @props.event.id
if @state.ticketBlocks?
if @state.ticketBlocks.length > 0
options = @state.ticketBlocks.map (tb) ->
priceFormat = parseFloat(Math.round(tb.price * 100) / 100).toFixed(2)
option {
key: tb.id
ref: "selectedTicketBlock"
value: tb.id
}, "#{ tb.name } - $#{ priceFormat }"
blockChoice = select {
key: 'tbo'
id: "tbo#{eid}"
}, options
div { key: 'opnl' }, [
div { key: 'q'}, [
label {
key: 'lt'
htmlFor: "tbo#{eid}"
}, "Tickets:"
blockChoice
label {
key: 'lq'
htmlFor: "qty#{eid}"
}, "Quantity:"
input {
key: 'qty'
ref: "ticketQuantity"
id: "qty#{eid}"
type: "number"
max: 9999
min: 1
defaultValue: 1
}
],
div { key: 'n' }, [
label {
key: 'ln'
htmlFor: "name#{eid}"
}, "Name:"
input {
key: 'name'
ref: "customerName"
id: "name#{eid}"
}
label {
key: 'le'
htmlFor: "email#{eid}"
}, "Email:"
input {
key: 'email'
ref: "customerEmail"
id: "email#{eid}"
}
button {
key: 'o'
onClick: @placeOrder
}, "Place Order"
]
]
else
div { key: 'so' }, "No tickets available"
else
null
render: ->
{ div, span, button } = React.DOM
if @props.event?
eid = @props.event.id
eventDate = new Date(@props.event.start)
readableDate = eventDate.toDateString()
orderText = if @state.expanded then "Cancel" else "Order"
orderButton = button {
key: 'o'
onClick: @toggleExpanded
}, orderText
baseRow = div {
key: "er-#{ eid }"
className: "eventEntry"
}, [
span { key: 'evn' }, @props.event.name
span { key: 'evc' }, @props.event.city
span { key: 'evd' }, readableDate
span { key: 'order' }, orderButton
]
contents = [baseRow]
if @state.expanded
contents.push @renderEntryBlocks()
div {}, contents
else
null
EventListEntry |
[
{
"context": "dataKey = 'cropit'\n\nmethods =\n\n init: (options) ->\n @each ->\n ",
"end": 17,
"score": 0.966945230960846,
"start": 11,
"tag": "KEY",
"value": "cropit"
}
] | frontend/web/assets/cropit/src/plugin.coffee | udamuri/app_muribudiman | 1 | dataKey = 'cropit'
methods =
init: (options) ->
@each ->
# Only instantiate once per element
unless $.data @, dataKey
cropit = new Cropit @, options
$.data @, dataKey, cropit
destroy: ->
@each ->
$.removeData @, dataKey
isZoomable: ->
cropit = @first().data dataKey
cropit?.isZoomable()
export: (options) ->
cropit = @first().data dataKey
cropit?.getCroppedImageData options
imageState: ->
cropit = @first().data dataKey
cropit?.getImageState()
imageSrc: (newImageSrc) ->
if newImageSrc?
@each ->
cropit = $.data @, dataKey
cropit?.reset()
cropit?.loadImage newImageSrc
else
cropit = @first().data dataKey
cropit?.getImageSrc()
offset: (newOffset) ->
if newOffset? and newOffset.x? and newOffset.y?
@each ->
cropit = $.data @, dataKey
cropit?.setOffset newOffset
else
cropit = @first().data dataKey
cropit?.getOffset()
zoom: (newZoom) ->
if newZoom?
@each ->
cropit = $.data @, dataKey
cropit?.setZoom newZoom
else
cropit = @first().data dataKey
cropit?.getZoom()
imageSize: ->
cropit = @first().data dataKey
cropit?.getImageSize()
previewSize: (newSize) ->
if newSize?
@each ->
cropit = $.data @, dataKey
cropit?.setPreviewSize newSize
else
cropit = @first().data dataKey
cropit?.getPreviewSize()
disable: ->
@each ->
cropit = $.data @, dataKey
cropit.disable()
reenable: ->
@each ->
cropit = $.data @, dataKey
cropit.reenable()
$.fn.cropit = (method) ->
if methods[method]
methods[method].apply @, [].slice.call arguments, 1
else
methods.init.apply @, arguments
| 143399 | dataKey = '<KEY>'
methods =
init: (options) ->
@each ->
# Only instantiate once per element
unless $.data @, dataKey
cropit = new Cropit @, options
$.data @, dataKey, cropit
destroy: ->
@each ->
$.removeData @, dataKey
isZoomable: ->
cropit = @first().data dataKey
cropit?.isZoomable()
export: (options) ->
cropit = @first().data dataKey
cropit?.getCroppedImageData options
imageState: ->
cropit = @first().data dataKey
cropit?.getImageState()
imageSrc: (newImageSrc) ->
if newImageSrc?
@each ->
cropit = $.data @, dataKey
cropit?.reset()
cropit?.loadImage newImageSrc
else
cropit = @first().data dataKey
cropit?.getImageSrc()
offset: (newOffset) ->
if newOffset? and newOffset.x? and newOffset.y?
@each ->
cropit = $.data @, dataKey
cropit?.setOffset newOffset
else
cropit = @first().data dataKey
cropit?.getOffset()
zoom: (newZoom) ->
if newZoom?
@each ->
cropit = $.data @, dataKey
cropit?.setZoom newZoom
else
cropit = @first().data dataKey
cropit?.getZoom()
imageSize: ->
cropit = @first().data dataKey
cropit?.getImageSize()
previewSize: (newSize) ->
if newSize?
@each ->
cropit = $.data @, dataKey
cropit?.setPreviewSize newSize
else
cropit = @first().data dataKey
cropit?.getPreviewSize()
disable: ->
@each ->
cropit = $.data @, dataKey
cropit.disable()
reenable: ->
@each ->
cropit = $.data @, dataKey
cropit.reenable()
$.fn.cropit = (method) ->
if methods[method]
methods[method].apply @, [].slice.call arguments, 1
else
methods.init.apply @, arguments
| true | dataKey = 'PI:KEY:<KEY>END_PI'
methods =
init: (options) ->
@each ->
# Only instantiate once per element
unless $.data @, dataKey
cropit = new Cropit @, options
$.data @, dataKey, cropit
destroy: ->
@each ->
$.removeData @, dataKey
isZoomable: ->
cropit = @first().data dataKey
cropit?.isZoomable()
export: (options) ->
cropit = @first().data dataKey
cropit?.getCroppedImageData options
imageState: ->
cropit = @first().data dataKey
cropit?.getImageState()
imageSrc: (newImageSrc) ->
if newImageSrc?
@each ->
cropit = $.data @, dataKey
cropit?.reset()
cropit?.loadImage newImageSrc
else
cropit = @first().data dataKey
cropit?.getImageSrc()
offset: (newOffset) ->
if newOffset? and newOffset.x? and newOffset.y?
@each ->
cropit = $.data @, dataKey
cropit?.setOffset newOffset
else
cropit = @first().data dataKey
cropit?.getOffset()
zoom: (newZoom) ->
if newZoom?
@each ->
cropit = $.data @, dataKey
cropit?.setZoom newZoom
else
cropit = @first().data dataKey
cropit?.getZoom()
imageSize: ->
cropit = @first().data dataKey
cropit?.getImageSize()
previewSize: (newSize) ->
if newSize?
@each ->
cropit = $.data @, dataKey
cropit?.setPreviewSize newSize
else
cropit = @first().data dataKey
cropit?.getPreviewSize()
disable: ->
@each ->
cropit = $.data @, dataKey
cropit.disable()
reenable: ->
@each ->
cropit = $.data @, dataKey
cropit.reenable()
$.fn.cropit = (method) ->
if methods[method]
methods[method].apply @, [].slice.call arguments, 1
else
methods.init.apply @, arguments
|
[
{
"context": "e: 'markers',\n type: 'scatter',\n name: 'Personas',\n marker:{\n size:15,\n color",
"end": 986,
"score": 0.9971829056739807,
"start": 980,
"tag": "NAME",
"value": "Person"
},
{
"context": "rkers',\n type: 'scatter',\n name: 'Perso... | app/assets/javascripts/dashboard.coffee | emmamm05/ftw-backend | 0 | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
$ ->
$(document).on 'ready', (evt) ->
# Sleep Analysis Chart.
elem = document.getElementById('sleep-analysis')
data = {
x: gon.sleep_people_timestamps
y: gon.sleep_people_counts,
}
layout = {
xaxis: {
rangemode: 'tozero'
}
yaxis: {
range: [0,5],
title: 'Sleep people'
},
margin:{
top: 0
}
}
console.log gon.sleep_people_timestamps
console.log gon.sleep_people_counts
Plotly.plot elem, [ data ], layout=layout, margin: t: 0
# Map Chart.
elem = document.getElementById('localization-map-chart')
localizations = {
x: gon.localizations_x,
y: gon.localizations_y,
mode: 'markers',
type: 'scatter',
name: 'Personas',
marker:{
size:15,
color: ['rgb(93, 164, 214)']
}
}
reference_nodes = {
x: gon.reference_localizations_x,
y: gon.reference_localizations_y,
mode: 'markers',
type: 'scatter',
name: 'Referencias'
marker:{
size: 15,
}
}
# maximun height and width
min_x = -4
max_x = 12
min_y = -4
max_y = 12
layout = {
width:800,
margin:{
l:0,
r:0,
b:0,
t:0
},
xaxis:{
showgrid: false,
zeroline: false,
showline: false,
autotick: true,
ticks: '',
showticklabels: false,
range: [min_x,max_x]
},
yaxis:{
showgrid: false,
zeroline: false,
showline: false,
autotick: true,
ticks: '',
showticklabels: false,
range:[min_y,max_y]
},
images: [
{
xref: "x",
yref: "y",
x: min_x,
y: max_y,
sizex: max_x - min_x,
sizey: max_y - min_y,
sizing: "stretch",
opacity: 1,
layer: "below",
source: "map.png"
}
]
}
Plotly.plot elem, [localizations,reference_nodes], layout=layout
#
#$(document).ready ->
# $('#slider-timeline-days').ionRangeSlider({
# type: "double",
# min: 0,
# max: 100,
# from: 200,
# to: 500,
# grid: true
# }); | 174426 | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
$ ->
$(document).on 'ready', (evt) ->
# Sleep Analysis Chart.
elem = document.getElementById('sleep-analysis')
data = {
x: gon.sleep_people_timestamps
y: gon.sleep_people_counts,
}
layout = {
xaxis: {
rangemode: 'tozero'
}
yaxis: {
range: [0,5],
title: 'Sleep people'
},
margin:{
top: 0
}
}
console.log gon.sleep_people_timestamps
console.log gon.sleep_people_counts
Plotly.plot elem, [ data ], layout=layout, margin: t: 0
# Map Chart.
elem = document.getElementById('localization-map-chart')
localizations = {
x: gon.localizations_x,
y: gon.localizations_y,
mode: 'markers',
type: 'scatter',
name: '<NAME> <NAME>',
marker:{
size:15,
color: ['rgb(93, 164, 214)']
}
}
reference_nodes = {
x: gon.reference_localizations_x,
y: gon.reference_localizations_y,
mode: 'markers',
type: 'scatter',
name: '<NAME> <NAME>'
marker:{
size: 15,
}
}
# maximun height and width
min_x = -4
max_x = 12
min_y = -4
max_y = 12
layout = {
width:800,
margin:{
l:0,
r:0,
b:0,
t:0
},
xaxis:{
showgrid: false,
zeroline: false,
showline: false,
autotick: true,
ticks: '',
showticklabels: false,
range: [min_x,max_x]
},
yaxis:{
showgrid: false,
zeroline: false,
showline: false,
autotick: true,
ticks: '',
showticklabels: false,
range:[min_y,max_y]
},
images: [
{
xref: "x",
yref: "y",
x: min_x,
y: max_y,
sizex: max_x - min_x,
sizey: max_y - min_y,
sizing: "stretch",
opacity: 1,
layer: "below",
source: "map.png"
}
]
}
Plotly.plot elem, [localizations,reference_nodes], layout=layout
#
#$(document).ready ->
# $('#slider-timeline-days').ionRangeSlider({
# type: "double",
# min: 0,
# max: 100,
# from: 200,
# to: 500,
# grid: true
# }); | true | # Place all the behaviors and hooks related to the matching controller here.
# All this logic will automatically be available in application.js.
# You can use CoffeeScript in this file: http://coffeescript.org/
$ ->
$(document).on 'ready', (evt) ->
# Sleep Analysis Chart.
elem = document.getElementById('sleep-analysis')
data = {
x: gon.sleep_people_timestamps
y: gon.sleep_people_counts,
}
layout = {
xaxis: {
rangemode: 'tozero'
}
yaxis: {
range: [0,5],
title: 'Sleep people'
},
margin:{
top: 0
}
}
console.log gon.sleep_people_timestamps
console.log gon.sleep_people_counts
Plotly.plot elem, [ data ], layout=layout, margin: t: 0
# Map Chart.
elem = document.getElementById('localization-map-chart')
localizations = {
x: gon.localizations_x,
y: gon.localizations_y,
mode: 'markers',
type: 'scatter',
name: 'PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI',
marker:{
size:15,
color: ['rgb(93, 164, 214)']
}
}
reference_nodes = {
x: gon.reference_localizations_x,
y: gon.reference_localizations_y,
mode: 'markers',
type: 'scatter',
name: 'PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI'
marker:{
size: 15,
}
}
# maximun height and width
min_x = -4
max_x = 12
min_y = -4
max_y = 12
layout = {
width:800,
margin:{
l:0,
r:0,
b:0,
t:0
},
xaxis:{
showgrid: false,
zeroline: false,
showline: false,
autotick: true,
ticks: '',
showticklabels: false,
range: [min_x,max_x]
},
yaxis:{
showgrid: false,
zeroline: false,
showline: false,
autotick: true,
ticks: '',
showticklabels: false,
range:[min_y,max_y]
},
images: [
{
xref: "x",
yref: "y",
x: min_x,
y: max_y,
sizex: max_x - min_x,
sizey: max_y - min_y,
sizing: "stretch",
opacity: 1,
layer: "below",
source: "map.png"
}
]
}
Plotly.plot elem, [localizations,reference_nodes], layout=layout
#
#$(document).ready ->
# $('#slider-timeline-days').ionRangeSlider({
# type: "double",
# min: 0,
# max: 100,
# from: 200,
# to: 500,
# grid: true
# }); |
[
{
"context": "..........................\n### https://github.com/wdavidw/node-csv-parse ###\n_new_csv_parser = re",
"end": 2114,
"score": 0.8501050472259521,
"start": 2107,
"tag": "USERNAME",
"value": "wdavidw"
},
{
"context": "S)\\s+/, ''\n name = name.repla... | src/scratch/__old__GTFS-READER.coffee | loveencounterflow/timetable | 1 |
############################################################################################################
# njs_util = require 'util'
# njs_path = require 'path'
njs_fs = require 'fs'
# njs_crypto = require 'crypto'
#...........................................................................................................
# BAP = require 'coffeenode-bitsnpieces'
TYPES = require 'coffeenode-types'
TEXT = require 'coffeenode-text'
TRM = require 'coffeenode-trm'
rpr = TRM.rpr.bind TRM
badge = 'TIMETABLE/read-gtfs-data'
log = TRM.get_logger 'plain', badge
info = TRM.get_logger 'info', badge
whisper = TRM.get_logger 'whisper', badge
alert = TRM.get_logger 'alert', badge
debug = TRM.get_logger 'debug', badge
warn = TRM.get_logger 'warn', badge
help = TRM.get_logger 'help', badge
urge = TRM.get_logger 'urge', badge
echo = TRM.echo.bind TRM
rainbow = TRM.rainbow.bind TRM
#...........................................................................................................
T = require './TRANSFORMERS'
as_transformer = T.as_transformer.bind T
options = require '../options'
global_data_limit = options[ 'data' ]?[ 'limit' ] ? Infinity
datasource_infos = ( require './get-datasource-infos' )()
create_readstream = require './create-readstream'
REGISTRY = require './REGISTRY'
#...........................................................................................................
ASYNC = require 'async'
#...........................................................................................................
### https://github.com/wdavidw/node-csv-parse ###
_new_csv_parser = require 'csv-parse'
new_csv_parser = -> _new_csv_parser options[ 'parser' ]
############################################################################################################
# GENERIC METHODS
#-----------------------------------------------------------------------------------------------------------
### TAINT very Berlin-specific method, shouldnt appear here ###
@_normalize_name = ( name ) ->
name = name.replace /\s+\(Berlin\)(\s+Bus)?$/, ''
name = name.replace /^(U|S\+U|S)\s+/, ''
name = name.replace /^(Alexanderplatz) Bhf\/(.+)$/, '$1 ($2)'
name = name.replace /^(Lichtenberg) Bhf\/(.+)$/, '$1 ($2)'
name = name.replace /^(Alexanderplatz) Bhf/, '$1'
name = name.replace /^(Zoologischer Garten) Bhf/, '$1'
name = name.replace /^(Gesundbrunnen) Bhf/, '$1'
name = name.replace /^(Potsdamer Platz) Bhf/, '$1'
name = name.replace /^(Lichtenberg) Bhf/, '$1'
name = name.replace /^(Friedrichstr\.) Bhf/, '$1'
name = name.replace /^(Jungfernheide) Bhf/, '$1'
name = name.replace /^(Stadtmitte) U[26]/, '$1'
name = name.replace /^(.+)str\./, '$1straße'
name = name.replace /^(.+)\s+Str\./, '$1 Straße'
return name
#-----------------------------------------------------------------------------------------------------------
@_get_system_name = ( name ) ->
name = name.toLowerCase()
name = name.replace /,/g, ''
name = name.replace /\./g, ''
name = name.replace /\s+/g, '-'
return name
#-----------------------------------------------------------------------------------------------------------
### TAINT unify with following ###
@$normalize_station_name = ->
return as_transformer ( record, handler ) =>
record[ 'name' ] = @_normalize_name record[ 'name' ]
handler null, record
#-----------------------------------------------------------------------------------------------------------
@$normalize_headsign = ->
return as_transformer ( record, handler ) =>
record[ 'headsign' ] = @_normalize_name record[ 'headsign' ]
handler null, record
#-----------------------------------------------------------------------------------------------------------
@$convert_latlon = ->
return as_transformer ( record, handler ) =>
record[ 'lat' ] = parseFloat record[ 'lat' ]
record[ 'lon' ] = parseFloat record[ 'lon' ]
handler null, record
#-----------------------------------------------------------------------------------------------------------
@$register = ( registry ) ->
return as_transformer ( record, handler ) =>
REGISTRY.register_gtfs registry, record
return record
############################################################################################################
# SPECIFIC METHODS
#===========================================================================================================
# SPECIFIC METHODS: AGENCIES
#-----------------------------------------------------------------------------------------------------------
@$clean_agency_record = ->
return as_transformer ( record, handler ) =>
delete record[ 'agency_phone' ]
delete record[ 'agency_lang' ]
handler null, record
#===========================================================================================================
# SPECIFIC METHODS: STOPTIMES
#-----------------------------------------------------------------------------------------------------------
@$clean_stoptime_record = ->
return as_transformer ( record, handler ) =>
# delete record[ 'trip_id' ]
# delete record[ 'arrival_time' ]
# delete record[ 'departure_time' ]
# delete record[ 'stop_id' ]
# delete record[ 'stop_sequence' ]
delete record[ 'stop_headsign' ]
delete record[ 'pickup_type' ]
delete record[ 'drop_off_type' ]
delete record[ 'shape_dist_traveled' ]
handler null, record
#-----------------------------------------------------------------------------------------------------------
@$add_stoptime_idx = ->
return as_transformer ( record, handler ) =>
record[ 'idx' ] = ( parseInt record[ 'stop-sequence' ], 10 ) - 1
delete record[ 'stop-sequence' ]
handler null, record
#===========================================================================================================
# SPECIFIC METHODS: ROUTES
#-----------------------------------------------------------------------------------------------------------
@$clean_route_record = ->
return as_transformer ( record, handler ) =>
# delete record[ 'route_id' ]
# delete record[ 'agency_id' ]
# delete record[ 'route_short_name' ]
delete record[ 'route_long_name' ]
delete record[ 'route_desc' ]
# delete record[ 'route_type' ]
delete record[ 'route_url' ]
delete record[ 'route_color' ]
delete record[ 'route_text_color' ]
handler null, record
#===========================================================================================================
# SPECIFIC METHODS: STATIONS
#-----------------------------------------------------------------------------------------------------------
@$clean_station_record = ->
return as_transformer ( record, handler ) =>
# delete record[ 'stop_id' ]
# delete record[ 'stop_code' ]
# delete record[ 'stop_name' ]
delete record[ 'stop_desc' ]
# delete record[ 'stop_lat' ]
# delete record[ 'stop_lon' ]
delete record[ 'zone_id' ]
delete record[ 'stop_url' ]
delete record[ 'location_type' ]
delete record[ 'parent_station' ]
handler null, record
#-----------------------------------------------------------------------------------------------------------
@$add_station_system_name = ->
return as_transformer ( record, handler ) =>
record[ '~name' ] = @_get_system_name record[ 'name' ]
handler null, record
#===========================================================================================================
# SPECIFIC METHODS: TRIPS
#-----------------------------------------------------------------------------------------------------------
@$clean_trip_record = ->
return as_transformer ( record, handler ) =>
# delete record[ 'route_id' ]
# delete record[ 'service_id' ]
# delete record[ 'trip_id' ]
# delete record[ 'trip_headsign' ]
delete record[ 'trip_short_name' ]
delete record[ 'direction_id' ]
delete record[ 'block_id' ]
delete record[ 'shape_id' ]
handler null, record
#-----------------------------------------------------------------------------------------------------------
@$add_headsign_system_name = ->
return as_transformer ( record, handler ) =>
record[ '~headsign' ] = @_get_system_name record[ 'headsign' ]
handler null, record
############################################################################################################
# FINALIZATION
#-----------------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------------
@$add_agency_ids = ( registry ) ->
return ( record ) ->
record[ 'id' ] = record[ '%gtfs-id' ].replace /[-_]+$/, ''
return record
#-----------------------------------------------------------------------------------------------------------
@$add_stoptime_ids = ( registry ) ->
return ( record ) ->
gtfs_stop_id = record[ '%gtfs-stop-id' ]
gtfs_trip_id = record[ '%gtfs-trip-id' ]
idx = record[ 'idx' ]
record[ 'id' ] = "gtfs-stop:#{gtfs_stop_id}/gtfs-trip:#{gtfs_trip_id}/idx:#{idx}"
return record
#-----------------------------------------------------------------------------------------------------------
@$add_route_ids = ( registry ) ->
route_idx = -1
return ( record ) ->
route_idx += 1
gtfs_agency_id = record[ '%gtfs-agency-id' ]
gtfs_id = record[ '%gtfs-id' ]
name = record[ 'name' ]
agency = registry[ 'old' ][ gtfs_agency_id ]
return handler new Error "unable to find agency with GTFS ID #{rpr gtfs_agency_id}" unless agency?
agency_id = agency[ 'id' ]
record[ 'id' ] = "route:#{route_idx}/#{agency_id}/name:#{name}"
return record
#-----------------------------------------------------------------------------------------------------------
@$add_station_ids = ( registry ) ->
return ( record ) ->
stations_by_names = registry[ '%stations-by-names' ]?= {}
sys_name = record[ '~name' ]
stations = stations_by_names[ sys_name ]?= []
station_idx = stations.length
stations.push record
record[ 'id' ] = "station/name:#{sys_name}/idx:#{station_idx}"
# whisper '©4p1', record[ 'id' ]
return record
#-----------------------------------------------------------------------------------------------------------
@$add_trip_ids = ( registry ) ->
return ( record ) ->
gtfs_trip_id = record[ '%gtfs-id' ]
gtfs_route_id = record[ '%gtfs-route-id' ]
# route = registry[ 'old' ][ gtfs_route_id ]
# sys_headsign = record[ '~headsign' ]
# record[ 'id' ] = "station/headsign:#{sys_headsign}/gtfs-route-id:#{gtfs_route_id}/gtfs-trip-id:#{gtfs_trip_id}"
### does this make sense?? ###
record[ 'id' ] = "gtfs-route-id:#{gtfs_route_id}/gtfs-trip-id:#{gtfs_trip_id}"
# whisper record
# whisper '©4p1', record[ 'id' ]
return record
#-----------------------------------------------------------------------------------------------------------
@finalize = ( registry, handler ) ->
method_by_types =
'agency': ( @$add_agency_ids registry ).bind @
'stoptime': ( @$add_stoptime_ids registry ).bind @
'route': ( @$add_route_ids registry ).bind @
'station': ( @$add_station_ids registry ).bind @
'trip': ( @$add_trip_ids registry ).bind @
for _, record of registry[ 'old' ]
method = method_by_types[ label = record[ '~label' ] ]
unless method?
warn "unable to locate method `add_#{label}_ids; skipping"
continue
method record
id = record[ 'id' ]
unless id?
warn "unable to find ID in #{record}; skipping"
continue
if ( duplicate = registry[ 'new' ][ id ] )?
return handler new Error """
duplicate IDs:
#{rpr duplicate}
#{rpr record}"""
registry[ 'new' ][ id ] = record
handler null
############################################################################################################
# MAKE IT SO
#-----------------------------------------------------------------------------------------------------------
@read_agencies = ( route, registry, handler ) ->
parser = new_csv_parser()
input = create_readstream route, 'agencies'
#.........................................................................................................
input.on 'end', ->
info 'ok: agencies'
return handler null
#.........................................................................................................
input.pipe parser
.pipe T.$as_pods()
.pipe @$clean_agency_record()
.pipe T.$delete_prefix 'agency_'
.pipe T.$set '%gtfs-type', 'agency'
.pipe T.$rename 'id', '%gtfs-id'
.pipe T.$dasherize_field_names()
.pipe @$register registry
.pipe T.$show_sample input
# .pipe T.$show_and_quit()
#.........................................................................................................
whisper 'reading GTFS agencies...'
return null
#-----------------------------------------------------------------------------------------------------------
@read_stop_times = ( route, registry, handler ) ->
parser = new_csv_parser()
input = create_readstream route, 'stop_times'
#.........................................................................................................
input.on 'end', ->
info 'ok: stoptimes'
return handler null
#.........................................................................................................
input.pipe parser
.pipe T.$skip global_data_limit
.pipe T.$as_pods()
.pipe @$clean_stoptime_record()
# .pipe @$fix_ids()
.pipe T.$delete_prefix 'trip_'
.pipe T.$add_n4j_system_properties 'node', 'stoptime'
.pipe T.$dasherize_field_names()
.pipe T.$rename 'id', '%gtfs-trip-id'
.pipe T.$rename 'stop-id', '%gtfs-stop-id'
.pipe @$add_stoptime_idx()
# .pipe T.$register registry
.pipe T.$show_sample input
# .pipe T.$show_and_quit()
#.........................................................................................................
whisper 'reading GTFS stoptimes...'
return null
#-----------------------------------------------------------------------------------------------------------
### TAINT name clash (filesystem route vs. GTFS route) ###
@read_routes = ( route, registry, handler ) ->
parser = new_csv_parser()
input = create_readstream route, 'routes'
#.........................................................................................................
input.on 'end', ->
info 'ok: routes'
return handler null
#.........................................................................................................
input.pipe parser
.pipe T.$skip global_data_limit
.pipe T.$as_pods()
.pipe @$clean_route_record()
# .pipe @$fix_ids()
.pipe T.$dasherize_field_names()
.pipe T.$rename 'route-id', '%gtfs-id'
.pipe T.$rename 'agency-id', '%gtfs-agency-id'
.pipe T.$rename 'route-short-name', 'name'
.pipe T.$add_n4j_system_properties 'node', 'route'
# .pipe T.$register registry
.pipe T.$show_sample input
# .pipe T.$show_and_quit()
#.........................................................................................................
whisper 'reading GTFS routes...'
return null
#-----------------------------------------------------------------------------------------------------------
@read_stops = ( route, registry, handler ) ->
parser = new_csv_parser()
input = create_readstream route, 'stops'
#.........................................................................................................
input.on 'end', ->
info 'ok: stops'
return handler null
#.........................................................................................................
input.pipe parser
# .pipe T.$skip global_data_limit
.pipe T.$as_pods()
.pipe @$clean_station_record()
.pipe T.$delete_prefix 'stop_'
.pipe T.$set '%gtfs-type', 'stops'
# .pipe T.$copy 'name', '%gtfs-name'
# .pipe @$normalize_station_name()
# .pipe @$add_station_system_name()
.pipe T.$rename 'id', '%gtfs-id'
# .pipe T.$add_n4j_system_properties 'node', 'station'
.pipe @$convert_latlon()
.pipe @$register registry
.pipe T.$show_sample input
# .pipe T.$show_and_quit()
#.........................................................................................................
whisper 'reading GTFS stops...'
return null
#-----------------------------------------------------------------------------------------------------------
@read_trips = ( route, registry, handler ) ->
parser = new_csv_parser()
input = create_readstream route, 'trips'
#.........................................................................................................
input.on 'end', ->
info 'ok: trips'
return handler null
#.........................................................................................................
input.pipe parser
.pipe T.$skip global_data_limit
.pipe T.$as_pods()
.pipe @$clean_trip_record()
# .pipe @$fix_ids()
.pipe T.$delete_prefix 'trip_'
.pipe T.$dasherize_field_names()
.pipe T.$rename 'id', '%gtfs-id'
.pipe T.$rename 'route-id', '%gtfs-route-id'
.pipe T.$rename 'service-id', '%gtfs-service-id'
.pipe T.$copy 'headsign', '%gtfs-headsign'
.pipe @$normalize_headsign()
.pipe @$add_headsign_system_name()
.pipe T.$add_n4j_system_properties 'node', 'trip'
# .pipe T.$register registry
.pipe T.$show_sample input
# .pipe T.$show_and_quit()
#.........................................................................................................
whisper 'reading GTFS trips...'
return null
#===========================================================================================================
# READ METHOD
#-----------------------------------------------------------------------------------------------------------
@main = ( registry, handler ) ->
#.........................................................................................................
for source_name, route_by_types of datasource_infos
tasks = []
no_source = []
no_method = []
ok_types = []
#.......................................................................................................
for gtfs_type in options[ 'data' ][ 'gtfs-types' ]
route = route_by_types[ gtfs_type ]
unless route?
no_source.push "skipping #{source_name}/#{gtfs_type} (no source file)"
continue
help "found data source for #{source_name}/#{gtfs_type}"
#.....................................................................................................
method = null
switch gtfs_type
when 'agency' then method = @read_agencies
# when 'calendar_dates' then method = @read_calendar_dates
# when 'calendar' then method = @read_calendar
# when 'routes' then method = @read_routes
# # when 'stop_times' then method = @read_stop_times
when 'stops' then method = @read_stops
# when 'transfers' then method = @read_transfers
# when 'trips' then method = @read_trips
unless method?
no_method.push "no method to read GTFS data of type #{rpr gtfs_type}; skipping"
continue
method = method.bind @
ok_types.push gtfs_type
#.....................................................................................................
do ( method, route ) =>
tasks.push ( async_handler ) => method route, registry, async_handler
#.......................................................................................................
for messages in [ no_source, no_method, ]
for message in messages
warn message
#.......................................................................................................
info "reading data for #{ok_types.length} type(s)"
info " (#{ok_types.join ', '})"
#.........................................................................................................
# limit = options[ 'stream-transform' ]?[ 'parallel' ] ? 1
ASYNC.series tasks, ( error ) =>
throw error if error?
handler null, registry
#.........................................................................................................
return null
############################################################################################################
# HELPERS
#===========================================================================================================
############################################################################################################
# Standalone entry point: kick off `@main` and dump the resulting registry.
# NOTE(review): `@main` is declared elsewhere in this file as
# `( registry, handler )`, but here it is called with a single callback —
# the callback lands in the `registry` parameter and `handler` stays
# undefined. Confirm the intended arity.
unless module.parent?
  @main ( error, registry ) ->
    throw error if error?
    info registry
| 76614 |
############################################################################################################
# njs_util = require 'util'
# njs_path = require 'path'
njs_fs = require 'fs'
# njs_crypto = require 'crypto'
#...........................................................................................................
# BAP = require 'coffeenode-bitsnpieces'
TYPES = require 'coffeenode-types'
TEXT = require 'coffeenode-text'
TRM = require 'coffeenode-trm'
rpr = TRM.rpr.bind TRM
badge = 'TIMETABLE/read-gtfs-data'
log = TRM.get_logger 'plain', badge
info = TRM.get_logger 'info', badge
whisper = TRM.get_logger 'whisper', badge
alert = TRM.get_logger 'alert', badge
debug = TRM.get_logger 'debug', badge
warn = TRM.get_logger 'warn', badge
help = TRM.get_logger 'help', badge
urge = TRM.get_logger 'urge', badge
echo = TRM.echo.bind TRM
rainbow = TRM.rainbow.bind TRM
#...........................................................................................................
T = require './TRANSFORMERS'
as_transformer = T.as_transformer.bind T
options = require '../options'
global_data_limit = options[ 'data' ]?[ 'limit' ] ? Infinity
datasource_infos = ( require './get-datasource-infos' )()
create_readstream = require './create-readstream'
REGISTRY = require './REGISTRY'
#...........................................................................................................
ASYNC = require 'async'
#...........................................................................................................
### https://github.com/wdavidw/node-csv-parse ###
_new_csv_parser = require 'csv-parse'
new_csv_parser = -> _new_csv_parser options[ 'parser' ]
############################################################################################################
# GENERIC METHODS
#-----------------------------------------------------------------------------------------------------------
### TAINT very Berlin-specific method, shouldnt appear here ###
# Normalize a Berlin station display name for output: strip the trailing
# `(Berlin)` / `Bus` markers, the leading `U` / `S` / `S+U` service prefixes,
# the various `Bhf` (station) suffixes, and expand the `str.` / `Str.`
# abbreviations to `straße` / `Straße`.
@_normalize_name = ( name ) ->
  name = name.replace /\s+\(Berlin\)(\s+Bus)?$/, ''
  name = name.replace /^(U|S\+U|S)\s+/, ''
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf\/(.+)$/, '$1 ($2)'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf\/(.+)$/, '$1 ($2)'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PIoologischer Garten) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PIesundbrunnen) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI\.) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(Stadtmitte) U[26]/, '$1'
  name = name.replace /^(.+)str\./, '$1straße'
  name = name.replace /^(.+)\s+Str\./, '$1 Straße'
  return name
#-----------------------------------------------------------------------------------------------------------
# Derive a canonical lookup key from a display name: lower-case it, strip
# commas and periods, and collapse runs of whitespace into single dashes.
@_get_system_name = ( name ) ->
  result = name.toLowerCase()
  for [ pattern, replacement ] in [ [ /,/g, '' ], [ /\./g, '' ], [ /\s+/g, '-' ], ]
    result = result.replace pattern, replacement
  return result
#-----------------------------------------------------------------------------------------------------------
### TAINT unify with following ###
# Stream transform: normalize the station's display name in place via
# `@_normalize_name`, then forward the record.
@$normalize_station_name = ->
  return as_transformer ( record, handler ) =>
    record.name = @_normalize_name record.name
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: normalize the trip's headsign in place via
# `@_normalize_name`, then forward the record.
@$normalize_headsign = ->
  return as_transformer ( record, handler ) =>
    record.headsign = @_normalize_name record.headsign
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: parse the textual `lat` / `lon` CSV fields into numbers.
@$convert_latlon = ->
  return as_transformer ( record, handler ) =>
    for field in [ 'lat', 'lon', ]
      record[ field ] = parseFloat record[ field ]
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: enter each record into `registry` (via
# `REGISTRY.register_gtfs`) and pass it along unchanged.
@$register = ( registry ) ->
  return as_transformer ( record, handler ) =>
    REGISTRY.register_gtfs registry, record
    # Fix: every other transformer in this file completes its async step with
    # `handler null, record`; the original's bare `return record` never
    # signalled completion, so the record would not be forwarded downstream.
    handler null, record
############################################################################################################
# SPECIFIC METHODS
#===========================================================================================================
# SPECIFIC METHODS: AGENCIES
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop agency columns that are never used downstream.
@$clean_agency_record = ->
  unwanted = [ 'agency_phone', 'agency_lang', ]
  return as_transformer ( record, handler ) =>
    delete record[ field ] for field in unwanted
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: STOPTIMES
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop stoptime columns that are never used downstream.
# The GTFS keys `trip_id`, `arrival_time`, `departure_time`, `stop_id` and
# `stop_sequence` are deliberately kept.
@$clean_stoptime_record = ->
  unwanted = [ 'stop_headsign', 'pickup_type', 'drop_off_type', 'shape_dist_traveled', ]
  return as_transformer ( record, handler ) =>
    delete record[ field ] for field in unwanted
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: turn the 1-based GTFS `stop-sequence` field into a
# 0-based `idx` field.
@$add_stoptime_idx = ->
  return as_transformer ( record, handler ) =>
    sequence_text = record[ 'stop-sequence' ]
    delete record[ 'stop-sequence' ]
    record[ 'idx' ] = ( parseInt sequence_text, 10 ) - 1
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: ROUTES
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop route columns that are never used downstream.
# `route_id`, `agency_id`, `route_short_name` and `route_type` are kept.
@$clean_route_record = ->
  unwanted = [ 'route_long_name', 'route_desc', 'route_url', 'route_color', 'route_text_color', ]
  return as_transformer ( record, handler ) =>
    delete record[ field ] for field in unwanted
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: STATIONS
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop stop/station columns that are never used downstream.
# `stop_id`, `stop_code`, `stop_name`, `stop_lat` and `stop_lon` are kept.
@$clean_station_record = ->
  unwanted = [ 'stop_desc', 'zone_id', 'stop_url', 'location_type', 'parent_station', ]
  return as_transformer ( record, handler ) =>
    delete record[ field ] for field in unwanted
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: cache the canonical form of the station name as `~name`.
@$add_station_system_name = ->
  return as_transformer ( record, handler ) =>
    sys_name = @_get_system_name record[ 'name' ]
    record[ '~name' ] = sys_name
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: TRIPS
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop trip columns that are never used downstream.
# `route_id`, `service_id`, `trip_id` and `trip_headsign` are kept.
@$clean_trip_record = ->
  unwanted = [ 'trip_short_name', 'direction_id', 'block_id', 'shape_id', ]
  return as_transformer ( record, handler ) =>
    delete record[ field ] for field in unwanted
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: cache the canonical form of the headsign as `~headsign`.
@$add_headsign_system_name = ->
  return as_transformer ( record, handler ) =>
    sys_headsign = @_get_system_name record[ 'headsign' ]
    record[ '~headsign' ] = sys_headsign
    handler null, record
############################################################################################################
# FINALIZATION
#-----------------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------------
# ID builder for agency records: the GTFS ID with any trailing dashes and
# underscores stripped. (`registry` is unused here but keeps the builder
# signature uniform with the other `$add_*_ids` factories.)
@$add_agency_ids = ( registry ) ->
  return ( record ) ->
    gtfs_id = record[ '%gtfs-id' ]
    record[ 'id' ] = gtfs_id.replace /[-_]+$/, ''
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for stoptime records:
# `gtfs-stop:<stop>/gtfs-trip:<trip>/idx:<idx>`.
@$add_stoptime_ids = ( registry ) ->
  return ( record ) ->
    stop_id = record[ '%gtfs-stop-id' ]
    trip_id = record[ '%gtfs-trip-id' ]
    record[ 'id' ] = "gtfs-stop:#{stop_id}/gtfs-trip:#{trip_id}/idx:#{record[ 'idx' ]}"
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for route records: `route:<running idx>/<agency id>/name:<name>`.
# The referenced agency must already be present in `registry['old']` under
# its GTFS agency ID.
@$add_route_ids = ( registry ) ->
  route_idx = -1
  return ( record ) ->
    route_idx += 1
    gtfs_agency_id = record[ '%gtfs-agency-id' ]
    name = record[ 'name' ]
    agency = registry[ 'old' ][ gtfs_agency_id ]
    # Fix: the original did `return handler new Error ...`, but no `handler`
    # is in scope in this closure — that line itself would die with a
    # ReferenceError. Throw instead so the caller sees the real problem.
    # (Also removed the unused `gtfs_id` local.)
    throw new Error "unable to find agency with GTFS ID #{rpr gtfs_agency_id}" unless agency?
    agency_id = agency[ 'id' ]
    record[ 'id' ] = "route:#{route_idx}/#{agency_id}/name:#{name}"
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for station records; stations that share one canonical name are
# disambiguated with a per-name running index.
@$add_station_ids = ( registry ) ->
  return ( record ) ->
    by_name = registry[ '%stations-by-names' ] ?= {}
    sys_name = record[ '~name' ]
    siblings = by_name[ sys_name ] ?= []
    # The index is taken *before* pushing, so the first station gets idx 0.
    record[ 'id' ] = "station/name:#{sys_name}/idx:#{siblings.length}"
    siblings.push record
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for trip records:
# `gtfs-route-id:<route>/gtfs-trip-id:<trip>`.
### does this make sense?? ###
@$add_trip_ids = ( registry ) ->
  return ( record ) ->
    trip_id = record[ '%gtfs-id' ]
    route_id = record[ '%gtfs-route-id' ]
    record[ 'id' ] = "gtfs-route-id:#{route_id}/gtfs-trip-id:#{trip_id}"
    return record
#-----------------------------------------------------------------------------------------------------------
# Assign final IDs to every record collected in `registry['old']` and copy
# the records into `registry['new']`, keyed by those IDs. `handler` receives
# an Error on duplicate IDs, `null` on success.
@finalize = ( registry, handler ) ->
  method_by_types =
    'agency': ( @$add_agency_ids registry ).bind @
    'stoptime': ( @$add_stoptime_ids registry ).bind @
    'route': ( @$add_route_ids registry ).bind @
    'station': ( @$add_station_ids registry ).bind @
    'trip': ( @$add_trip_ids registry ).bind @
  for _, record of registry[ 'old' ]
    method = method_by_types[ label = record[ '~label' ] ]
    unless method?
      # Fix: the original warning had an unbalanced backtick.
      warn "unable to locate method `add_#{label}_ids`; skipping"
      continue
    method record
    id = record[ 'id' ]
    unless id?
      # Fix: interpolating the record directly printed `[object Object]`;
      # use `rpr` like the rest of the file.
      warn "unable to find ID in #{rpr record}; skipping"
      continue
    if ( duplicate = registry[ 'new' ][ id ] )?
      return handler new Error """
        duplicate IDs:
        #{rpr duplicate}
        #{rpr record}"""
    registry[ 'new' ][ id ] = record
  handler null
############################################################################################################
# MAKE IT SO
#-----------------------------------------------------------------------------------------------------------
# Read GTFS `agency` records from the data source at `route`, normalize each
# CSV row, and enter it into `registry`; `handler` is called with `null` when
# the input ends.
@read_agencies = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'agencies'
  #.........................................................................................................
  # Completion is signalled from the *source* stream's `end` event.
  # NOTE(review): downstream pipes may still be flushing at that point —
  # confirm that registration has finished before `handler` fires.
  input.on 'end', ->
    info 'ok: agencies'
    return handler null
  #.........................................................................................................
  # Pipeline: CSV rows -> plain objects -> pruned fields -> renamed GTFS keys
  # -> registered in `registry`.
  input.pipe parser
    .pipe T.$as_pods()
    .pipe @$clean_agency_record()
    .pipe T.$delete_prefix 'agency_'
    .pipe T.$set '%gtfs-type', 'agency'
    .pipe T.$rename 'id', '%gtfs-id'
    .pipe T.$dasherize_field_names()
    .pipe @$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS agencies...'
  return null
#-----------------------------------------------------------------------------------------------------------
# Read GTFS `stoptime` records from `route`: prune unused columns, rename the
# GTFS foreign keys and derive a 0-based `idx`. Records are NOT registered
# (the `$register` pipe is disabled below).
@read_stop_times = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'stop_times'
  #.........................................................................................................
  input.on 'end', ->
    info 'ok: stoptimes'
    return handler null
  #.........................................................................................................
  input.pipe parser
    # NOTE(review): `$skip` is called with the global record *limit* —
    # confirm whether `T.$skip` limits to, or skips, that many records.
    .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_stoptime_record()
    # .pipe @$fix_ids()
    .pipe T.$delete_prefix 'trip_'
    .pipe T.$add_n4j_system_properties 'node', 'stoptime'
    .pipe T.$dasherize_field_names()
    .pipe T.$rename 'id', '%gtfs-trip-id'
    .pipe T.$rename 'stop-id', '%gtfs-stop-id'
    .pipe @$add_stoptime_idx()
    # .pipe T.$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS stoptimes...'
  return null
#-----------------------------------------------------------------------------------------------------------
### TAINT name clash (filesystem route vs. GTFS route) ###
# Read GTFS `route` records from `route` (here `route` is the *filesystem*
# route — a known name clash). Rows are pruned and renamed but NOT registered
# (the `$register` pipe is disabled below).
@read_routes = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'routes'
  #.........................................................................................................
  input.on 'end', ->
    info 'ok: routes'
    return handler null
  #.........................................................................................................
  input.pipe parser
    .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_route_record()
    # .pipe @$fix_ids()
    .pipe T.$dasherize_field_names()
    .pipe T.$rename 'route-id', '%gtfs-id'
    .pipe T.$rename 'agency-id', '%gtfs-agency-id'
    .pipe T.$rename 'route-short-name', 'name'
    .pipe T.$add_n4j_system_properties 'node', 'route'
    # .pipe T.$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS routes...'
  return null
#-----------------------------------------------------------------------------------------------------------
# Read GTFS `stop` records from `route`, convert the coordinate fields to
# numbers, and register each record in `registry`.
@read_stops = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'stops'
  #.........................................................................................................
  input.on 'end', ->
    info 'ok: stops'
    return handler null
  #.........................................................................................................
  input.pipe parser
    # .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_station_record()
    .pipe T.$delete_prefix 'stop_'
    # NOTE(review): `%gtfs-type` is the plural 'stops' here while agencies use
    # the singular 'agency' — confirm which convention consumers expect.
    .pipe T.$set '%gtfs-type', 'stops'
    # .pipe T.$copy 'name', '%gtfs-name'
    # .pipe @$normalize_station_name()
    # .pipe @$add_station_system_name()
    .pipe T.$rename 'id', '%gtfs-id'
    # .pipe T.$add_n4j_system_properties 'node', 'station'
    .pipe @$convert_latlon()
    .pipe @$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS stops...'
  return null
#-----------------------------------------------------------------------------------------------------------
# Read GTFS `trip` records from `route`: prune unused columns, rename the
# GTFS keys, normalize the headsign and cache its canonical form. Records
# are NOT registered (the `$register` pipe is disabled below).
@read_trips = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'trips'
  #.........................................................................................................
  input.on 'end', ->
    info 'ok: trips'
    return handler null
  #.........................................................................................................
  input.pipe parser
    .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_trip_record()
    # .pipe @$fix_ids()
    .pipe T.$delete_prefix 'trip_'
    .pipe T.$dasherize_field_names()
    .pipe T.$rename 'id', '%gtfs-id'
    .pipe T.$rename 'route-id', '%gtfs-route-id'
    .pipe T.$rename 'service-id', '%gtfs-service-id'
    .pipe T.$copy 'headsign', '%gtfs-headsign'
    .pipe @$normalize_headsign()
    .pipe @$add_headsign_system_name()
    .pipe T.$add_n4j_system_properties 'node', 'trip'
    # .pipe T.$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS trips...'
  return null
#===========================================================================================================
# READ METHOD
#-----------------------------------------------------------------------------------------------------------
# Entry point: for every configured data source, build one reader task per
# requested GTFS type, run the tasks in series, and hand `registry` back
# through `handler`.
# NOTE(review): the standalone trailer of this file calls `@main` with a
# single callback, which does not match this `( registry, handler )`
# signature — confirm which arity is intended.
@main = ( registry, handler ) ->
  #.........................................................................................................
  for source_name, route_by_types of datasource_infos
    tasks = []
    no_source = []
    no_method = []
    ok_types = []
    #.......................................................................................................
    for gtfs_type in options[ 'data' ][ 'gtfs-types' ]
      route = route_by_types[ gtfs_type ]
      unless route?
        no_source.push "skipping #{source_name}/#{gtfs_type} (no source file)"
        continue
      help "found data source for #{source_name}/#{gtfs_type}"
      #.....................................................................................................
      # Only agencies and stops are currently wired up; the remaining readers
      # are disabled below.
      method = null
      switch gtfs_type
        when 'agency' then method = @read_agencies
        # when 'calendar_dates' then method = @read_calendar_dates
        # when 'calendar' then method = @read_calendar
        # when 'routes' then method = @read_routes
        # # when 'stop_times' then method = @read_stop_times
        when 'stops' then method = @read_stops
        # when 'transfers' then method = @read_transfers
        # when 'trips' then method = @read_trips
      unless method?
        no_method.push "no method to read GTFS data of type #{rpr gtfs_type}; skipping"
        continue
      method = method.bind @
      ok_types.push gtfs_type
      #.....................................................................................................
      # Freeze `method` and `route` per iteration so each task closes over
      # its own pair.
      do ( method, route ) =>
        tasks.push ( async_handler ) => method route, registry, async_handler
    #.......................................................................................................
    for messages in [ no_source, no_method, ]
      for message in messages
        warn message
    #.......................................................................................................
    info "reading data for #{ok_types.length} type(s)"
    info " (#{ok_types.join ', '})"
  #.........................................................................................................
  # limit = options[ 'stream-transform' ]?[ 'parallel' ] ? 1
  ASYNC.series tasks, ( error ) =>
    throw error if error?
    handler null, registry
  #.........................................................................................................
  return null
############################################################################################################
# HELPERS
#===========================================================================================================
############################################################################################################
# Standalone entry point: kick off `@main` and dump the resulting registry.
# NOTE(review): `@main` is declared elsewhere in this file as
# `( registry, handler )`, but here it is called with a single callback —
# the callback lands in the `registry` parameter and `handler` stays
# undefined. Confirm the intended arity.
unless module.parent?
  @main ( error, registry ) ->
    throw error if error?
    info registry
| true |
############################################################################################################
# njs_util = require 'util'
# njs_path = require 'path'
njs_fs = require 'fs'
# njs_crypto = require 'crypto'
#...........................................................................................................
# BAP = require 'coffeenode-bitsnpieces'
TYPES = require 'coffeenode-types'
TEXT = require 'coffeenode-text'
TRM = require 'coffeenode-trm'
rpr = TRM.rpr.bind TRM
badge = 'TIMETABLE/read-gtfs-data'
log = TRM.get_logger 'plain', badge
info = TRM.get_logger 'info', badge
whisper = TRM.get_logger 'whisper', badge
alert = TRM.get_logger 'alert', badge
debug = TRM.get_logger 'debug', badge
warn = TRM.get_logger 'warn', badge
help = TRM.get_logger 'help', badge
urge = TRM.get_logger 'urge', badge
echo = TRM.echo.bind TRM
rainbow = TRM.rainbow.bind TRM
#...........................................................................................................
T = require './TRANSFORMERS'
as_transformer = T.as_transformer.bind T
options = require '../options'
global_data_limit = options[ 'data' ]?[ 'limit' ] ? Infinity
datasource_infos = ( require './get-datasource-infos' )()
create_readstream = require './create-readstream'
REGISTRY = require './REGISTRY'
#...........................................................................................................
ASYNC = require 'async'
#...........................................................................................................
### https://github.com/wdavidw/node-csv-parse ###
_new_csv_parser = require 'csv-parse'
new_csv_parser = -> _new_csv_parser options[ 'parser' ]
############################################################################################################
# GENERIC METHODS
#-----------------------------------------------------------------------------------------------------------
### TAINT very Berlin-specific method, shouldnt appear here ###
# Normalize a Berlin station display name for output: strip the trailing
# `(Berlin)` / `Bus` markers, the leading `U` / `S` / `S+U` service prefixes,
# the various `Bhf` (station) suffixes, and expand `str.` / `Str.`.
@_normalize_name = ( name ) ->
  name = name.replace /\s+\(Berlin\)(\s+Bus)?$/, ''
  name = name.replace /^(U|S\+U|S)\s+/, ''
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf\/(.+)$/, '$1 ($2)'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf\/(.+)$/, '$1 ($2)'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PIoologischer Garten) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PIesundbrunnen) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI\.) Bhf/, '$1'
  name = name.replace /^(PI:NAME:<NAME>END_PI) Bhf/, '$1'
  name = name.replace /^(Stadtmitte) U[26]/, '$1'
  name = name.replace /^(.+)str\./, '$1straße'
  name = name.replace /^(.+)\s+Str\./, '$1 Straße'
  return name
#-----------------------------------------------------------------------------------------------------------
# Derive a canonical lookup key: lower-case, strip commas/periods, and
# dash-join whitespace runs.
@_get_system_name = ( name ) ->
  name = name.toLowerCase()
  name = name.replace /,/g, ''
  name = name.replace /\./g, ''
  name = name.replace /\s+/g, '-'
  return name
#-----------------------------------------------------------------------------------------------------------
### TAINT unify with following ###
# Stream transform: normalize the station's display name in place.
@$normalize_station_name = ->
  return as_transformer ( record, handler ) =>
    record[ 'name' ] = @_normalize_name record[ 'name' ]
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: normalize the trip's headsign in place.
@$normalize_headsign = ->
  return as_transformer ( record, handler ) =>
    record[ 'headsign' ] = @_normalize_name record[ 'headsign' ]
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: parse the textual `lat` / `lon` fields into numbers.
@$convert_latlon = ->
  return as_transformer ( record, handler ) =>
    record[ 'lat' ] = parseFloat record[ 'lat' ]
    record[ 'lon' ] = parseFloat record[ 'lon' ]
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: enter each record into `registry` (via
# `REGISTRY.register_gtfs`) and pass it along unchanged.
@$register = ( registry ) ->
  return as_transformer ( record, handler ) =>
    REGISTRY.register_gtfs registry, record
    # Fix: every other transformer in this file completes its async step with
    # `handler null, record`; the original's bare `return record` never
    # signalled completion, so the record would not be forwarded downstream.
    handler null, record
############################################################################################################
# SPECIFIC METHODS
#===========================================================================================================
# SPECIFIC METHODS: AGENCIES
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop agency columns that are never used downstream.
@$clean_agency_record = ->
  return as_transformer ( record, handler ) =>
    delete record[ 'agency_phone' ]
    delete record[ 'agency_lang' ]
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: STOPTIMES
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop stoptime columns that are never used downstream.
@$clean_stoptime_record = ->
  return as_transformer ( record, handler ) =>
    # delete record[ 'trip_id' ]
    # delete record[ 'arrival_time' ]
    # delete record[ 'departure_time' ]
    # delete record[ 'stop_id' ]
    # delete record[ 'stop_sequence' ]
    delete record[ 'stop_headsign' ]
    delete record[ 'pickup_type' ]
    delete record[ 'drop_off_type' ]
    delete record[ 'shape_dist_traveled' ]
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: turn the 1-based GTFS `stop-sequence` into a 0-based `idx`.
@$add_stoptime_idx = ->
  return as_transformer ( record, handler ) =>
    record[ 'idx' ] = ( parseInt record[ 'stop-sequence' ], 10 ) - 1
    delete record[ 'stop-sequence' ]
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: ROUTES
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop route columns that are never used downstream.
@$clean_route_record = ->
  return as_transformer ( record, handler ) =>
    # delete record[ 'route_id' ]
    # delete record[ 'agency_id' ]
    # delete record[ 'route_short_name' ]
    delete record[ 'route_long_name' ]
    delete record[ 'route_desc' ]
    # delete record[ 'route_type' ]
    delete record[ 'route_url' ]
    delete record[ 'route_color' ]
    delete record[ 'route_text_color' ]
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: STATIONS
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop stop/station columns that are never used downstream.
@$clean_station_record = ->
  return as_transformer ( record, handler ) =>
    # delete record[ 'stop_id' ]
    # delete record[ 'stop_code' ]
    # delete record[ 'stop_name' ]
    delete record[ 'stop_desc' ]
    # delete record[ 'stop_lat' ]
    # delete record[ 'stop_lon' ]
    delete record[ 'zone_id' ]
    delete record[ 'stop_url' ]
    delete record[ 'location_type' ]
    delete record[ 'parent_station' ]
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: cache the canonical station name as `~name`.
@$add_station_system_name = ->
  return as_transformer ( record, handler ) =>
    record[ '~name' ] = @_get_system_name record[ 'name' ]
    handler null, record
#===========================================================================================================
# SPECIFIC METHODS: TRIPS
#-----------------------------------------------------------------------------------------------------------
# Stream transform: drop trip columns that are never used downstream.
@$clean_trip_record = ->
  return as_transformer ( record, handler ) =>
    # delete record[ 'route_id' ]
    # delete record[ 'service_id' ]
    # delete record[ 'trip_id' ]
    # delete record[ 'trip_headsign' ]
    delete record[ 'trip_short_name' ]
    delete record[ 'direction_id' ]
    delete record[ 'block_id' ]
    delete record[ 'shape_id' ]
    handler null, record
#-----------------------------------------------------------------------------------------------------------
# Stream transform: cache the canonical headsign as `~headsign`.
@$add_headsign_system_name = ->
  return as_transformer ( record, handler ) =>
    record[ '~headsign' ] = @_get_system_name record[ 'headsign' ]
    handler null, record
############################################################################################################
# FINALIZATION
#-----------------------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------------------
# ID builder for agency records: the GTFS ID with trailing dashes and
# underscores stripped (`registry` is unused but keeps the builder
# signature uniform).
@$add_agency_ids = ( registry ) ->
  return ( record ) ->
    record[ 'id' ] = record[ '%gtfs-id' ].replace /[-_]+$/, ''
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for stoptime records:
# `gtfs-stop:<stop>/gtfs-trip:<trip>/idx:<idx>`.
@$add_stoptime_ids = ( registry ) ->
  return ( record ) ->
    gtfs_stop_id = record[ '%gtfs-stop-id' ]
    gtfs_trip_id = record[ '%gtfs-trip-id' ]
    idx = record[ 'idx' ]
    record[ 'id' ] = "gtfs-stop:#{gtfs_stop_id}/gtfs-trip:#{gtfs_trip_id}/idx:#{idx}"
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for route records: `route:<running idx>/<agency id>/name:<name>`.
# The referenced agency must already be present in `registry['old']` under
# its GTFS agency ID.
@$add_route_ids = ( registry ) ->
  route_idx = -1
  return ( record ) ->
    route_idx += 1
    gtfs_agency_id = record[ '%gtfs-agency-id' ]
    name = record[ 'name' ]
    agency = registry[ 'old' ][ gtfs_agency_id ]
    # Fix: the original did `return handler new Error ...`, but no `handler`
    # is in scope in this closure — that line itself would die with a
    # ReferenceError. Throw instead so the caller sees the real problem.
    # (Also removed the unused `gtfs_id` local.)
    throw new Error "unable to find agency with GTFS ID #{rpr gtfs_agency_id}" unless agency?
    agency_id = agency[ 'id' ]
    record[ 'id' ] = "route:#{route_idx}/#{agency_id}/name:#{name}"
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for station records; stations sharing one canonical name are
# disambiguated by a per-name running index (taken before the push, so the
# first station gets idx 0).
@$add_station_ids = ( registry ) ->
  return ( record ) ->
    stations_by_names = registry[ '%stations-by-names' ]?= {}
    sys_name = record[ '~name' ]
    stations = stations_by_names[ sys_name ]?= []
    station_idx = stations.length
    stations.push record
    record[ 'id' ] = "station/name:#{sys_name}/idx:#{station_idx}"
    # whisper '©4p1', record[ 'id' ]
    return record
#-----------------------------------------------------------------------------------------------------------
# ID builder for trip records:
# `gtfs-route-id:<route>/gtfs-trip-id:<trip>`.
@$add_trip_ids = ( registry ) ->
  return ( record ) ->
    gtfs_trip_id = record[ '%gtfs-id' ]
    gtfs_route_id = record[ '%gtfs-route-id' ]
    # route = registry[ 'old' ][ gtfs_route_id ]
    # sys_headsign = record[ '~headsign' ]
    # record[ 'id' ] = "station/headsign:#{sys_headsign}/gtfs-route-id:#{gtfs_route_id}/gtfs-trip-id:#{gtfs_trip_id}"
    ### does this make sense?? ###
    record[ 'id' ] = "gtfs-route-id:#{gtfs_route_id}/gtfs-trip-id:#{gtfs_trip_id}"
    # whisper record
    # whisper '©4p1', record[ 'id' ]
    return record
#-----------------------------------------------------------------------------------------------------------
# Assign final IDs to every record collected in `registry['old']` and copy
# the records into `registry['new']`, keyed by those IDs. `handler` receives
# an Error on duplicate IDs, `null` on success.
@finalize = ( registry, handler ) ->
  method_by_types =
    'agency': ( @$add_agency_ids registry ).bind @
    'stoptime': ( @$add_stoptime_ids registry ).bind @
    'route': ( @$add_route_ids registry ).bind @
    'station': ( @$add_station_ids registry ).bind @
    'trip': ( @$add_trip_ids registry ).bind @
  for _, record of registry[ 'old' ]
    method = method_by_types[ label = record[ '~label' ] ]
    unless method?
      # Fix: the original warning had an unbalanced backtick.
      warn "unable to locate method `add_#{label}_ids`; skipping"
      continue
    method record
    id = record[ 'id' ]
    unless id?
      # Fix: interpolating the record directly printed `[object Object]`;
      # use `rpr` like the rest of the file.
      warn "unable to find ID in #{rpr record}; skipping"
      continue
    if ( duplicate = registry[ 'new' ][ id ] )?
      return handler new Error """
        duplicate IDs:
        #{rpr duplicate}
        #{rpr record}"""
    registry[ 'new' ][ id ] = record
  handler null
############################################################################################################
# MAKE IT SO
#-----------------------------------------------------------------------------------------------------------
# Read GTFS `agency` records from the data source at `route`, normalize each
# CSV row, and enter it into `registry`; `handler` is called with `null` when
# the input ends.
@read_agencies = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'agencies'
  #.........................................................................................................
  # Completion is signalled from the *source* stream's `end` event.
  # NOTE(review): downstream pipes may still be flushing at that point —
  # confirm that registration has finished before `handler` fires.
  input.on 'end', ->
    info 'ok: agencies'
    return handler null
  #.........................................................................................................
  input.pipe parser
    .pipe T.$as_pods()
    .pipe @$clean_agency_record()
    .pipe T.$delete_prefix 'agency_'
    .pipe T.$set '%gtfs-type', 'agency'
    .pipe T.$rename 'id', '%gtfs-id'
    .pipe T.$dasherize_field_names()
    .pipe @$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS agencies...'
  return null
#-----------------------------------------------------------------------------------------------------------
# Read the GTFS `stop_times` file for filesystem `route` and normalize each record; `handler` is
# called with `null` once the raw input stream ends.
# NOTE(review): the `$register registry` step is commented out below, so stoptimes are currently
# *not* added to the registry — confirm this is intentional.
@read_stop_times = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'stop_times'
  #.........................................................................................................
  # Fires on the raw input's 'end' event — possibly before downstream transforms have flushed.
  input.on 'end', ->
    info 'ok: stoptimes'
    return handler null
  #.........................................................................................................
  input.pipe parser
    .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_stoptime_record()
    # .pipe @$fix_ids()
    .pipe T.$delete_prefix 'trip_'
    .pipe T.$add_n4j_system_properties 'node', 'stoptime'
    .pipe T.$dasherize_field_names()
    .pipe T.$rename 'id', '%gtfs-trip-id'
    .pipe T.$rename 'stop-id', '%gtfs-stop-id'
    .pipe @$add_stoptime_idx()
    # .pipe T.$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS stoptimes...'
  return null
#-----------------------------------------------------------------------------------------------------------
### TAINT name clash (filesystem route vs. GTFS route) ###
# Read the GTFS `routes` file for filesystem `route` (the parameter names a filesystem location,
# not a GTFS route — see the TAINT note above) and normalize each record; `handler` is called with
# `null` once the raw input stream ends.
# NOTE(review): the `$register registry` step is commented out below, so routes are currently not
# added to the registry — confirm this is intentional.
@read_routes = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'routes'
  #.........................................................................................................
  # Fires on the raw input's 'end' event — possibly before downstream transforms have flushed.
  input.on 'end', ->
    info 'ok: routes'
    return handler null
  #.........................................................................................................
  input.pipe parser
    .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_route_record()
    # .pipe @$fix_ids()
    .pipe T.$dasherize_field_names()
    .pipe T.$rename 'route-id', '%gtfs-id'
    .pipe T.$rename 'agency-id', '%gtfs-agency-id'
    .pipe T.$rename 'route-short-name', 'name'
    .pipe T.$add_n4j_system_properties 'node', 'route'
    # .pipe T.$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS routes...'
  return null
#-----------------------------------------------------------------------------------------------------------
# Read the GTFS `stops` file for filesystem `route`, normalize each record (lat/lon converted via
# `$convert_latlon`), and register it in `registry`; `handler` is called with `null` once the raw
# input stream ends.
# NOTE(review): '%gtfs-type' is set to the plural 'stops' here, while read_agencies uses the
# singular 'agency' — confirm whether downstream consumers expect the plural form.
@read_stops = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'stops'
  #.........................................................................................................
  # Fires on the raw input's 'end' event — possibly before downstream transforms have flushed.
  input.on 'end', ->
    info 'ok: stops'
    return handler null
  #.........................................................................................................
  input.pipe parser
    # .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_station_record()
    .pipe T.$delete_prefix 'stop_'
    .pipe T.$set '%gtfs-type', 'stops'
    # .pipe T.$copy 'name', '%gtfs-name'
    # .pipe @$normalize_station_name()
    # .pipe @$add_station_system_name()
    .pipe T.$rename 'id', '%gtfs-id'
    # .pipe T.$add_n4j_system_properties 'node', 'station'
    .pipe @$convert_latlon()
    .pipe @$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS stops...'
  return null
#-----------------------------------------------------------------------------------------------------------
# Read the GTFS `trips` file for filesystem `route` and normalize each record (headsign copied,
# normalized, and given a system name); `handler` is called with `null` once the raw input stream
# ends.
# NOTE(review): the `$register registry` step is commented out below, so trips are currently not
# added to the registry — confirm this is intentional.
@read_trips = ( route, registry, handler ) ->
  parser = new_csv_parser()
  input = create_readstream route, 'trips'
  #.........................................................................................................
  # Fires on the raw input's 'end' event — possibly before downstream transforms have flushed.
  input.on 'end', ->
    info 'ok: trips'
    return handler null
  #.........................................................................................................
  input.pipe parser
    .pipe T.$skip global_data_limit
    .pipe T.$as_pods()
    .pipe @$clean_trip_record()
    # .pipe @$fix_ids()
    .pipe T.$delete_prefix 'trip_'
    .pipe T.$dasherize_field_names()
    .pipe T.$rename 'id', '%gtfs-id'
    .pipe T.$rename 'route-id', '%gtfs-route-id'
    .pipe T.$rename 'service-id', '%gtfs-service-id'
    .pipe T.$copy 'headsign', '%gtfs-headsign'
    .pipe @$normalize_headsign()
    .pipe @$add_headsign_system_name()
    .pipe T.$add_n4j_system_properties 'node', 'trip'
    # .pipe T.$register registry
    .pipe T.$show_sample input
    # .pipe T.$show_and_quit()
  #.........................................................................................................
  whisper 'reading GTFS trips...'
  return null
#===========================================================================================================
# READ METHOD
#-----------------------------------------------------------------------------------------------------------
# For each configured datasource, collect one reader task per GTFS type that has both a source
# file and a reader method, then run the collected tasks sequentially and call
# `handler null, registry` when done.
# NOTE(review): `tasks` (and the message lists) are re-initialized on *every* iteration of the
# outer loop, yet ASYNC.series runs only once, after the loop — with more than one entry in
# `datasource_infos`, only the last source's tasks would execute. Confirm whether
# `datasource_infos` is guaranteed to hold a single entry, or whether this is a latent bug.
@main = ( registry, handler ) ->
  #.........................................................................................................
  for source_name, route_by_types of datasource_infos
    tasks = []
    no_source = []
    no_method = []
    ok_types = []
    #.......................................................................................................
    for gtfs_type in options[ 'data' ][ 'gtfs-types' ]
      route = route_by_types[ gtfs_type ]
      unless route?
        no_source.push "skipping #{source_name}/#{gtfs_type} (no source file)"
        continue
      help "found data source for #{source_name}/#{gtfs_type}"
      #.....................................................................................................
      # Map GTFS type to its reader; most readers are currently disabled (commented out).
      method = null
      switch gtfs_type
        when 'agency' then method = @read_agencies
        # when 'calendar_dates' then method = @read_calendar_dates
        # when 'calendar' then method = @read_calendar
        # when 'routes' then method = @read_routes
        # # when 'stop_times' then method = @read_stop_times
        when 'stops' then method = @read_stops
        # when 'transfers' then method = @read_transfers
        # when 'trips' then method = @read_trips
      unless method?
        no_method.push "no method to read GTFS data of type #{rpr gtfs_type}; skipping"
        continue
      method = method.bind @
      ok_types.push gtfs_type
      #.....................................................................................................
      # Closure captures `method`/`route` per iteration (CoffeeScript `do`).
      do ( method, route ) =>
        tasks.push ( async_handler ) => method route, registry, async_handler
    #.......................................................................................................
    for messages in [ no_source, no_method, ]
      for message in messages
        warn message
    #.......................................................................................................
    info "reading data for #{ok_types.length} type(s)"
    info "  (#{ok_types.join ', '})"
  #.........................................................................................................
  # limit = options[ 'stream-transform' ]?[ 'parallel' ] ? 1
  # NOTE(review): throwing inside an async callback cannot be caught by callers of @main;
  # consider routing the error through `handler` instead.
  ASYNC.series tasks, ( error ) =>
    throw error if error?
    handler null, registry
  #.........................................................................................................
  return null
############################################################################################################
# HELPERS
#===========================================================================================================
############################################################################################################
# When this file is run directly (not require'd), kick off the main routine.
# NOTE(review): `@main` in this file is declared as `( registry, handler ) ->`, but it is invoked
# here with a single callback — `registry` would receive the callback and `handler` would be
# undefined. Confirm whether a different `@main` is in scope here or whether this call site is
# stale.
unless module.parent?
  @main ( error, registry ) ->
    throw error if error?
    info registry
|
[
{
"context": "###################################\n#\n# Created by Markus on 26/10/2017.\n#\n################################",
"end": 75,
"score": 0.9994184374809265,
"start": 69,
"tag": "NAME",
"value": "Markus"
},
{
"context": "##############################\n_designer_email = \"d... | server/init_database.coffee | MooqitaSFH/worklearn | 0 | #####################################################
#
# Created by Markus on 26/10/2017.
#
#####################################################
#####################################################
_designer_email = "designer@mooqita.org"
_learner_email = "@uni.edu"
#####################################################
_organization_title_base = ["Organization "]
#####################################################
_challenge_title = "The test challenge"
_challenge_title_base = ["Challenge "]
#####################################################
@run_database_test_bed = () ->
console.log "####################################################"
console.log "## running test bed ##"
console.log "####################################################"
designer = _test_get_designer _designer_email
learners = _test_get_learners _learner_email
challenge = _test_challenge _challenge_title, designer
solutions = _test_solutions challenge, learners
_test_reviews challenge, learners
_test_feedbacks solutions
# Add more challenges and jobs for an nlp test
organizations = _test_organizations designer
_test_jobs(designer, organizations)
console.log "####################################################"
console.log "## test run finished ##"
console.log "####################################################"
# TODO: test reviews when there is a solution like for tutors
#####################################################
_test_get_designer = (email) ->
user = Accounts.findUserByEmail(email)
if user
return user
_test_user_creation email, "organization"
user = Accounts.findUserByEmail email
return user
#####################################################
_test_get_learners = (email_template) ->
learners = []
for i in [1, 2, 3, 4, 5, 6, 7, 8, 9]
mail = String(i) + email_template
profile_id = _test_user_creation mail, "learner"
user_id = get_document_owner Profiles, profile_id
user = Meteor.users.findOne(user_id)
learners.push user
return learners
#####################################################
_test_user_creation = (mail) ->
user = Accounts.findUserByEmail(mail)
if user
profile = get_document user, "owner", Profiles
return profile._id
user =
email: mail
password: "none"
user_id = Accounts.createUser user
user = Meteor.users.findOne user_id
console.log "Test user creation: " + get_user_mail user
profile = get_profile user
profile_id = profile._id
big_five = randomize_big_five(big_five_15)
modify_field_unprotected Profiles, profile_id, "avatar", faker.image.avatar()
modify_field_unprotected Profiles, profile_id, "given_name", faker.name.firstName()
modify_field_unprotected Profiles, profile_id, "family_name", faker.name.lastName()
modify_field_unprotected Profiles, profile_id, "middle_name", faker.name.firstName()
modify_field_unprotected Profiles, profile_id, "city", faker.address.city()
modify_field_unprotected Profiles, profile_id, "country", faker.address.country()
modify_field_unprotected Profiles, profile_id, "state", faker.address.state()
modify_field_unprotected Profiles, profile_id, "job_interested", true
modify_field_unprotected Profiles, profile_id, "job_type", Random.choice ["free", "full"]
modify_field_unprotected Profiles, profile_id, "job_locale", Random.choice ["remote", "local"]
modify_field_unprotected Profiles, profile_id, "hours_per_week", Math.round(Random.fraction() * 40)
modify_field_unprotected Profiles, profile_id, "resume", faker.lorem.paragraphs 2
modify_field_unprotected Profiles, profile_id, "big_five", big_five
modify_field_unprotected Profiles, profile_id, "test_object", true
return profile_id
#####################################################
_test_organizations = (designer) ->
organizations = []
for i in [1..10]
name = _organization_title_base + i
organization = _test_organization(name, designer)
organizations.push(organization)
return organizations
#####################################################
_test_organization = (name, designer) ->
organization = Organizations.findOne({name:name})
if organization
return organization
organization_id = gen_organization(null, designer)
content = get_profile_name(get_profile(designer)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Organizations, organization_id, "name", name
modify_field_unprotected Organizations, organization_id, "description", content
modify_field_unprotected Organizations, organization_id, "avatar", faker.image.avatar()
modify_field_unprotected Organizations, organization_id, "test_object", true
organization = get_document designer, OWNER, Organizations, {_id:organization_id}
return organization
#####################################################
_test_jobs = (designer, organizations) ->
jobs = []
for org in organizations
job = _test_job(designer, org)
jobs.push(job)
return jobs
#####################################################
_test_job = (designer, organization) ->
title = get_profile_name(get_profile(designer)) + " for: " + organization.name
job = Jobs.findOne({title:title})
if job
return job
job_id = gen_job(null, designer)
content = title
content += faker.lorem.paragraphs(3)
role = Random.choice ["design", "ops", "sales", "other", "marketing", "dev"]
challenges = _test_challenges(designer, 3)
challenge_ids = (c._id for c in challenges)
modify_field_unprotected Jobs, job_id, "title", title
modify_field_unprotected Jobs, job_id, "description", content
modify_field_unprotected Jobs, job_id, "organization_id", organization._id
modify_field_unprotected Jobs, job_id, "role", role
modify_field_unprotected Jobs, job_id, "team", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "idea", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "social", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "process", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "strategic", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "contributor", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "challenge_ids", challenge_ids
modify_field_unprotected Jobs, job_id, "test_object", true
job = get_document designer, OWNER, Jobs, {_id:job_id}
return job
#####################################################
_test_challenges = (designer, count) ->
challenges = []
for i in [1..count]
title = faker.lorem.sentence()
challenges.push(_test_challenge(title, designer))
return challenges
#####################################################
_test_challenge = (title, designer) ->
filter = {title: title}
challenge = get_document designer, OWNER, Challenges, filter
if challenge
return challenge
challenge_id = gen_challenge designer
content = get_profile_name(designer) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Challenges, challenge_id, "title", title
modify_field_unprotected Challenges, challenge_id, "content", content
modify_field_unprotected Challenges, challenge_id, "test_object", true
#TODO: add test for material
challenge = Challenges.findOne challenge_id
challenge_id = finish_challenge challenge, designer
challenge = get_document designer, OWNER, Challenges, {_id:challenge_id}
return challenge
#####################################################
_test_solutions = (challenge, learners) ->
solutions = []
for s in learners
solution = _test_solution challenge, s
solutions.push solution
return solutions
#####################################################
_test_solution = (challenge, learner) ->
solution = get_document learner._id, OWNER, "solutions", {challenge_id: challenge._id}
if solution
return solution
solution_id = gen_solution challenge, learner
content = get_profile_name(get_profile(learner)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Solutions, solution_id, "content", content
modify_field_unprotected Solutions, solution_id, "test_object", true
solution = get_document learner, OWNER, Solutions, {_id: solution_id}
solution_id = finish_solution solution, learner
#TODO: add reopen fail test
#TODO: add reopen success test
return solution_id
#####################################################
_test_reviews = (challenge, learners) ->
reviews = []
for i in [1..challenge.num_reviews]
for learner in learners
filter =
challenge_id: challenge._id
review_crs = get_documents learner, OWNER, Reviews, filter
if review_crs.count() >= challenge.num_reviews
console.log "review count satisfied."
continue
review_id = _test_review challenge, learner
reviews.push review_id
return reviews
#####################################################
_test_review = (challenge, learner) ->
res = assign_review challenge, learner
recipient = get_document_owner(Solutions, res.solution_id)
content = get_profile_name(get_profile(learner)) + " for "
content += get_profile_name(get_profile(recipient)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Reviews, res.review_id, "content", content
modify_field_unprotected Reviews, res.review_id, "rating", Random.choice [1, 2, 3, 4, 5]
modify_field_unprotected Reviews, res.review_id, "test_object", true
review = get_document learner, OWNER, Reviews, {_id: res.review_id}
review_id = finish_review review, learner
#TODO: add reopen fail test
#TODO: add reopen success test
return review_id
#####################################################
_test_feedbacks = (solutions) ->
feedbacks = []
for solution_id in solutions
solution = Solutions.findOne(solution_id)
learner = get_document_owner Solutions, solution_id
filter = {solution_id: solution_id}
reviews = get_documents learner, RECIPIENT, Reviews, filter
for review in reviews.fetch()
feedback = _test_feedback solution, review, learner
feedbacks.push feedback
return feedbacks
#####################################################
_test_feedback = (solution, review, learner) ->
feedback_id = gen_feedback solution, review, learner
feedback = Feedback.findOne(feedback_id)
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
if feedback.published == true
return
recipient = get_document_owner(Reviews, feedback.review_id)
content = get_profile_name(get_profile(learner)) + " for "
content += get_profile_name(get_profile(recipient)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Feedback, feedback_id, "content", content
modify_field_unprotected Feedback, feedback_id, "rating", Random.choice [1, 2, 3, 4, 5]
modify_field_unprotected Feedback, feedback_id, "test_object", true
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
finish_feedback feedback, learner
reopen_feedback feedback, learner
finish_feedback feedback, learner
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
return feedback_id
| 181138 | #####################################################
#
# Created by <NAME> on 26/10/2017.
#
#####################################################
#####################################################
_designer_email = "<EMAIL>"
_learner_email = <EMAIL>"
#####################################################
_organization_title_base = ["Organization "]
#####################################################
_challenge_title = "The test challenge"
_challenge_title_base = ["Challenge "]
#####################################################
@run_database_test_bed = () ->
console.log "####################################################"
console.log "## running test bed ##"
console.log "####################################################"
designer = _test_get_designer _designer_email
learners = _test_get_learners _learner_email
challenge = _test_challenge _challenge_title, designer
solutions = _test_solutions challenge, learners
_test_reviews challenge, learners
_test_feedbacks solutions
# Add more challenges and jobs for an nlp test
organizations = _test_organizations designer
_test_jobs(designer, organizations)
console.log "####################################################"
console.log "## test run finished ##"
console.log "####################################################"
# TODO: test reviews when there is a solution like for tutors
#####################################################
_test_get_designer = (email) ->
user = Accounts.findUserByEmail(email)
if user
return user
_test_user_creation email, "organization"
user = Accounts.findUserByEmail email
return user
#####################################################
_test_get_learners = (email_template) ->
learners = []
for i in [1, 2, 3, 4, 5, 6, 7, 8, 9]
mail = String(i) + email_template
profile_id = _test_user_creation mail, "learner"
user_id = get_document_owner Profiles, profile_id
user = Meteor.users.findOne(user_id)
learners.push user
return learners
#####################################################
_test_user_creation = (mail) ->
user = Accounts.findUserByEmail(mail)
if user
profile = get_document user, "owner", Profiles
return profile._id
user =
email: mail
password: "<PASSWORD>"
user_id = Accounts.createUser user
user = Meteor.users.findOne user_id
console.log "Test user creation: " + get_user_mail user
profile = get_profile user
profile_id = profile._id
big_five = randomize_big_five(big_five_15)
modify_field_unprotected Profiles, profile_id, "avatar", faker.image.avatar()
modify_field_unprotected Profiles, profile_id, "given_name", faker.name.firstName()
modify_field_unprotected Profiles, profile_id, "family_name", faker.name.lastName()
modify_field_unprotected Profiles, profile_id, "middle_name", faker.name.firstName()
modify_field_unprotected Profiles, profile_id, "city", faker.address.city()
modify_field_unprotected Profiles, profile_id, "country", faker.address.country()
modify_field_unprotected Profiles, profile_id, "state", faker.address.state()
modify_field_unprotected Profiles, profile_id, "job_interested", true
modify_field_unprotected Profiles, profile_id, "job_type", Random.choice ["free", "full"]
modify_field_unprotected Profiles, profile_id, "job_locale", Random.choice ["remote", "local"]
modify_field_unprotected Profiles, profile_id, "hours_per_week", Math.round(Random.fraction() * 40)
modify_field_unprotected Profiles, profile_id, "resume", faker.lorem.paragraphs 2
modify_field_unprotected Profiles, profile_id, "big_five", big_five
modify_field_unprotected Profiles, profile_id, "test_object", true
return profile_id
#####################################################
_test_organizations = (designer) ->
organizations = []
for i in [1..10]
name = _organization_title_base + i
organization = _test_organization(name, designer)
organizations.push(organization)
return organizations
#####################################################
_test_organization = (name, designer) ->
organization = Organizations.findOne({name:name})
if organization
return organization
organization_id = gen_organization(null, designer)
content = get_profile_name(get_profile(designer)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Organizations, organization_id, "name", name
modify_field_unprotected Organizations, organization_id, "description", content
modify_field_unprotected Organizations, organization_id, "avatar", faker.image.avatar()
modify_field_unprotected Organizations, organization_id, "test_object", true
organization = get_document designer, OWNER, Organizations, {_id:organization_id}
return organization
#####################################################
_test_jobs = (designer, organizations) ->
jobs = []
for org in organizations
job = _test_job(designer, org)
jobs.push(job)
return jobs
#####################################################
_test_job = (designer, organization) ->
title = get_profile_name(get_profile(designer)) + " for: " + organization.name
job = Jobs.findOne({title:title})
if job
return job
job_id = gen_job(null, designer)
content = title
content += faker.lorem.paragraphs(3)
role = Random.choice ["design", "ops", "sales", "other", "marketing", "dev"]
challenges = _test_challenges(designer, 3)
challenge_ids = (c._id for c in challenges)
modify_field_unprotected Jobs, job_id, "title", title
modify_field_unprotected Jobs, job_id, "description", content
modify_field_unprotected Jobs, job_id, "organization_id", organization._id
modify_field_unprotected Jobs, job_id, "role", role
modify_field_unprotected Jobs, job_id, "team", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "idea", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "social", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "process", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "strategic", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "contributor", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "challenge_ids", challenge_ids
modify_field_unprotected Jobs, job_id, "test_object", true
job = get_document designer, OWNER, Jobs, {_id:job_id}
return job
#####################################################
_test_challenges = (designer, count) ->
challenges = []
for i in [1..count]
title = faker.lorem.sentence()
challenges.push(_test_challenge(title, designer))
return challenges
#####################################################
_test_challenge = (title, designer) ->
filter = {title: title}
challenge = get_document designer, OWNER, Challenges, filter
if challenge
return challenge
challenge_id = gen_challenge designer
content = get_profile_name(designer) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Challenges, challenge_id, "title", title
modify_field_unprotected Challenges, challenge_id, "content", content
modify_field_unprotected Challenges, challenge_id, "test_object", true
#TODO: add test for material
challenge = Challenges.findOne challenge_id
challenge_id = finish_challenge challenge, designer
challenge = get_document designer, OWNER, Challenges, {_id:challenge_id}
return challenge
#####################################################
_test_solutions = (challenge, learners) ->
solutions = []
for s in learners
solution = _test_solution challenge, s
solutions.push solution
return solutions
#####################################################
_test_solution = (challenge, learner) ->
solution = get_document learner._id, OWNER, "solutions", {challenge_id: challenge._id}
if solution
return solution
solution_id = gen_solution challenge, learner
content = get_profile_name(get_profile(learner)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Solutions, solution_id, "content", content
modify_field_unprotected Solutions, solution_id, "test_object", true
solution = get_document learner, OWNER, Solutions, {_id: solution_id}
solution_id = finish_solution solution, learner
#TODO: add reopen fail test
#TODO: add reopen success test
return solution_id
#####################################################
_test_reviews = (challenge, learners) ->
reviews = []
for i in [1..challenge.num_reviews]
for learner in learners
filter =
challenge_id: challenge._id
review_crs = get_documents learner, OWNER, Reviews, filter
if review_crs.count() >= challenge.num_reviews
console.log "review count satisfied."
continue
review_id = _test_review challenge, learner
reviews.push review_id
return reviews
#####################################################
_test_review = (challenge, learner) ->
res = assign_review challenge, learner
recipient = get_document_owner(Solutions, res.solution_id)
content = get_profile_name(get_profile(learner)) + " for "
content += get_profile_name(get_profile(recipient)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Reviews, res.review_id, "content", content
modify_field_unprotected Reviews, res.review_id, "rating", Random.choice [1, 2, 3, 4, 5]
modify_field_unprotected Reviews, res.review_id, "test_object", true
review = get_document learner, OWNER, Reviews, {_id: res.review_id}
review_id = finish_review review, learner
#TODO: add reopen fail test
#TODO: add reopen success test
return review_id
#####################################################
_test_feedbacks = (solutions) ->
feedbacks = []
for solution_id in solutions
solution = Solutions.findOne(solution_id)
learner = get_document_owner Solutions, solution_id
filter = {solution_id: solution_id}
reviews = get_documents learner, RECIPIENT, Reviews, filter
for review in reviews.fetch()
feedback = _test_feedback solution, review, learner
feedbacks.push feedback
return feedbacks
#####################################################
_test_feedback = (solution, review, learner) ->
feedback_id = gen_feedback solution, review, learner
feedback = Feedback.findOne(feedback_id)
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
if feedback.published == true
return
recipient = get_document_owner(Reviews, feedback.review_id)
content = get_profile_name(get_profile(learner)) + " for "
content += get_profile_name(get_profile(recipient)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Feedback, feedback_id, "content", content
modify_field_unprotected Feedback, feedback_id, "rating", Random.choice [1, 2, 3, 4, 5]
modify_field_unprotected Feedback, feedback_id, "test_object", true
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
finish_feedback feedback, learner
reopen_feedback feedback, learner
finish_feedback feedback, learner
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
return feedback_id
| true | #####################################################
#
# Created by PI:NAME:<NAME>END_PI on 26/10/2017.
#
#####################################################
#####################################################
_designer_email = "PI:EMAIL:<EMAIL>END_PI"
_learner_email = PI:EMAIL:<EMAIL>END_PI"
#####################################################
_organization_title_base = ["Organization "]
#####################################################
_challenge_title = "The test challenge"
_challenge_title_base = ["Challenge "]
#####################################################
@run_database_test_bed = () ->
console.log "####################################################"
console.log "## running test bed ##"
console.log "####################################################"
designer = _test_get_designer _designer_email
learners = _test_get_learners _learner_email
challenge = _test_challenge _challenge_title, designer
solutions = _test_solutions challenge, learners
_test_reviews challenge, learners
_test_feedbacks solutions
# Add more challenges and jobs for an nlp test
organizations = _test_organizations designer
_test_jobs(designer, organizations)
console.log "####################################################"
console.log "## test run finished ##"
console.log "####################################################"
# TODO: test reviews when there is a solution like for tutors
#####################################################
_test_get_designer = (email) ->
user = Accounts.findUserByEmail(email)
if user
return user
_test_user_creation email, "organization"
user = Accounts.findUserByEmail email
return user
#####################################################
_test_get_learners = (email_template) ->
learners = []
for i in [1, 2, 3, 4, 5, 6, 7, 8, 9]
mail = String(i) + email_template
profile_id = _test_user_creation mail, "learner"
user_id = get_document_owner Profiles, profile_id
user = Meteor.users.findOne(user_id)
learners.push user
return learners
#####################################################
_test_user_creation = (mail) ->
user = Accounts.findUserByEmail(mail)
if user
profile = get_document user, "owner", Profiles
return profile._id
user =
email: mail
password: "PI:PASSWORD:<PASSWORD>END_PI"
user_id = Accounts.createUser user
user = Meteor.users.findOne user_id
console.log "Test user creation: " + get_user_mail user
profile = get_profile user
profile_id = profile._id
big_five = randomize_big_five(big_five_15)
modify_field_unprotected Profiles, profile_id, "avatar", faker.image.avatar()
modify_field_unprotected Profiles, profile_id, "given_name", faker.name.firstName()
modify_field_unprotected Profiles, profile_id, "family_name", faker.name.lastName()
modify_field_unprotected Profiles, profile_id, "middle_name", faker.name.firstName()
modify_field_unprotected Profiles, profile_id, "city", faker.address.city()
modify_field_unprotected Profiles, profile_id, "country", faker.address.country()
modify_field_unprotected Profiles, profile_id, "state", faker.address.state()
modify_field_unprotected Profiles, profile_id, "job_interested", true
modify_field_unprotected Profiles, profile_id, "job_type", Random.choice ["free", "full"]
modify_field_unprotected Profiles, profile_id, "job_locale", Random.choice ["remote", "local"]
modify_field_unprotected Profiles, profile_id, "hours_per_week", Math.round(Random.fraction() * 40)
modify_field_unprotected Profiles, profile_id, "resume", faker.lorem.paragraphs 2
modify_field_unprotected Profiles, profile_id, "big_five", big_five
modify_field_unprotected Profiles, profile_id, "test_object", true
return profile_id
#####################################################
_test_organizations = (designer) ->
organizations = []
for i in [1..10]
name = _organization_title_base + i
organization = _test_organization(name, designer)
organizations.push(organization)
return organizations
#####################################################
_test_organization = (name, designer) ->
organization = Organizations.findOne({name:name})
if organization
return organization
organization_id = gen_organization(null, designer)
content = get_profile_name(get_profile(designer)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Organizations, organization_id, "name", name
modify_field_unprotected Organizations, organization_id, "description", content
modify_field_unprotected Organizations, organization_id, "avatar", faker.image.avatar()
modify_field_unprotected Organizations, organization_id, "test_object", true
organization = get_document designer, OWNER, Organizations, {_id:organization_id}
return organization
#####################################################
_test_jobs = (designer, organizations) ->
jobs = []
for org in organizations
job = _test_job(designer, org)
jobs.push(job)
return jobs
#####################################################
_test_job = (designer, organization) ->
title = get_profile_name(get_profile(designer)) + " for: " + organization.name
job = Jobs.findOne({title:title})
if job
return job
job_id = gen_job(null, designer)
content = title
content += faker.lorem.paragraphs(3)
role = Random.choice ["design", "ops", "sales", "other", "marketing", "dev"]
challenges = _test_challenges(designer, 3)
challenge_ids = (c._id for c in challenges)
modify_field_unprotected Jobs, job_id, "title", title
modify_field_unprotected Jobs, job_id, "description", content
modify_field_unprotected Jobs, job_id, "organization_id", organization._id
modify_field_unprotected Jobs, job_id, "role", role
modify_field_unprotected Jobs, job_id, "team", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "idea", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "social", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "process", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "strategic", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "contributor", Random.choice [1,0]
modify_field_unprotected Jobs, job_id, "challenge_ids", challenge_ids
modify_field_unprotected Jobs, job_id, "test_object", true
job = get_document designer, OWNER, Jobs, {_id:job_id}
return job
#####################################################
_test_challenges = (designer, count) ->
challenges = []
for i in [1..count]
title = faker.lorem.sentence()
challenges.push(_test_challenge(title, designer))
return challenges
#####################################################
_test_challenge = (title, designer) ->
filter = {title: title}
challenge = get_document designer, OWNER, Challenges, filter
if challenge
return challenge
challenge_id = gen_challenge designer
content = get_profile_name(designer) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Challenges, challenge_id, "title", title
modify_field_unprotected Challenges, challenge_id, "content", content
modify_field_unprotected Challenges, challenge_id, "test_object", true
#TODO: add test for material
challenge = Challenges.findOne challenge_id
challenge_id = finish_challenge challenge, designer
challenge = get_document designer, OWNER, Challenges, {_id:challenge_id}
return challenge
#####################################################
_test_solutions = (challenge, learners) ->
solutions = []
for s in learners
solution = _test_solution challenge, s
solutions.push solution
return solutions
#####################################################
_test_solution = (challenge, learner) ->
solution = get_document learner._id, OWNER, "solutions", {challenge_id: challenge._id}
if solution
return solution
solution_id = gen_solution challenge, learner
content = get_profile_name(get_profile(learner)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Solutions, solution_id, "content", content
modify_field_unprotected Solutions, solution_id, "test_object", true
solution = get_document learner, OWNER, Solutions, {_id: solution_id}
solution_id = finish_solution solution, learner
#TODO: add reopen fail test
#TODO: add reopen success test
return solution_id
#####################################################
_test_reviews = (challenge, learners) ->
reviews = []
for i in [1..challenge.num_reviews]
for learner in learners
filter =
challenge_id: challenge._id
review_crs = get_documents learner, OWNER, Reviews, filter
if review_crs.count() >= challenge.num_reviews
console.log "review count satisfied."
continue
review_id = _test_review challenge, learner
reviews.push review_id
return reviews
#####################################################
_test_review = (challenge, learner) ->
res = assign_review challenge, learner
recipient = get_document_owner(Solutions, res.solution_id)
content = get_profile_name(get_profile(learner)) + " for "
content += get_profile_name(get_profile(recipient)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Reviews, res.review_id, "content", content
modify_field_unprotected Reviews, res.review_id, "rating", Random.choice [1, 2, 3, 4, 5]
modify_field_unprotected Reviews, res.review_id, "test_object", true
review = get_document learner, OWNER, Reviews, {_id: res.review_id}
review_id = finish_review review, learner
#TODO: add reopen fail test
#TODO: add reopen success test
return review_id
#####################################################
_test_feedbacks = (solutions) ->
feedbacks = []
for solution_id in solutions
solution = Solutions.findOne(solution_id)
learner = get_document_owner Solutions, solution_id
filter = {solution_id: solution_id}
reviews = get_documents learner, RECIPIENT, Reviews, filter
for review in reviews.fetch()
feedback = _test_feedback solution, review, learner
feedbacks.push feedback
return feedbacks
#####################################################
_test_feedback = (solution, review, learner) ->
feedback_id = gen_feedback solution, review, learner
feedback = Feedback.findOne(feedback_id)
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
if feedback.published == true
return
recipient = get_document_owner(Reviews, feedback.review_id)
content = get_profile_name(get_profile(learner)) + " for "
content += get_profile_name(get_profile(recipient)) + ": "
content += faker.lorem.paragraphs(3)
modify_field_unprotected Feedback, feedback_id, "content", content
modify_field_unprotected Feedback, feedback_id, "rating", Random.choice [1, 2, 3, 4, 5]
modify_field_unprotected Feedback, feedback_id, "test_object", true
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
finish_feedback feedback, learner
reopen_feedback feedback, learner
finish_feedback feedback, learner
feedback = get_document learner, OWNER, Feedback, {_id:feedback_id}
return feedback_id
|
[
{
"context": "odal-url='#{modalUrl}'>\")[0]\n\n formText = \"My Lovely Form\"\n server = sinon.fakeServer.create()\n s",
"end": 299,
"score": 0.7165780067443848,
"start": 288,
"tag": "PASSWORD",
"value": "Lovely Form"
}
] | tests/features/remote_modal_view_spec.coffee | th3james/BellHopper | 0 | describe 'Remote Modal View - Feature', ->
describe "#render", ->
it "requests the HTML for the modal from 'data-modal-url' and renders it
to the modal body", (done) ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")[0]
formText = "My Lovely Form"
server = sinon.fakeServer.create()
server.respondWith(modalUrl,
[200, { "Content-Type": "text/html" }, "<div>#{formText}</div>"]
)
view = new RemoteModalView(srcEl)
view.render().done(->
modalText = $.trim(view.$el.text())
expect(modalText).toEqual(formText)
).fail(->
expected(false).toBeTruthy()
).always(->
done()
server.restore()
view.close()
)
server.respond()
it "renders an error message to the modal if the modal view request
fails", (done) ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")
server = sinon.fakeServer.create()
server.respondWith(modalUrl,
[500, { "Content-Type": "text/html" }, "ERROROROR"]
)
view = new RemoteModalView(srcEl)
view.render().done(->
expect(false).toBeTruthy()
).fail((err)->
expect(err.message).toEqual(
"Unable to load remote view from '#{modalUrl}'"
)
modalText = $.trim(view.$el.text())
expect(modalText).toEqual(
"Unable to load content, please reload the page"
)
).always(->
view.close()
server.restore()
done()
)
server.respond()
describe "on a view with a remote form", ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")[0]
formPostUrl = "/links"
form = """
<form action="#{formPostUrl}">
<input type="submit">
<button data-action="cancel">Cancel</button>
</form>
"""
formResponse = [200, { "Content-Type": "text/html" }, form]
describe "when clicking submit", ->
describe "if the request succeeds", ->
successJSON = { status: "Success" }
successResponse = [201, { "Content-Type": "text/json" }, JSON.stringify(
successJSON
)]
it "closes the view", (done) ->
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(modalUrl, formResponse)
server.respondWith(formPostUrl, successResponse)
view = new RemoteModalView(srcEl)
closeSpy = sinon.spy(view, 'close')
view.render().done(->
view.$el.find('input[type="submit"]').click()
expect(server.requests.length).toEqual(2)
expect(server.requests[1].url).toEqual(formPostUrl)
server.respond()
expect(closeSpy.callCount).toEqual(1)
).fail(->
expect(false).toBeTruthy()
).always(->
done()
server.restore()
)
server.respond()
describe "if the request returns an error form", ->
newForm = "newForm"
errorJson = {
status: "UnprocessableEntity"
template: newForm
}
errorResponse = [422, { "Content-Type": "text/json" }, JSON.stringify(
errorJson
)]
it "renders the error form", (done) ->
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(modalUrl, formResponse)
server.respondWith(formPostUrl, errorResponse)
view = new RemoteModalView(srcEl)
view.render().done(->
view.$el.find('input[type="submit"]').click()
expect(server.requests.length).toEqual(2)
expect(server.requests[1].url).toEqual(formPostUrl)
server.respond()
expect(view.$el.html()).toMatch(///.*#{newForm}.*///)
).fail(->
expect(false).toBeTruthy()
).always(->
done()
server.restore()
view.close()
)
server.respond()
it "closes the modal when cancel is clicked", ->
view = new RemoteModalView(srcEl)
view.replaceModalContent(form)
modalCount = $('.modal').length
view.$el.find('[data-action="cancel"]').click()
expect($('.modal').length).toEqual(modalCount - 1)
describe "for a view which mutates models, when the form is submitted", ->
modalUrl = "/links/new"
mutatedModels = "investigations"
srcEl = $("""
<div data-modal-url="#{modalUrl}" data-mutates-models="#{mutatedModels}">
""")[0]
formPostUrl = "/links"
form = """
<form action="#{formPostUrl}"><input type="submit"/></form>
"""
it "calls RemoteHelper.triggerChange() with the mutated model", ->
view = new RemoteModalView(srcEl)
view.replaceModalContent(form)
viewCloseSpy = sinon.spy(view, 'close')
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(formPostUrl,
[201, { "Content-Type": "text/json" }, JSON.stringify(
{status: "Success"}
)])
triggerChangeStub = sinon.stub(
RemoteHelpers, 'triggerChange', ->)
view.$el.find('input[type="submit"]').click()
server.respond()
try
expect(
triggerChangeStub.calledWith(mutatedModels)
).toBeTruthy()
expect(
viewCloseSpy.callCount
).toEqual(1)
finally
triggerChangeStub.restore()
server.restore()
| 223512 | describe 'Remote Modal View - Feature', ->
describe "#render", ->
it "requests the HTML for the modal from 'data-modal-url' and renders it
to the modal body", (done) ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")[0]
formText = "My <PASSWORD>"
server = sinon.fakeServer.create()
server.respondWith(modalUrl,
[200, { "Content-Type": "text/html" }, "<div>#{formText}</div>"]
)
view = new RemoteModalView(srcEl)
view.render().done(->
modalText = $.trim(view.$el.text())
expect(modalText).toEqual(formText)
).fail(->
expected(false).toBeTruthy()
).always(->
done()
server.restore()
view.close()
)
server.respond()
it "renders an error message to the modal if the modal view request
fails", (done) ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")
server = sinon.fakeServer.create()
server.respondWith(modalUrl,
[500, { "Content-Type": "text/html" }, "ERROROROR"]
)
view = new RemoteModalView(srcEl)
view.render().done(->
expect(false).toBeTruthy()
).fail((err)->
expect(err.message).toEqual(
"Unable to load remote view from '#{modalUrl}'"
)
modalText = $.trim(view.$el.text())
expect(modalText).toEqual(
"Unable to load content, please reload the page"
)
).always(->
view.close()
server.restore()
done()
)
server.respond()
describe "on a view with a remote form", ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")[0]
formPostUrl = "/links"
form = """
<form action="#{formPostUrl}">
<input type="submit">
<button data-action="cancel">Cancel</button>
</form>
"""
formResponse = [200, { "Content-Type": "text/html" }, form]
describe "when clicking submit", ->
describe "if the request succeeds", ->
successJSON = { status: "Success" }
successResponse = [201, { "Content-Type": "text/json" }, JSON.stringify(
successJSON
)]
it "closes the view", (done) ->
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(modalUrl, formResponse)
server.respondWith(formPostUrl, successResponse)
view = new RemoteModalView(srcEl)
closeSpy = sinon.spy(view, 'close')
view.render().done(->
view.$el.find('input[type="submit"]').click()
expect(server.requests.length).toEqual(2)
expect(server.requests[1].url).toEqual(formPostUrl)
server.respond()
expect(closeSpy.callCount).toEqual(1)
).fail(->
expect(false).toBeTruthy()
).always(->
done()
server.restore()
)
server.respond()
describe "if the request returns an error form", ->
newForm = "newForm"
errorJson = {
status: "UnprocessableEntity"
template: newForm
}
errorResponse = [422, { "Content-Type": "text/json" }, JSON.stringify(
errorJson
)]
it "renders the error form", (done) ->
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(modalUrl, formResponse)
server.respondWith(formPostUrl, errorResponse)
view = new RemoteModalView(srcEl)
view.render().done(->
view.$el.find('input[type="submit"]').click()
expect(server.requests.length).toEqual(2)
expect(server.requests[1].url).toEqual(formPostUrl)
server.respond()
expect(view.$el.html()).toMatch(///.*#{newForm}.*///)
).fail(->
expect(false).toBeTruthy()
).always(->
done()
server.restore()
view.close()
)
server.respond()
it "closes the modal when cancel is clicked", ->
view = new RemoteModalView(srcEl)
view.replaceModalContent(form)
modalCount = $('.modal').length
view.$el.find('[data-action="cancel"]').click()
expect($('.modal').length).toEqual(modalCount - 1)
describe "for a view which mutates models, when the form is submitted", ->
modalUrl = "/links/new"
mutatedModels = "investigations"
srcEl = $("""
<div data-modal-url="#{modalUrl}" data-mutates-models="#{mutatedModels}">
""")[0]
formPostUrl = "/links"
form = """
<form action="#{formPostUrl}"><input type="submit"/></form>
"""
it "calls RemoteHelper.triggerChange() with the mutated model", ->
view = new RemoteModalView(srcEl)
view.replaceModalContent(form)
viewCloseSpy = sinon.spy(view, 'close')
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(formPostUrl,
[201, { "Content-Type": "text/json" }, JSON.stringify(
{status: "Success"}
)])
triggerChangeStub = sinon.stub(
RemoteHelpers, 'triggerChange', ->)
view.$el.find('input[type="submit"]').click()
server.respond()
try
expect(
triggerChangeStub.calledWith(mutatedModels)
).toBeTruthy()
expect(
viewCloseSpy.callCount
).toEqual(1)
finally
triggerChangeStub.restore()
server.restore()
| true | describe 'Remote Modal View - Feature', ->
describe "#render", ->
it "requests the HTML for the modal from 'data-modal-url' and renders it
to the modal body", (done) ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")[0]
formText = "My PI:PASSWORD:<PASSWORD>END_PI"
server = sinon.fakeServer.create()
server.respondWith(modalUrl,
[200, { "Content-Type": "text/html" }, "<div>#{formText}</div>"]
)
view = new RemoteModalView(srcEl)
view.render().done(->
modalText = $.trim(view.$el.text())
expect(modalText).toEqual(formText)
).fail(->
expected(false).toBeTruthy()
).always(->
done()
server.restore()
view.close()
)
server.respond()
it "renders an error message to the modal if the modal view request
fails", (done) ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")
server = sinon.fakeServer.create()
server.respondWith(modalUrl,
[500, { "Content-Type": "text/html" }, "ERROROROR"]
)
view = new RemoteModalView(srcEl)
view.render().done(->
expect(false).toBeTruthy()
).fail((err)->
expect(err.message).toEqual(
"Unable to load remote view from '#{modalUrl}'"
)
modalText = $.trim(view.$el.text())
expect(modalText).toEqual(
"Unable to load content, please reload the page"
)
).always(->
view.close()
server.restore()
done()
)
server.respond()
describe "on a view with a remote form", ->
modalUrl = "/links/new"
srcEl = $("<div data-modal-url='#{modalUrl}'>")[0]
formPostUrl = "/links"
form = """
<form action="#{formPostUrl}">
<input type="submit">
<button data-action="cancel">Cancel</button>
</form>
"""
formResponse = [200, { "Content-Type": "text/html" }, form]
describe "when clicking submit", ->
describe "if the request succeeds", ->
successJSON = { status: "Success" }
successResponse = [201, { "Content-Type": "text/json" }, JSON.stringify(
successJSON
)]
it "closes the view", (done) ->
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(modalUrl, formResponse)
server.respondWith(formPostUrl, successResponse)
view = new RemoteModalView(srcEl)
closeSpy = sinon.spy(view, 'close')
view.render().done(->
view.$el.find('input[type="submit"]').click()
expect(server.requests.length).toEqual(2)
expect(server.requests[1].url).toEqual(formPostUrl)
server.respond()
expect(closeSpy.callCount).toEqual(1)
).fail(->
expect(false).toBeTruthy()
).always(->
done()
server.restore()
)
server.respond()
describe "if the request returns an error form", ->
newForm = "newForm"
errorJson = {
status: "UnprocessableEntity"
template: newForm
}
errorResponse = [422, { "Content-Type": "text/json" }, JSON.stringify(
errorJson
)]
it "renders the error form", (done) ->
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(modalUrl, formResponse)
server.respondWith(formPostUrl, errorResponse)
view = new RemoteModalView(srcEl)
view.render().done(->
view.$el.find('input[type="submit"]').click()
expect(server.requests.length).toEqual(2)
expect(server.requests[1].url).toEqual(formPostUrl)
server.respond()
expect(view.$el.html()).toMatch(///.*#{newForm}.*///)
).fail(->
expect(false).toBeTruthy()
).always(->
done()
server.restore()
view.close()
)
server.respond()
it "closes the modal when cancel is clicked", ->
view = new RemoteModalView(srcEl)
view.replaceModalContent(form)
modalCount = $('.modal').length
view.$el.find('[data-action="cancel"]').click()
expect($('.modal').length).toEqual(modalCount - 1)
describe "for a view which mutates models, when the form is submitted", ->
modalUrl = "/links/new"
mutatedModels = "investigations"
srcEl = $("""
<div data-modal-url="#{modalUrl}" data-mutates-models="#{mutatedModels}">
""")[0]
formPostUrl = "/links"
form = """
<form action="#{formPostUrl}"><input type="submit"/></form>
"""
it "calls RemoteHelper.triggerChange() with the mutated model", ->
view = new RemoteModalView(srcEl)
view.replaceModalContent(form)
viewCloseSpy = sinon.spy(view, 'close')
server = sinon.fakeServer.create()
# Fake routes
server.respondWith(formPostUrl,
[201, { "Content-Type": "text/json" }, JSON.stringify(
{status: "Success"}
)])
triggerChangeStub = sinon.stub(
RemoteHelpers, 'triggerChange', ->)
view.$el.find('input[type="submit"]').click()
server.respond()
try
expect(
triggerChangeStub.calledWith(mutatedModels)
).toBeTruthy()
expect(
viewCloseSpy.callCount
).toEqual(1)
finally
triggerChangeStub.restore()
server.restore()
|
[
{
"context": "\nnikita = require '@nikitajs/core/lib'\n{tags, config, db} = require './test'\nt",
"end": 28,
"score": 0.8864140510559082,
"start": 26,
"tag": "USERNAME",
"value": "js"
},
{
"context": "user_3'\n @db.user\n username: 'db_create_user_3'\n ... | packages/db/test/database.coffee | shivaylamba/meilisearch-gatsby-plugin-guide | 31 |
nikita = require '@nikitajs/core/lib'
{tags, config, db} = require './test'
they = require('mocha-they')(config)
{command} = require '../src/query'
return unless tags.db
for engine, _ of db then do (engine) ->
describe "db.database #{engine}", ->
they 'database as an argument', ({ssh}) ->
{exists} = await nikita
$ssh: ssh
db: db[engine]
.db.database.remove 'db_create_0'
.db.database 'db_create_0'
.db.database.exists 'db_create_0'
exists.should.be.true()
they 'output `$status`', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_1'
{$status} = await @db.database 'db_create_1'
$status.should.be.true()
{$status} = await @db.database 'db_create_1'
$status.should.be.false()
@db.database.remove 'db_create_1'
describe 'user', ->
they 'which is existing', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
@db.user
username: 'db_create_user_3'
password: 'db_create_user_3'
@db.database
database: 'db_create_3'
user: 'db_create_user_3'
# Todo: why not using nikita.user.exists ?
{$status: user_exists} = await @execute
command: switch engine
when 'mariadb', 'mysql' then command(db[engine], database: 'mysql', "SELECT user FROM db WHERE db='db_create_3';") + " | grep 'db_create_user_3'"
when 'postgresql' then command(db[engine], database: 'db_create_3', '\\l') + " | egrep '^db_create_user_3='"
user_exists.should.be.true()
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
they 'output `$status`', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
@db.user
username: 'db_create_user_3'
password: 'db_create_user_3'
@db.database
database: 'db_create_3'
{$status} = await @db.database
database: 'db_create_3'
user: 'db_create_user_3'
$status.should.be.true()
{$status} = await @db.database
database: 'db_create_3'
user: 'db_create_user_3'
$status.should.be.false()
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
they 'which is not existing', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
try
@db.database.remove 'db_create_4'
@db.user.remove 'db_create_user_4'
await @db.database
database: 'db_create_4'
user: 'db_create_user_4'
throw Error 'Oh no'
catch err
err.message.should.eql 'DB user does not exists: db_create_user_4'
finally
@db.database.remove 'db_create_4'
@db.user.remove 'db_create_user_4'
| 170275 |
nikita = require '@nikitajs/core/lib'
{tags, config, db} = require './test'
they = require('mocha-they')(config)
{command} = require '../src/query'
return unless tags.db
for engine, _ of db then do (engine) ->
describe "db.database #{engine}", ->
they 'database as an argument', ({ssh}) ->
{exists} = await nikita
$ssh: ssh
db: db[engine]
.db.database.remove 'db_create_0'
.db.database 'db_create_0'
.db.database.exists 'db_create_0'
exists.should.be.true()
they 'output `$status`', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_1'
{$status} = await @db.database 'db_create_1'
$status.should.be.true()
{$status} = await @db.database 'db_create_1'
$status.should.be.false()
@db.database.remove 'db_create_1'
describe 'user', ->
they 'which is existing', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
@db.user
username: 'db_create_user_3'
password: '<PASSWORD>'
@db.database
database: 'db_create_3'
user: 'db_create_user_3'
# Todo: why not using nikita.user.exists ?
{$status: user_exists} = await @execute
command: switch engine
when 'mariadb', 'mysql' then command(db[engine], database: 'mysql', "SELECT user FROM db WHERE db='db_create_3';") + " | grep 'db_create_user_3'"
when 'postgresql' then command(db[engine], database: 'db_create_3', '\\l') + " | egrep '^db_create_user_3='"
user_exists.should.be.true()
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
they 'output `$status`', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
@db.user
username: 'db_create_user_3'
password: '<PASSWORD>'
@db.database
database: 'db_create_3'
{$status} = await @db.database
database: 'db_create_3'
user: 'db_create_user_3'
$status.should.be.true()
{$status} = await @db.database
database: 'db_create_3'
user: 'db_create_user_3'
$status.should.be.false()
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
they 'which is not existing', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
try
@db.database.remove 'db_create_4'
@db.user.remove 'db_create_user_4'
await @db.database
database: 'db_create_4'
user: 'db_create_user_4'
throw Error 'Oh no'
catch err
err.message.should.eql 'DB user does not exists: db_create_user_4'
finally
@db.database.remove 'db_create_4'
@db.user.remove 'db_create_user_4'
| true |
nikita = require '@nikitajs/core/lib'
{tags, config, db} = require './test'
they = require('mocha-they')(config)
{command} = require '../src/query'
return unless tags.db
for engine, _ of db then do (engine) ->
describe "db.database #{engine}", ->
they 'database as an argument', ({ssh}) ->
{exists} = await nikita
$ssh: ssh
db: db[engine]
.db.database.remove 'db_create_0'
.db.database 'db_create_0'
.db.database.exists 'db_create_0'
exists.should.be.true()
they 'output `$status`', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_1'
{$status} = await @db.database 'db_create_1'
$status.should.be.true()
{$status} = await @db.database 'db_create_1'
$status.should.be.false()
@db.database.remove 'db_create_1'
describe 'user', ->
they 'which is existing', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
@db.user
username: 'db_create_user_3'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
@db.database
database: 'db_create_3'
user: 'db_create_user_3'
# Todo: why not using nikita.user.exists ?
{$status: user_exists} = await @execute
command: switch engine
when 'mariadb', 'mysql' then command(db[engine], database: 'mysql', "SELECT user FROM db WHERE db='db_create_3';") + " | grep 'db_create_user_3'"
when 'postgresql' then command(db[engine], database: 'db_create_3', '\\l') + " | egrep '^db_create_user_3='"
user_exists.should.be.true()
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
they 'output `$status`', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
@db.user
username: 'db_create_user_3'
password: 'PI:PASSWORD:<PASSWORD>END_PI'
@db.database
database: 'db_create_3'
{$status} = await @db.database
database: 'db_create_3'
user: 'db_create_user_3'
$status.should.be.true()
{$status} = await @db.database
database: 'db_create_3'
user: 'db_create_user_3'
$status.should.be.false()
@db.database.remove 'db_create_3'
@db.user.remove 'db_create_user_3'
they 'which is not existing', ({ssh}) ->
nikita
$ssh: ssh
db: db[engine]
, ->
try
@db.database.remove 'db_create_4'
@db.user.remove 'db_create_user_4'
await @db.database
database: 'db_create_4'
user: 'db_create_user_4'
throw Error 'Oh no'
catch err
err.message.should.eql 'DB user does not exists: db_create_user_4'
finally
@db.database.remove 'db_create_4'
@db.user.remove 'db_create_user_4'
|
[
{
"context": "manatee - outputs a random manatee\n#\n# Author:\n# Danny Lockard\n\nSelect = require( \"soupselect\" ).select\nHTMLPars",
"end": 261,
"score": 0.9998611807823181,
"start": 248,
"tag": "NAME",
"value": "Danny Lockard"
}
] | src/scripts/manatee.coffee | Reelhouse/hubot-scripts | 2 | # Description:
# Allows Hubot to pull down images from calmingmanatee.com
#
# Dependencies:
# "htmlparser": "1.7.6"
# "soupselect: "0.2.0"
#
# Configuration:
# None
#
# Commands:
# hubot manatee - outputs a random manatee
#
# Author:
# Danny Lockard
Select = require( "soupselect" ).select
HTMLParser = require "htmlparser"
module.exports = (robot) ->
robot.respond /manatee/i, (msg) ->
options = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6"
}
msg
.http( 'http://calmingmanatee.com' )
.header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6')
.get(options) (err, res, body) ->
if err
msg.send "Something went wrong #{err}"
return
msg.send "http://calmingmanatee.com/" + get_manatee(body, "body div#holder img")
get_manatee = (body, selector)->
html_handler = new HTMLParser.DefaultHandler((()->), ignoreWhitespace: true)
html_parser = new HTMLParser.Parser html_handler
html_parser.parseComplete body
Select(html_handler.dom, selector)[0].attribs.src
| 166352 | # Description:
# Allows Hubot to pull down images from calmingmanatee.com
#
# Dependencies:
# "htmlparser": "1.7.6"
# "soupselect: "0.2.0"
#
# Configuration:
# None
#
# Commands:
# hubot manatee - outputs a random manatee
#
# Author:
# <NAME>
Select = require( "soupselect" ).select
HTMLParser = require "htmlparser"
module.exports = (robot) ->
robot.respond /manatee/i, (msg) ->
options = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6"
}
msg
.http( 'http://calmingmanatee.com' )
.header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6')
.get(options) (err, res, body) ->
if err
msg.send "Something went wrong #{err}"
return
msg.send "http://calmingmanatee.com/" + get_manatee(body, "body div#holder img")
get_manatee = (body, selector)->
html_handler = new HTMLParser.DefaultHandler((()->), ignoreWhitespace: true)
html_parser = new HTMLParser.Parser html_handler
html_parser.parseComplete body
Select(html_handler.dom, selector)[0].attribs.src
| true | # Description:
# Allows Hubot to pull down images from calmingmanatee.com
#
# Dependencies:
# "htmlparser": "1.7.6"
# "soupselect: "0.2.0"
#
# Configuration:
# None
#
# Commands:
# hubot manatee - outputs a random manatee
#
# Author:
# PI:NAME:<NAME>END_PI
Select = require( "soupselect" ).select
HTMLParser = require "htmlparser"
module.exports = (robot) ->
robot.respond /manatee/i, (msg) ->
options = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6"
}
msg
.http( 'http://calmingmanatee.com' )
.header('User-Agent', 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6')
.get(options) (err, res, body) ->
if err
msg.send "Something went wrong #{err}"
return
msg.send "http://calmingmanatee.com/" + get_manatee(body, "body div#holder img")
get_manatee = (body, selector)->
html_handler = new HTMLParser.DefaultHandler((()->), ignoreWhitespace: true)
html_parser = new HTMLParser.Parser html_handler
html_parser.parseComplete body
Select(html_handler.dom, selector)[0].attribs.src
|
[
{
"context": " = aString.replace(/super$/gm, \"window[@[arguments.callee.name + '_class_injected_in']].__super__[arg",
"end": 931,
"score": 0.7111687064170837,
"start": 931,
"tag": "EMAIL",
"value": ""
},
{
"context": "ing.replace(/super$/gm, \"window[@[arguments.callee.name + '_clas... | src/meta/Mixin.coffee | intensifier/Fizzygum | 110 | class Mixin
@allMixines: []
nonStaticPropertiesSources: nil
staticPropertiesSources: nil
name: ""
_equivalentforSuper: (aString) ->
#console.log "removing super from: " + aString
# coffeescript won't compile "super" unless it's an instance
# method (i.e. if it comes inside a class), so we need to
# translate that manually into valid CS that doesn't use super.
# rephrasing "super" here...
# we can't compile "super" in a mixin because we can't tell which
# class this will be mixed in in advance, i.e. at compile time it doesn't
# belong to a class, so at compile time it doesn't know which class
# it will be injected in.
# So that's why _at time of injection_ we need
# to store the class it's injected in in a special
# variable... and then at runtime here we use that variable to
# implement super
aString = aString.replace(/super$/gm, "window[@[arguments.callee.name + '_class_injected_in']].__super__[arguments.callee.name].apply(this, arguments)")
aString = aString.replace(/super /g, "window[@[arguments.callee.name + '_class_injected_in']].__super__[arguments.callee.name].call this, ")
# TODO un-translated cases as of yet
# /super\(\)/g -> ...???...
# /super\(/g -> ...???...
# Coffeescript adds some helper functions at the top of the compiled code:
#
# slice = [].slice
# indexOf = [].indexOf
# hasProp = {}.hasOwnProperty
#
# here we remove them them all, because they mangle the code,
# also we just have them all in the global scope by now so
# they are not needed multiple times
_removeHelperFunctions: (aString) ->
aString = aString.replace /indexOf = [].indexOf/, "$$$$$$"
aString = aString.replace /hasProp = {}.hasProp/, "$$$$$$"
aString = aString.replace /slice = [].slice/, "$$$$$$"
if (aString.includes "[].indexOf") or
(aString.includes "{}.hasProp") or
(aString.includes "[].slice")
console.log "code contains a helper var, it shouldn't: " + aString
debugger
return aString
constructor: (source, generatePreCompiledJS, createMixin) ->
@nonStaticPropertiesSources = {}
@staticPropertiesSources = {}
# find the Mixin name
mixinRegex = /^([a-zA-Z_$][0-9a-zA-Z_$]*)Mixin *=/m
if (m = mixinRegex.exec(source))?
m.forEach((match, groupIndex) ->
if srcLoadCompileDebugWrites then console.log("Found match, group #{groupIndex}: #{match}")
)
@name = m[1]
if srcLoadCompileDebugWrites then console.log "mixin name: " + @name
if srcLoadCompileDebugWrites then console.log "source ---------\n" + source
sourceToBeParsed = source + "\n $$$STOPTOKEN_LASTFIELD :"
# Now find all the fields definitions
# note that the constructor, methods, properties and static properties
# are ALL fields definitions, so we are basically going to cycle through
# everything
# to match a valid JS variable name (we just ignore the keywords):
# [a-zA-Z_$][0-9a-zA-Z_$]*
regex = /^ ([a-zA-Z_$][0-9a-zA-Z_$]*) *: *([^]*?)(?=^ ([a-zA-Z_$][0-9a-zA-Z_$]*) *:)/gm
while (m = regex.exec(sourceToBeParsed))?
if (m.index == regex.lastIndex)
regex.lastIndex++
m.forEach (match, groupIndex) ->
if srcLoadCompileDebugWrites then console.log "Found match, group #{groupIndex}: #{match}"
if m[1].valueOf() == "$$$STOPTOKEN_LASTFIELD "
break
else
if srcLoadCompileDebugWrites then console.log "not the stop field: " + m[1].valueOf()
@nonStaticPropertiesSources[m[1]] = m[2]
if generatePreCompiledJS or createMixin
JS_string_definitions = compileFGCode (@_equivalentforSuper source), true
JSSourcesContainer.content += JS_string_definitions + "\n"
if createMixin
try
eval.call window, JS_string_definitions
catch err
console.log " error " + err + " evaling : " + JS_string_definitions
debugger
#if @name == "LCLCodePreprocessor" then debugger
| 26040 | class Mixin
@allMixines: []
nonStaticPropertiesSources: nil
staticPropertiesSources: nil
name: ""
_equivalentforSuper: (aString) ->
#console.log "removing super from: " + aString
# coffeescript won't compile "super" unless it's an instance
# method (i.e. if it comes inside a class), so we need to
# translate that manually into valid CS that doesn't use super.
# rephrasing "super" here...
# we can't compile "super" in a mixin because we can't tell which
# class this will be mixed in in advance, i.e. at compile time it doesn't
# belong to a class, so at compile time it doesn't know which class
# it will be injected in.
# So that's why _at time of injection_ we need
# to store the class it's injected in in a special
# variable... and then at runtime here we use that variable to
# implement super
aString = aString.replace(/super$/gm, "window[@[arguments<EMAIL>.callee<EMAIL>.name + '_class_injected_in']].__super__[arguments.callee.name].apply(this, arguments)")
aString = aString.replace(/super /g, "window[@[arguments.<EMAIL> + '_class_injected_in']].__super__[arguments.callee.name].call this, ")
# TODO un-translated cases as of yet
# /super\(\)/g -> ...???...
# /super\(/g -> ...???...
# Coffeescript adds some helper functions at the top of the compiled code:
#
# slice = [].slice
# indexOf = [].indexOf
# hasProp = {}.hasOwnProperty
#
# here we remove them them all, because they mangle the code,
# also we just have them all in the global scope by now so
# they are not needed multiple times
_removeHelperFunctions: (aString) ->
aString = aString.replace /indexOf = [].indexOf/, "$$$$$$"
aString = aString.replace /hasProp = {}.hasProp/, "$$$$$$"
aString = aString.replace /slice = [].slice/, "$$$$$$"
if (aString.includes "[].indexOf") or
(aString.includes "{}.hasProp") or
(aString.includes "[].slice")
console.log "code contains a helper var, it shouldn't: " + aString
debugger
return aString
constructor: (source, generatePreCompiledJS, createMixin) ->
@nonStaticPropertiesSources = {}
@staticPropertiesSources = {}
# find the Mixin name
mixinRegex = /^([a-zA-Z_$][0-9a-zA-Z_$]*)Mixin *=/m
if (m = mixinRegex.exec(source))?
m.forEach((match, groupIndex) ->
if srcLoadCompileDebugWrites then console.log("Found match, group #{groupIndex}: #{match}")
)
@name = m[1]
if srcLoadCompileDebugWrites then console.log "mixin name: " + @name
if srcLoadCompileDebugWrites then console.log "source ---------\n" + source
sourceToBeParsed = source + "\n $$$STOPTOKEN_LASTFIELD :"
# Now find all the fields definitions
# note that the constructor, methods, properties and static properties
# are ALL fields definitions, so we are basically going to cycle through
# everything
# to match a valid JS variable name (we just ignore the keywords):
# [a-zA-Z_$][0-9a-zA-Z_$]*
regex = /^ ([a-zA-Z_$][0-9a-zA-Z_$]*) *: *([^]*?)(?=^ ([a-zA-Z_$][0-9a-zA-Z_$]*) *:)/gm
while (m = regex.exec(sourceToBeParsed))?
if (m.index == regex.lastIndex)
regex.lastIndex++
m.forEach (match, groupIndex) ->
if srcLoadCompileDebugWrites then console.log "Found match, group #{groupIndex}: #{match}"
if m[1].valueOf() == "$$$STOPTOKEN_LASTFIELD "
break
else
if srcLoadCompileDebugWrites then console.log "not the stop field: " + m[1].valueOf()
@nonStaticPropertiesSources[m[1]] = m[2]
if generatePreCompiledJS or createMixin
JS_string_definitions = compileFGCode (@_equivalentforSuper source), true
JSSourcesContainer.content += JS_string_definitions + "\n"
if createMixin
try
eval.call window, JS_string_definitions
catch err
console.log " error " + err + " evaling : " + JS_string_definitions
debugger
#if @name == "LCLCodePreprocessor" then debugger
| true | class Mixin
@allMixines: []
nonStaticPropertiesSources: nil
staticPropertiesSources: nil
name: ""
_equivalentforSuper: (aString) ->
#console.log "removing super from: " + aString
# coffeescript won't compile "super" unless it's an instance
# method (i.e. if it comes inside a class), so we need to
# translate that manually into valid CS that doesn't use super.
# rephrasing "super" here...
# we can't compile "super" in a mixin because we can't tell which
# class this will be mixed in in advance, i.e. at compile time it doesn't
# belong to a class, so at compile time it doesn't know which class
# it will be injected in.
# So that's why _at time of injection_ we need
# to store the class it's injected in in a special
# variable... and then at runtime here we use that variable to
# implement super
aString = aString.replace(/super$/gm, "window[@[argumentsPI:EMAIL:<EMAIL>END_PI.calleePI:EMAIL:<EMAIL>END_PI.name + '_class_injected_in']].__super__[arguments.callee.name].apply(this, arguments)")
aString = aString.replace(/super /g, "window[@[arguments.PI:EMAIL:<EMAIL>END_PI + '_class_injected_in']].__super__[arguments.callee.name].call this, ")
# TODO un-translated cases as of yet
# /super\(\)/g -> ...???...
# /super\(/g -> ...???...
# Coffeescript adds some helper functions at the top of the compiled code:
#
# slice = [].slice
# indexOf = [].indexOf
# hasProp = {}.hasOwnProperty
#
# here we remove them them all, because they mangle the code,
# also we just have them all in the global scope by now so
# they are not needed multiple times
_removeHelperFunctions: (aString) ->
aString = aString.replace /indexOf = [].indexOf/, "$$$$$$"
aString = aString.replace /hasProp = {}.hasProp/, "$$$$$$"
aString = aString.replace /slice = [].slice/, "$$$$$$"
if (aString.includes "[].indexOf") or
(aString.includes "{}.hasProp") or
(aString.includes "[].slice")
console.log "code contains a helper var, it shouldn't: " + aString
debugger
return aString
constructor: (source, generatePreCompiledJS, createMixin) ->
@nonStaticPropertiesSources = {}
@staticPropertiesSources = {}
# find the Mixin name
mixinRegex = /^([a-zA-Z_$][0-9a-zA-Z_$]*)Mixin *=/m
if (m = mixinRegex.exec(source))?
m.forEach((match, groupIndex) ->
if srcLoadCompileDebugWrites then console.log("Found match, group #{groupIndex}: #{match}")
)
@name = m[1]
if srcLoadCompileDebugWrites then console.log "mixin name: " + @name
if srcLoadCompileDebugWrites then console.log "source ---------\n" + source
sourceToBeParsed = source + "\n $$$STOPTOKEN_LASTFIELD :"
# Now find all the fields definitions
# note that the constructor, methods, properties and static properties
# are ALL fields definitions, so we are basically going to cycle through
# everything
# to match a valid JS variable name (we just ignore the keywords):
# [a-zA-Z_$][0-9a-zA-Z_$]*
regex = /^ ([a-zA-Z_$][0-9a-zA-Z_$]*) *: *([^]*?)(?=^ ([a-zA-Z_$][0-9a-zA-Z_$]*) *:)/gm
while (m = regex.exec(sourceToBeParsed))?
if (m.index == regex.lastIndex)
regex.lastIndex++
m.forEach (match, groupIndex) ->
if srcLoadCompileDebugWrites then console.log "Found match, group #{groupIndex}: #{match}"
if m[1].valueOf() == "$$$STOPTOKEN_LASTFIELD "
break
else
if srcLoadCompileDebugWrites then console.log "not the stop field: " + m[1].valueOf()
@nonStaticPropertiesSources[m[1]] = m[2]
if generatePreCompiledJS or createMixin
JS_string_definitions = compileFGCode (@_equivalentforSuper source), true
JSSourcesContainer.content += JS_string_definitions + "\n"
if createMixin
try
eval.call window, JS_string_definitions
catch err
console.log " error " + err + " evaling : " + JS_string_definitions
debugger
#if @name == "LCLCodePreprocessor" then debugger
|
[
{
"context": "nnerHTML = translate 'span', 'ok' if task.key is 'animalsPresent'\n \n$(decisionTree.el).on decisionTree.CHANGE, ({",
"end": 398,
"score": 0.8981921672821045,
"start": 384,
"tag": "KEY",
"value": "animalsPresent"
},
{
"context": "lEvent: detail: {key, value}}) ->\n \n... | confirm-button-label.coffee | zooniverse/penguinwatch | 0 | # Update the confirm button label for the first task, according to the current choice.
currentProject = require 'zooniverse-readymade/current-project'
translate = require 't7e'
{decisionTree} = currentProject.classifyPages[0]
$(decisionTree.el).on decisionTree.LOAD_TASK, ({originalEvent: detail: {task}})->
task.confirmButton.innerHTML = translate 'span', 'ok' if task.key is 'animalsPresent'
$(decisionTree.el).on decisionTree.CHANGE, ({originalEvent: detail: {key, value}}) ->
if key is 'animalsPresent'
switch value
when 'yes'
label = translate 'span', 'ok'
when 'no', 'cant_tell'
label = translate 'span', 'finish'
decisionTree.tasks[key].confirmButton.innerHTML = label if label? | 131323 | # Update the confirm button label for the first task, according to the current choice.
currentProject = require 'zooniverse-readymade/current-project'
translate = require 't7e'
{decisionTree} = currentProject.classifyPages[0]
$(decisionTree.el).on decisionTree.LOAD_TASK, ({originalEvent: detail: {task}})->
task.confirmButton.innerHTML = translate 'span', 'ok' if task.key is '<KEY>'
$(decisionTree.el).on decisionTree.CHANGE, ({originalEvent: detail: {key, value}}) ->
if key is '<KEY>'
switch value
when 'yes'
label = translate 'span', 'ok'
when 'no', 'cant_tell'
label = translate 'span', 'finish'
decisionTree.tasks[key].confirmButton.innerHTML = label if label? | true | # Update the confirm button label for the first task, according to the current choice.
currentProject = require 'zooniverse-readymade/current-project'
translate = require 't7e'
{decisionTree} = currentProject.classifyPages[0]
$(decisionTree.el).on decisionTree.LOAD_TASK, ({originalEvent: detail: {task}})->
task.confirmButton.innerHTML = translate 'span', 'ok' if task.key is 'PI:KEY:<KEY>END_PI'
$(decisionTree.el).on decisionTree.CHANGE, ({originalEvent: detail: {key, value}}) ->
if key is 'PI:KEY:<KEY>END_PI'
switch value
when 'yes'
label = translate 'span', 'ok'
when 'no', 'cant_tell'
label = translate 'span', 'finish'
decisionTree.tasks[key].confirmButton.innerHTML = label if label? |
[
{
"context": "quest.setRequestHeader(\"X-Authentication-Token\", 'xzBA8HXinAO2zprPr')\n error: (jqXHR, textStatus, errorThrown) -",
"end": 724,
"score": 0.7546015977859497,
"start": 707,
"tag": "PASSWORD",
"value": "xzBA8HXinAO2zprPr"
}
] | app/assets/javascripts/questionnaire_details.js.coffee | unepwcmc/ORS-API | 2 | window.QuestionnaireDetails = class QuestionnaireDetails
constructor: (@$container_el, @$tab_container,
@$details_container, @$questions_container) ->
@questionnaire_helper = new QuestionnaireHelper()
@questionnaire_id = localStorage.getItem('questionnaire_id')
@get_questionnaire_details()
new Questions(@$questions_container, @questionnaire_id)
@init_events()
get_questionnaire_details: ->
$.ajax(
url: "http://demo-ors-api.ort-staging.linode.unep-wcmc.org/api/v1/questionnaires/#{@questionnaire_id}"
type: 'GET'
dataType: 'json'
contentType: 'text/plain'
beforeSend: (request) ->
request.setRequestHeader("X-Authentication-Token", 'xzBA8HXinAO2zprPr')
error: (jqXHR, textStatus, errorThrown) ->
@$container_el.append "AJAX Error: #{textStatus}"
success: (data, textStatus, jqXHR) =>
@append_questionnaire_details(data.questionnaire)
)
append_questionnaire_details: (questionnaire) ->
[respondents, submissions] = @questionnaire_helper.submission_percentage(questionnaire)
@sort_respondents(questionnaire.respondents)
$.extend(questionnaire,{submissions: submissions, no_respondents: respondents})
@$container_el.find('h1').append(questionnaire.title)
@$details_container.append(HandlebarsTemplates['questionnaire/details'](questionnaire))
sort_respondents: (respondents) ->
respondents.sort( (a,b) ->
if a.respondent.full_name < b.respondent.full_name
return -1
else if a.respondent.full_name > b.respondent.full_name
return 1
return 0
)
init_events: ->
@tabs_selection()
tabs_selection: ->
@$container_el.on('click', (event) ->
event.preventDefault()
)
opts = @$container_el.find('li')
self = @
opts.on('click', ->
opt = $(this)
if opt.hasClass("active")
return
else
self.$container_el.find(".active").removeClass("active")
opt.addClass("active")
data_container = $('.' + $(this).data('container'))
if data_container.hasClass("active")
return
else
self.$tab_container.find('.active-container').removeClass('active-container').hide()
data_container.addClass("active-container").show()
)
| 191409 | window.QuestionnaireDetails = class QuestionnaireDetails
constructor: (@$container_el, @$tab_container,
@$details_container, @$questions_container) ->
@questionnaire_helper = new QuestionnaireHelper()
@questionnaire_id = localStorage.getItem('questionnaire_id')
@get_questionnaire_details()
new Questions(@$questions_container, @questionnaire_id)
@init_events()
get_questionnaire_details: ->
$.ajax(
url: "http://demo-ors-api.ort-staging.linode.unep-wcmc.org/api/v1/questionnaires/#{@questionnaire_id}"
type: 'GET'
dataType: 'json'
contentType: 'text/plain'
beforeSend: (request) ->
request.setRequestHeader("X-Authentication-Token", '<PASSWORD>')
error: (jqXHR, textStatus, errorThrown) ->
@$container_el.append "AJAX Error: #{textStatus}"
success: (data, textStatus, jqXHR) =>
@append_questionnaire_details(data.questionnaire)
)
append_questionnaire_details: (questionnaire) ->
[respondents, submissions] = @questionnaire_helper.submission_percentage(questionnaire)
@sort_respondents(questionnaire.respondents)
$.extend(questionnaire,{submissions: submissions, no_respondents: respondents})
@$container_el.find('h1').append(questionnaire.title)
@$details_container.append(HandlebarsTemplates['questionnaire/details'](questionnaire))
sort_respondents: (respondents) ->
respondents.sort( (a,b) ->
if a.respondent.full_name < b.respondent.full_name
return -1
else if a.respondent.full_name > b.respondent.full_name
return 1
return 0
)
init_events: ->
@tabs_selection()
tabs_selection: ->
@$container_el.on('click', (event) ->
event.preventDefault()
)
opts = @$container_el.find('li')
self = @
opts.on('click', ->
opt = $(this)
if opt.hasClass("active")
return
else
self.$container_el.find(".active").removeClass("active")
opt.addClass("active")
data_container = $('.' + $(this).data('container'))
if data_container.hasClass("active")
return
else
self.$tab_container.find('.active-container').removeClass('active-container').hide()
data_container.addClass("active-container").show()
)
| true | window.QuestionnaireDetails = class QuestionnaireDetails
constructor: (@$container_el, @$tab_container,
@$details_container, @$questions_container) ->
@questionnaire_helper = new QuestionnaireHelper()
@questionnaire_id = localStorage.getItem('questionnaire_id')
@get_questionnaire_details()
new Questions(@$questions_container, @questionnaire_id)
@init_events()
get_questionnaire_details: ->
$.ajax(
url: "http://demo-ors-api.ort-staging.linode.unep-wcmc.org/api/v1/questionnaires/#{@questionnaire_id}"
type: 'GET'
dataType: 'json'
contentType: 'text/plain'
beforeSend: (request) ->
request.setRequestHeader("X-Authentication-Token", 'PI:PASSWORD:<PASSWORD>END_PI')
error: (jqXHR, textStatus, errorThrown) ->
@$container_el.append "AJAX Error: #{textStatus}"
success: (data, textStatus, jqXHR) =>
@append_questionnaire_details(data.questionnaire)
)
append_questionnaire_details: (questionnaire) ->
[respondents, submissions] = @questionnaire_helper.submission_percentage(questionnaire)
@sort_respondents(questionnaire.respondents)
$.extend(questionnaire,{submissions: submissions, no_respondents: respondents})
@$container_el.find('h1').append(questionnaire.title)
@$details_container.append(HandlebarsTemplates['questionnaire/details'](questionnaire))
sort_respondents: (respondents) ->
respondents.sort( (a,b) ->
if a.respondent.full_name < b.respondent.full_name
return -1
else if a.respondent.full_name > b.respondent.full_name
return 1
return 0
)
init_events: ->
@tabs_selection()
tabs_selection: ->
@$container_el.on('click', (event) ->
event.preventDefault()
)
opts = @$container_el.find('li')
self = @
opts.on('click', ->
opt = $(this)
if opt.hasClass("active")
return
else
self.$container_el.find(".active").removeClass("active")
opt.addClass("active")
data_container = $('.' + $(this).data('container'))
if data_container.hasClass("active")
return
else
self.$tab_container.find('.active-container').removeClass('active-container').hide()
data_container.addClass("active-container").show()
)
|
[
{
"context": "..........................\n### https://github.com/dominictarr/my-local-ip ###\nget_my_ip = requi",
"end": 1672,
"score": 0.9996989965438843,
"start": 1661,
"tag": "USERNAME",
"value": "dominictarr"
},
{
"context": "..........................\n### https:... | src/main.coffee | loveencounterflow/mingkwai-search | 0 |
require 'coffee-script/register'
############################################################################################################
# njs_fs = require 'fs'
njs_path = require 'path'
njs_os = require 'os'
njs_http = require 'http'
njs_url = require 'url'
#...........................................................................................................
TYPES = require 'coffeenode-types'
TRM = require 'coffeenode-trm'
rpr = TRM.rpr.bind TRM
badge = '明快main'
log = TRM.get_logger 'plain', badge
info = TRM.get_logger 'info', badge
alert = TRM.get_logger 'alert', badge
debug = TRM.get_logger 'debug', badge
warn = TRM.get_logger 'warn', badge
help = TRM.get_logger 'help', badge
echo = TRM.echo.bind TRM
templates = require './templates'
O = require '../options'
#...........................................................................................................
# MOJIKURA = require 'coffeenode-mojikura'
FILESEARCHER = require './FILESEARCHER'
DATASOURCES = require 'jizura-datasources'
DSREGISTRY = DATASOURCES.REGISTRY
# db = MOJIKURA.new_db()
#...........................................................................................................
### https://github.com/dominictarr/my-local-ip ###
get_my_ip = require 'my-local-ip'
#...........................................................................................................
### TAINT: this stuff should go into options ###
os_name = njs_os.platform()
#...........................................................................................................
### https://github.com/cloudhead/node-static ###
STATIC = require 'node-static'
static_route = './public'
log_static_requests = yes
file_server = new STATIC.Server static_route
server = njs_http.createServer()
#...........................................................................................................
server_info = server[ 'info' ] =
'os-name': os_name
'address': null
'port': null
'host': null
'routes': []
'started': null
'request-count': 0
#...........................................................................................................
if os_name is 'darwin'
server_info[ 'port' ] = 80
server_info[ 'host' ] = get_my_ip()
#...........................................................................................................
else
server_info[ 'port' ] = 8080
server_info[ 'host' ] = '142.4.222.238'
#...........................................................................................................
server_info[ 'address' ] = "http://#{server_info[ 'host' ]}:#{server_info[ 'port' ]}"
#...........................................................................................................
### https://github.com/caolan/async ###
ASYNC = require 'async'
async_limit = 5
#...........................................................................................................
### https://github.com/aaronblohowiak/routes.js ###
static_router = ( require 'routes' )()
COOKIE = require 'cookie'
#...........................................................................................................
@_cut_here_mark = '✂cut-here✂'
@_cut_here_matcher = /// <!-- #{@_cut_here_mark} --> ///
#...........................................................................................................
LIMIT = require 'coffeenode-limit'
limit_registry = LIMIT.new_registry()
LIMIT.new_user limit_registry, 'localhost', 'premium'
LIMIT.new_user limit_registry, '127.0.0.1', 'premium'
LIMIT.new_user limit_registry, server_info[ 'host' ], 'premium'
LIMIT.new_user limit_registry, '192.168.178.48', 'premium'
LIMIT.new_user limit_registry, '192.168.178.43', 'premium'
# LIMIT.new_user limit_registry, server_info[ 'host' ], 'default'
# LIMIT.new_user limit_registry, server_info[ 'host' ], 'spam'
#-----------------------------------------------------------------------------------------------------------
handle_static = ( request, response, routing ) ->
# log TRM.pink routing
filename = routing[ 'splats' ][ 0 ]
#.........................................................................................................
if log_static_requests
fileroute = njs_path.join __dirname, static_route, filename
# log TRM.grey '©34e', "static: #{request[ 'url' ]} ⇒ #{fileroute}"
#.........................................................................................................
# static_fileroute = '/public/'.concat filename
static_fileroute = filename
request[ 'url' ] = static_fileroute
file_server.serve request, response
#-----------------------------------------------------------------------------------------------------------
handle_favicon = ( request, response, routing ) ->
# request[ 'url' ] = 'favicon.ico'
log TRM.pink 'favicon:', request[ 'url' ]
# request[ 'url' ] = 'public/favicon.ico'
request[ 'url' ] = 'favicon.ico'
file_server.serve request, response
#-----------------------------------------------------------------------------------------------------------
static_router.addRoute '/public/*', handle_static
static_router.addRoute '/favicon.ico', handle_favicon
static_router.addRoute '/favicon.ico*', handle_favicon
#-----------------------------------------------------------------------------------------------------------
@write_http_head = ( response, status_code, cookie ) ->
#.........................................................................................................
headers =
'Content-Type': 'text/html'
'Connection': 'keep-alive'
'Transfer-Encoding': 'chunked'
#.........................................................................................................
headers[ 'Cookie' ] = ( COOKIE.serialize name, value for name, value of cookie ) if cookie?
#.........................................................................................................
response.writeHead status_code, headers
return null
#-----------------------------------------------------------------------------------------------------------
@distribute = ( request, response ) ->
server[ 'info' ][ 'request-count' ] += 1
#.........................................................................................................
url = request[ 'url' ]
#.........................................................................................................
# Serve static files:
return routing[ 'fn' ] request, response, routing if ( routing = static_router.match url )?
#.........................................................................................................
### Limiting Access based on IP ###
uid = request.connection.remoteAddress
LIMIT.permesso limit_registry, uid, ( error, ok, eta ) =>
throw error if error? # (should never happen)
#.......................................................................................................
return @refuse request, response, uid, url, eta unless ok
return @respond request, response, uid, url
#.........................................................................................................
return null
#-----------------------------------------------------------------------------------------------------------
@refuse = ( request, response, uid, url, ms_to_wait ) ->
@write_http_head response, 429
response.write templates.refuse
'reason': "TOO MANY REQUESTS."
'ms-to-wait': ms_to_wait
response.end()
#-----------------------------------------------------------------------------------------------------------
@respond = ( request, response, uid, url ) ->
  ### Answer a search request: parse the query string, collect the requested
  data sources, stream the front half of the HTML page, append one result
  row per hit as the per-source searches complete, then stream the rear
  half and close the response. Always returns `null`. ###
  # NOTE(review): `t0` is never read below (the template gets 'dt': 0) —
  # presumably a leftover of timing instrumentation.
  t0 = 1 * new Date()
  rqid = "RQ##{server[ 'info' ][ 'request-count' ]}"
  #.........................................................................................................
  parsed_url = njs_url.parse url, true
  route = parsed_url[ 'pathname' ]
  last_query = parsed_url[ 'query' ]
  q = last_query[ 'q' ] ? null
  # Δ is shorthand for writing one chunk to the (chunked) response:
  Δ = response.write.bind response
  all_ds_infos = DSREGISTRY[ 'ds-infos' ]
  dsids = {}
  gids = {}
  ds_infos = []
  do_search_db = last_query[ 'db' ] is 'db'
  #.........................................................................................................
  # Parse the client's cookie (used below to persist e.g. the language code):
  if request[ 'headers' ][ 'cookie' ]? then cookie = COOKIE.parse request[ 'headers' ][ 'cookie' ]
  else cookie = {}
  #.........................................................................................................
  help '©34x', rqid, 'cookie:', cookie
  help '©34x', rqid, 'url: ', url
  help '©34x', rqid, 'route: ', route
  tainted_languagecode = cookie[ 'languagecode' ] ? 'en_US'
  # #.........................................................................................................
  # ### TAINT kludge ###
  # if route is '/missing-guides'
  # warn "not implemented: #{rpr route}"
  # else
  #.........................................................................................................
  # Keep only those `ds` query values that name a registered data source:
  do ( dsid = null, ds_info = null, ds = null ) =>
    ds = last_query[ 'ds' ] ? []
    ds = [ ds, ] unless TYPES.isa_list ds
    for dsid in ds
      ds_info = all_ds_infos[ dsid ]
      # log TRM.yellow '©32s', dsid, ds_info
      continue unless ds_info?
      dsids[ dsid ] = 1
      ds_infos.push ds_info
  #.........................................................................................................
  ### TAINT code duplication ###
  # Same filtering for data-source *groups* (`dg` query values):
  do ( gid = null, ds_info = null, dg = null ) =>
    dg = last_query[ 'dg' ] ? []
    dg = [ dg, ] unless TYPES.isa_list dg
    for gid in dg
      continue unless DSREGISTRY[ 'groupname-by-gid' ][ gid ]?
      gids[ gid ] = 1
      # ds_infos.push ds_info
  #.........................................................................................................
  log TRM.green '©23k', rqid, last_query
  #.........................................................................................................
  html = templates.main
    'rqid': rqid
    'title': "明快搜字機 MingKwai Type Tool"
    'headline': "明快搜字機<br>MingKwai Type Tool"
    'results': []
    'result-type': 'ag'
    'dt': 0
    'last-query': last_query
    'request-count': server[ 'info' ][ 'request-count' ]
    'db': do_search_db
    'dsids': dsids
    'gids': gids
    'cut-here-mark': @_cut_here_mark
    'languagecode': tainted_languagecode
  #.........................................................................................................
  # The page template contains a cut mark; everything before it is sent now,
  # everything after it only once all searches have finished:
  [ html_front
    html_rear ] = html.split @_cut_here_matcher
  #.........................................................................................................
  throw new Error "unable to split HTML: no cut mark found" unless html_rear?
  #.........................................................................................................
  result_count = 0
  buffer = []
  #.........................................................................................................
  # Flush buffered result rows plus an updated result count; returns the
  # number of rows that were flushed:
  send_buffer = =>
    unless ( R = buffer.length ) is 0
      Δ buffer.join '\n'
      buffer.length = 0
      Δ templates.update_result_count
        result_nr: result_count
        result_count: result_count
    return R
  #.........................................................................................................
  # Send any pending rows and the rear half of the page, then close:
  finalize_response = =>
    send_buffer()
    Δ html_rear
    response.end()
    warn '©23k', rqid, 'finished'
  #.........................................................................................................
  @write_http_head response, 200, cookie
  Δ html_front
  #.........................................................................................................
  last_idx = ds_infos.length - 1
  #.........................................................................................................
  if last_idx < 0
    warn '©34w', rqid, "no data sources specified"
    # log ds_infos
    return finalize_response()
  #.........................................................................................................
  else
    log TRM.green '©34w', rqid, "searching in #{ds_infos.length} sources"
    ###
    #.......................................................................................................
    search_db = ( async_handler ) =>
      id = "glyph:#{q}"
      debug '©27t', id
      #.....................................................................................................
      MOJIKURA.get db, id, null, ( error, glyph_entry ) =>
        return async_handler error if error?
        # debug '©23w', JSON.stringify glyph_entry
        #...................................................................................................
        if glyph_entry?
          result_count += 1
          #.................................................................................................
          buffer.push templates.result_row
            'rqid': rqid
            'result': [ glyph_entry, ]
            'result-type': 'db'
            'dsid': 'db'
            'result-nr': result_count
            'result-count': result_count
            'languagecode': tainted_languagecode
        # #...............................................................................................
        # send_buffer() if buffer.length >= 2
        #...................................................................................................
        async_handler null, null
    ###
    #.......................................................................................................
    # Search one data-source file. FILESEARCHER.search apparently invokes its
    # callback once per hit and finally with `result is null` as an
    # end-of-results sentinel (only then is `async_handler` called) —
    # TODO confirm against FILESEARCHER.
    search_ds_file = ( ds_info, async_handler ) =>
      dsid = ds_info[ 'id' ]
      ds_route = ds_info[ 'route' ]
      ds_name = ds_info[ 'name' ]
      #.....................................................................................................
      # debug '©88z', rpr q
      FILESEARCHER.search ds_route, q, ( error, result ) =>
        # NOTE(review): on error this only logs and then falls through with
        # `result` undefined — consider `return async_handler error` instead.
        debug 'XXXX' + error[ 'message' ] if error?
        #...................................................................................................
        return async_handler null, null if result is null
        # log TRM.blue data_route, result.join ' '
        #...................................................................................................
        result_count += 1
        #...................................................................................................
        buffer.push templates.result_row
          'rqid': rqid
          'result': result
          'result-type': 'ds'
          'dsid': dsid
          'result-nr': result_count
          'result-count': result_count
          'languagecode': tainted_languagecode
        #...................................................................................................
        # Flush in small batches so the client sees results early:
        send_buffer() if buffer.length >= 2
    #.......................................................................................................
    tasks = []
    tasks.push ( handler ) =>
      ASYNC.eachLimit ds_infos, async_limit, search_ds_file, ( error ) =>
        handler error, null
    #.......................................................................................................
    if do_search_db
      warn "searching in MojiKuraDB currently not possible"
      # debug "searching in MojiKuraDB"
      # tasks.push ( handler ) =>
      # search_db ( error ) =>
      # handler error, null
    #.......................................................................................................
    ASYNC.parallel tasks, finalize_response
  #.........................................................................................................
  return null
#===========================================================================================================
# EVENT HANDLING
#-----------------------------------------------------------------------------------------------------------
server.on 'request', @distribute.bind @
#-----------------------------------------------------------------------------------------------------------
server.on 'close', =>
  # FIX: `server_address` was an undefined name and would have raised a
  # ReferenceError when the server closed; use the recorded address instead.
  warn "server #{server_info[ 'address' ]} closed"
#-----------------------------------------------------------------------------------------------------------
server.on 'error', ( error ) =>
  alert "when trying to start serving on #{server_info[ 'address' ]}, an error was encountered:"
  alert rpr error[ 'message' ]
  if error[ 'message' ] is "listen EACCES" and server_info[ 'port' ] < 1024
    help "since the configured port is below 1024, you should probably"
    help "try and start the server using `sudo`"
  throw error
#-----------------------------------------------------------------------------------------------------------
# Last-resort guard: log, then re-throw so the process still terminates visibly.
process.on 'uncaughtException', ( error ) ->
  alert 'uncaughtException'
  throw error
############################################################################################################
# MAKE IT SO
#-----------------------------------------------------------------------------------------------------------
@start = ( handler ) ->
  ### Record the registered static routes in `server_info` and start listening;
  calls `handler null, server_info` (if given) once the server is up. ###
  server_info[ 'routes' ].push route for route of static_router.routeMap
  # NOTE(review): Node's `listen` callback receives no arguments, so `error`
  # (and the `P...` splat) are presumably always empty — confirm before use.
  server.listen server_info[ 'port' ], server_info[ 'host' ], ( error, P... ) =>
    server[ 'info' ][ 'started' ] = new Date()
    handler null, server_info if handler?
# # curl -H "X-Forwarded-For: 1.2.3.4" http://192.168.178.25
# | 201549 |
# MingKwai Type Tool HTTP server — module-level wiring: dependencies, server
# and rate-limiter configuration.
require 'coffee-script/register'
############################################################################################################
# njs_fs = require 'fs'
njs_path = require 'path'
njs_os = require 'os'
njs_http = require 'http'
njs_url = require 'url'
#...........................................................................................................
TYPES = require 'coffeenode-types'
TRM = require 'coffeenode-trm'
rpr = TRM.rpr.bind TRM
badge = '明快main'
log = TRM.get_logger 'plain', badge
info = TRM.get_logger 'info', badge
alert = TRM.get_logger 'alert', badge
debug = TRM.get_logger 'debug', badge
warn = TRM.get_logger 'warn', badge
help = TRM.get_logger 'help', badge
echo = TRM.echo.bind TRM
templates = require './templates'
O = require '../options'
#...........................................................................................................
# MOJIKURA = require 'coffeenode-mojikura'
FILESEARCHER = require './FILESEARCHER'
DATASOURCES = require 'jizura-datasources'
DSREGISTRY = DATASOURCES.REGISTRY
# db = MOJIKURA.new_db()
#...........................................................................................................
### https://github.com/dominictarr/my-local-ip ###
get_my_ip = require 'my-local-ip'
#...........................................................................................................
### TAINT: this stuff should go into options ###
os_name = njs_os.platform()
#...........................................................................................................
### https://github.com/cloudhead/node-static ###
STATIC = require 'node-static'
static_route = './public'
log_static_requests = yes
file_server = new STATIC.Server static_route
server = njs_http.createServer()
#...........................................................................................................
# Mutable run-time info about this server, also exposed on the server object:
server_info = server[ 'info' ] =
  'os-name': os_name
  'address': null
  'port': null
  'host': null
  'routes': []
  'started': null
  'request-count': 0
#...........................................................................................................
# NOTE(review): host/port are hard-wired per platform — presumably a darwin
# development box vs. a deployment machine; see the TAINT note above.
if os_name is 'darwin'
  server_info[ 'port' ] = 80
  server_info[ 'host' ] = get_my_ip()
#...........................................................................................................
else
  server_info[ 'port' ] = 8080
  server_info[ 'host' ] = '172.16.17.32'
#...........................................................................................................
server_info[ 'address' ] = "http://#{server_info[ 'host' ]}:#{server_info[ 'port' ]}"
#...........................................................................................................
### https://github.com/caolan/async ###
ASYNC = require 'async'
async_limit = 5
#...........................................................................................................
### https://github.com/aaronblohowiak/routes.js ###
static_router = ( require 'routes' )()
COOKIE = require 'cookie'
#...........................................................................................................
# Marks where `templates.main` is split into front/rear halves for chunked delivery:
@_cut_here_mark = '✂cut-here✂'
@_cut_here_matcher = /// <!-- #{@_cut_here_mark} --> ///
#...........................................................................................................
# Rate limiter: whitelist known hosts with the 'premium' limit class:
LIMIT = require 'coffeenode-limit'
limit_registry = LIMIT.new_registry()
LIMIT.new_user limit_registry, 'localhost', 'premium'
LIMIT.new_user limit_registry, '127.0.0.1', 'premium'
LIMIT.new_user limit_registry, server_info[ 'host' ], 'premium'
LIMIT.new_user limit_registry, '192.168.178.48', 'premium'
LIMIT.new_user limit_registry, '192.168.178.43', 'premium'
# LIMIT.new_user limit_registry, server_info[ 'host' ], 'default'
# LIMIT.new_user limit_registry, server_info[ 'host' ], 'spam'
#-----------------------------------------------------------------------------------------------------------
handle_static = ( request, response, routing ) ->
  ### Serve a file below `static_route`; `routing[ 'splats' ][ 0 ]` holds the
  path part matched by the `*` of the `/public/*` route. ###
  # log TRM.pink routing
  filename = routing[ 'splats' ][ 0 ]
  #.........................................................................................................
  if log_static_requests
    # NOTE(review): `fileroute` is currently unused — only the commented-out
    # log line below refers to it; kept for when that log is re-enabled.
    fileroute = njs_path.join __dirname, static_route, filename
    # log TRM.grey '©34e', "static: #{request[ 'url' ]} ⇒ #{fileroute}"
  #.........................................................................................................
  # static_fileroute = '/public/'.concat filename
  static_fileroute = filename
  # Rewrite the URL so `file_server` resolves the file relative to its root:
  request[ 'url' ] = static_fileroute
  file_server.serve request, response
#-----------------------------------------------------------------------------------------------------------
handle_favicon = ( request, response, routing ) ->
  ### Serve the site favicon regardless of the exact URL variant requested. ###
  log TRM.pink 'favicon:', request[ 'url' ]
  # Rewrite the URL so the static file server resolves it inside its root:
  request[ 'url' ] = 'favicon.ico'
  file_server.serve request, response
#-----------------------------------------------------------------------------------------------------------
# Static assets live under /public/; the favicon is special-cased (browsers
# request it unconditionally), including URL variants with trailing junk:
static_router.addRoute '/public/*', handle_static
static_router.addRoute '/favicon.ico', handle_favicon
static_router.addRoute '/favicon.ico*', handle_favicon
#-----------------------------------------------------------------------------------------------------------
@write_http_head = ( response, status_code, cookie ) ->
  ### Write the HTTP response head with standard HTML/chunked headers; when a
  `cookie` object (name → value pairs) is given, send it to the client.
  Returns `null`. ###
  headers =
    'Content-Type': 'text/html'
    'Connection': 'keep-alive'
    'Transfer-Encoding': 'chunked'
  #.........................................................................................................
  # FIX: cookies travel server→client in the `Set-Cookie` response header; the
  # former code used `Cookie` (a *request* header), which browsers ignore, so
  # the cookie was never persisted on the client.
  headers[ 'Set-Cookie' ] = ( COOKIE.serialize name, value for name, value of cookie ) if cookie?
  #.........................................................................................................
  response.writeHead status_code, headers
  return null
#-----------------------------------------------------------------------------------------------------------
@distribute = ( request, response ) ->
  ### Central request dispatcher: serves static routes directly; all other
  requests pass the per-IP rate limiter before being handed to `@respond`. ###
  server[ 'info' ][ 'request-count' ] += 1
  url     = request[ 'url' ]
  routing = static_router.match url
  #.........................................................................................................
  # Static files bypass rate limiting entirely:
  if routing?
    return routing[ 'fn' ] request, response, routing
  #.........................................................................................................
  ### Limiting Access based on IP ###
  uid = request.connection.remoteAddress
  LIMIT.permesso limit_registry, uid, ( error, ok, eta ) =>
    throw error if error? # (should never happen)
    #.......................................................................................................
    if ok then return @respond request, response, uid, url
    return @refuse request, response, uid, url, eta
  #.........................................................................................................
  return null
#-----------------------------------------------------------------------------------------------------------
@refuse = ( request, response, uid, url, ms_to_wait ) ->
  ### Reply with HTTP 429 and a templated refusal page telling the client how
  many milliseconds to wait before retrying. ###
  @write_http_head response, 429
  refusal = templates.refuse
    'reason':     "TOO MANY REQUESTS."
    'ms-to-wait': ms_to_wait
  response.write refusal
  response.end()
#-----------------------------------------------------------------------------------------------------------
@respond = ( request, response, uid, url ) ->
  ### Answer a search request: parse the query string, collect the requested
  data sources, stream the front half of the HTML page, append one result
  row per hit as the per-source searches complete, then stream the rear
  half and close the response. Always returns `null`. ###
  # NOTE(review): `t0` is never read below (the template gets 'dt': 0) —
  # presumably a leftover of timing instrumentation.
  t0 = 1 * new Date()
  rqid = "RQ##{server[ 'info' ][ 'request-count' ]}"
  #.........................................................................................................
  parsed_url = njs_url.parse url, true
  route = parsed_url[ 'pathname' ]
  last_query = parsed_url[ 'query' ]
  q = last_query[ 'q' ] ? null
  # Δ is shorthand for writing one chunk to the (chunked) response:
  Δ = response.write.bind response
  all_ds_infos = DSREGISTRY[ 'ds-infos' ]
  dsids = {}
  gids = {}
  ds_infos = []
  do_search_db = last_query[ 'db' ] is 'db'
  #.........................................................................................................
  # Parse the client's cookie (used below to persist e.g. the language code):
  if request[ 'headers' ][ 'cookie' ]? then cookie = COOKIE.parse request[ 'headers' ][ 'cookie' ]
  else cookie = {}
  #.........................................................................................................
  help '©34x', rqid, 'cookie:', cookie
  help '©34x', rqid, 'url: ', url
  help '©34x', rqid, 'route: ', route
  tainted_languagecode = cookie[ 'languagecode' ] ? 'en_US'
  # #.........................................................................................................
  # ### TAINT kludge ###
  # if route is '/missing-guides'
  # warn "not implemented: #{rpr route}"
  # else
  #.........................................................................................................
  # Keep only those `ds` query values that name a registered data source:
  do ( dsid = null, ds_info = null, ds = null ) =>
    ds = last_query[ 'ds' ] ? []
    ds = [ ds, ] unless TYPES.isa_list ds
    for dsid in ds
      ds_info = all_ds_infos[ dsid ]
      # log TRM.yellow '©32s', dsid, ds_info
      continue unless ds_info?
      dsids[ dsid ] = 1
      ds_infos.push ds_info
  #.........................................................................................................
  ### TAINT code duplication ###
  # Same filtering for data-source *groups* (`dg` query values):
  do ( gid = null, ds_info = null, dg = null ) =>
    dg = last_query[ 'dg' ] ? []
    dg = [ dg, ] unless TYPES.isa_list dg
    for gid in dg
      continue unless DSREGISTRY[ 'groupname-by-gid' ][ gid ]?
      gids[ gid ] = 1
      # ds_infos.push ds_info
  #.........................................................................................................
  log TRM.green '©23k', rqid, last_query
  #.........................................................................................................
  html = templates.main
    'rqid': rqid
    'title': "明快搜字機 MingKwai Type Tool"
    'headline': "明快搜字機<br>MingKwai Type Tool"
    'results': []
    'result-type': 'ag'
    'dt': 0
    'last-query': last_query
    'request-count': server[ 'info' ][ 'request-count' ]
    'db': do_search_db
    'dsids': dsids
    'gids': gids
    'cut-here-mark': @_cut_here_mark
    'languagecode': tainted_languagecode
  #.........................................................................................................
  # The page template contains a cut mark; everything before it is sent now,
  # everything after it only once all searches have finished:
  [ html_front
    html_rear ] = html.split @_cut_here_matcher
  #.........................................................................................................
  throw new Error "unable to split HTML: no cut mark found" unless html_rear?
  #.........................................................................................................
  result_count = 0
  buffer = []
  #.........................................................................................................
  # Flush buffered result rows plus an updated result count; returns the
  # number of rows that were flushed:
  send_buffer = =>
    unless ( R = buffer.length ) is 0
      Δ buffer.join '\n'
      buffer.length = 0
      Δ templates.update_result_count
        result_nr: result_count
        result_count: result_count
    return R
  #.........................................................................................................
  # Send any pending rows and the rear half of the page, then close:
  finalize_response = =>
    send_buffer()
    Δ html_rear
    response.end()
    warn '©23k', rqid, 'finished'
  #.........................................................................................................
  @write_http_head response, 200, cookie
  Δ html_front
  #.........................................................................................................
  last_idx = ds_infos.length - 1
  #.........................................................................................................
  if last_idx < 0
    warn '©34w', rqid, "no data sources specified"
    # log ds_infos
    return finalize_response()
  #.........................................................................................................
  else
    log TRM.green '©34w', rqid, "searching in #{ds_infos.length} sources"
    ###
    #.......................................................................................................
    search_db = ( async_handler ) =>
      id = "glyph:#{q}"
      debug '©27t', id
      #.....................................................................................................
      MOJIKURA.get db, id, null, ( error, glyph_entry ) =>
        return async_handler error if error?
        # debug '©23w', JSON.stringify glyph_entry
        #...................................................................................................
        if glyph_entry?
          result_count += 1
          #.................................................................................................
          buffer.push templates.result_row
            'rqid': rqid
            'result': [ glyph_entry, ]
            'result-type': 'db'
            'dsid': 'db'
            'result-nr': result_count
            'result-count': result_count
            'languagecode': tainted_languagecode
        # #...............................................................................................
        # send_buffer() if buffer.length >= 2
        #...................................................................................................
        async_handler null, null
    ###
    #.......................................................................................................
    # Search one data-source file. FILESEARCHER.search apparently invokes its
    # callback once per hit and finally with `result is null` as an
    # end-of-results sentinel (only then is `async_handler` called) —
    # TODO confirm against FILESEARCHER.
    search_ds_file = ( ds_info, async_handler ) =>
      dsid = ds_info[ 'id' ]
      ds_route = ds_info[ 'route' ]
      ds_name = ds_info[ 'name' ]
      #.....................................................................................................
      # debug '©88z', rpr q
      FILESEARCHER.search ds_route, q, ( error, result ) =>
        # NOTE(review): on error this only logs and then falls through with
        # `result` undefined — consider `return async_handler error` instead.
        debug 'XXXX' + error[ 'message' ] if error?
        #...................................................................................................
        return async_handler null, null if result is null
        # log TRM.blue data_route, result.join ' '
        #...................................................................................................
        result_count += 1
        #...................................................................................................
        buffer.push templates.result_row
          'rqid': rqid
          'result': result
          'result-type': 'ds'
          'dsid': dsid
          'result-nr': result_count
          'result-count': result_count
          'languagecode': tainted_languagecode
        #...................................................................................................
        # Flush in small batches so the client sees results early:
        send_buffer() if buffer.length >= 2
    #.......................................................................................................
    tasks = []
    tasks.push ( handler ) =>
      ASYNC.eachLimit ds_infos, async_limit, search_ds_file, ( error ) =>
        handler error, null
    #.......................................................................................................
    if do_search_db
      warn "searching in MojiKuraDB currently not possible"
      # debug "searching in MojiKuraDB"
      # tasks.push ( handler ) =>
      # search_db ( error ) =>
      # handler error, null
    #.......................................................................................................
    ASYNC.parallel tasks, finalize_response
  #.........................................................................................................
  return null
#===========================================================================================================
# EVENT HANDLING
#-----------------------------------------------------------------------------------------------------------
server.on 'request', @distribute.bind @
#-----------------------------------------------------------------------------------------------------------
server.on 'close', =>
  # FIX: `server_address` was an undefined name and would have raised a
  # ReferenceError when the server closed; use the recorded address instead.
  warn "server #{server_info[ 'address' ]} closed"
#-----------------------------------------------------------------------------------------------------------
server.on 'error', ( error ) =>
  alert "when trying to start serving on #{server_info[ 'address' ]}, an error was encountered:"
  alert rpr error[ 'message' ]
  if error[ 'message' ] is "listen EACCES" and server_info[ 'port' ] < 1024
    help "since the configured port is below 1024, you should probably"
    help "try and start the server using `sudo`"
  throw error
#-----------------------------------------------------------------------------------------------------------
# Last-resort guard: log, then re-throw so the process still terminates visibly.
process.on 'uncaughtException', ( error ) ->
  alert 'uncaughtException'
  throw error
############################################################################################################
# MAKE IT SO
#-----------------------------------------------------------------------------------------------------------
@start = ( handler ) ->
  ### Record the registered static routes in `server_info` and start listening;
  calls `handler null, server_info` (if given) once the server is up. ###
  server_info[ 'routes' ].push route for route of static_router.routeMap
  # NOTE(review): Node's `listen` callback receives no arguments, so `error`
  # (and the `P...` splat) are presumably always empty — confirm before use.
  server.listen server_info[ 'port' ], server_info[ 'host' ], ( error, P... ) =>
    server[ 'info' ][ 'started' ] = new Date()
    handler null, server_info if handler?
# # curl -H "X-Forwarded-For: 172.16.58.3" http://192.168.178.25
# | true |
# MingKwai Type Tool HTTP server — module-level wiring: dependencies, server
# and rate-limiter configuration.
require 'coffee-script/register'
############################################################################################################
# njs_fs = require 'fs'
njs_path = require 'path'
njs_os = require 'os'
njs_http = require 'http'
njs_url = require 'url'
#...........................................................................................................
TYPES = require 'coffeenode-types'
TRM = require 'coffeenode-trm'
rpr = TRM.rpr.bind TRM
badge = '明快main'
log = TRM.get_logger 'plain', badge
info = TRM.get_logger 'info', badge
alert = TRM.get_logger 'alert', badge
debug = TRM.get_logger 'debug', badge
warn = TRM.get_logger 'warn', badge
help = TRM.get_logger 'help', badge
echo = TRM.echo.bind TRM
templates = require './templates'
O = require '../options'
#...........................................................................................................
# MOJIKURA = require 'coffeenode-mojikura'
FILESEARCHER = require './FILESEARCHER'
DATASOURCES = require 'jizura-datasources'
DSREGISTRY = DATASOURCES.REGISTRY
# db = MOJIKURA.new_db()
#...........................................................................................................
### https://github.com/dominictarr/my-local-ip ###
get_my_ip = require 'my-local-ip'
#...........................................................................................................
### TAINT: this stuff should go into options ###
os_name = njs_os.platform()
#...........................................................................................................
### https://github.com/cloudhead/node-static ###
STATIC = require 'node-static'
static_route = './public'
log_static_requests = yes
file_server = new STATIC.Server static_route
server = njs_http.createServer()
#...........................................................................................................
# Mutable run-time info about this server, also exposed on the server object:
server_info = server[ 'info' ] =
  'os-name': os_name
  'address': null
  'port': null
  'host': null
  'routes': []
  'started': null
  'request-count': 0
#...........................................................................................................
# NOTE(review): host/port are hard-wired per platform — presumably a darwin
# development box vs. a deployment machine; see the TAINT note above.
if os_name is 'darwin'
  server_info[ 'port' ] = 80
  server_info[ 'host' ] = get_my_ip()
#...........................................................................................................
else
  # NOTE(review): the value below looks like a redaction/export placeholder for
  # an IP address — restore the real host before deployment.
  server_info[ 'port' ] = 8080
  server_info[ 'host' ] = 'PI:IP_ADDRESS:172.16.17.32END_PI'
#...........................................................................................................
server_info[ 'address' ] = "http://#{server_info[ 'host' ]}:#{server_info[ 'port' ]}"
#...........................................................................................................
### https://github.com/caolan/async ###
ASYNC = require 'async'
async_limit = 5
#...........................................................................................................
### https://github.com/aaronblohowiak/routes.js ###
static_router = ( require 'routes' )()
COOKIE = require 'cookie'
#...........................................................................................................
# Marks where `templates.main` is split into front/rear halves for chunked delivery:
@_cut_here_mark = '✂cut-here✂'
@_cut_here_matcher = /// <!-- #{@_cut_here_mark} --> ///
#...........................................................................................................
# Rate limiter: whitelist known hosts with the 'premium' limit class:
LIMIT = require 'coffeenode-limit'
limit_registry = LIMIT.new_registry()
LIMIT.new_user limit_registry, 'localhost', 'premium'
LIMIT.new_user limit_registry, '127.0.0.1', 'premium'
LIMIT.new_user limit_registry, server_info[ 'host' ], 'premium'
LIMIT.new_user limit_registry, '192.168.178.48', 'premium'
LIMIT.new_user limit_registry, '192.168.178.43', 'premium'
# LIMIT.new_user limit_registry, server_info[ 'host' ], 'default'
# LIMIT.new_user limit_registry, server_info[ 'host' ], 'spam'
#-----------------------------------------------------------------------------------------------------------
handle_static = ( request, response, routing ) ->
  ### Serve a file below `static_route`; `routing[ 'splats' ][ 0 ]` holds the
  path part matched by the `*` of the `/public/*` route. ###
  # log TRM.pink routing
  filename = routing[ 'splats' ][ 0 ]
  #.........................................................................................................
  if log_static_requests
    # NOTE(review): `fileroute` is currently unused — only the commented-out
    # log line below refers to it; kept for when that log is re-enabled.
    fileroute = njs_path.join __dirname, static_route, filename
    # log TRM.grey '©34e', "static: #{request[ 'url' ]} ⇒ #{fileroute}"
  #.........................................................................................................
  # static_fileroute = '/public/'.concat filename
  static_fileroute = filename
  # Rewrite the URL so `file_server` resolves the file relative to its root:
  request[ 'url' ] = static_fileroute
  file_server.serve request, response
#-----------------------------------------------------------------------------------------------------------
handle_favicon = ( request, response, routing ) ->
  ### Serve the site favicon regardless of the exact URL variant requested. ###
  log TRM.pink 'favicon:', request[ 'url' ]
  # Rewrite the URL so the static file server resolves it inside its root:
  request[ 'url' ] = 'favicon.ico'
  file_server.serve request, response
#-----------------------------------------------------------------------------------------------------------
# Static assets live under /public/; the favicon is special-cased (browsers
# request it unconditionally), including URL variants with trailing junk:
static_router.addRoute '/public/*', handle_static
static_router.addRoute '/favicon.ico', handle_favicon
static_router.addRoute '/favicon.ico*', handle_favicon
#-----------------------------------------------------------------------------------------------------------
@write_http_head = ( response, status_code, cookie ) ->
  ### Write the HTTP response head with standard HTML/chunked headers; when a
  `cookie` object (name → value pairs) is given, send it to the client.
  Returns `null`. ###
  headers =
    'Content-Type': 'text/html'
    'Connection': 'keep-alive'
    'Transfer-Encoding': 'chunked'
  #.........................................................................................................
  # FIX: cookies travel server→client in the `Set-Cookie` response header; the
  # former code used `Cookie` (a *request* header), which browsers ignore, so
  # the cookie was never persisted on the client.
  headers[ 'Set-Cookie' ] = ( COOKIE.serialize name, value for name, value of cookie ) if cookie?
  #.........................................................................................................
  response.writeHead status_code, headers
  return null
#-----------------------------------------------------------------------------------------------------------
@distribute = ( request, response ) ->
  ### Central request dispatcher: serves static routes directly; all other
  requests pass the per-IP rate limiter before being handed to `@respond`. ###
  server[ 'info' ][ 'request-count' ] += 1
  url     = request[ 'url' ]
  routing = static_router.match url
  #.........................................................................................................
  # Static files bypass rate limiting entirely:
  if routing?
    return routing[ 'fn' ] request, response, routing
  #.........................................................................................................
  ### Limiting Access based on IP ###
  uid = request.connection.remoteAddress
  LIMIT.permesso limit_registry, uid, ( error, ok, eta ) =>
    throw error if error? # (should never happen)
    #.......................................................................................................
    if ok then return @respond request, response, uid, url
    return @refuse request, response, uid, url, eta
  #.........................................................................................................
  return null
#-----------------------------------------------------------------------------------------------------------
@refuse = ( request, response, uid, url, ms_to_wait ) ->
  # Turn away a rate-limited client: answer with HTTP 429 and a rendered refusal page
  # telling the client how many milliseconds to wait before retrying.
  @write_http_head response, 429
  refusal_values =
    'reason':     "TOO MANY REQUESTS."
    'ms-to-wait': ms_to_wait
  response.write templates.refuse refusal_values
  response.end()
#-----------------------------------------------------------------------------------------------------------
@respond = ( request, response, uid, url ) ->
  # Answer a search request: parse the query, stream the HTML front matter immediately,
  # search the selected data sources asynchronously, stream result rows as they arrive,
  # then close the response with the HTML rear matter (chunked transfer encoding; see
  # @write_http_head). NOTE(review): original indentation was lost in transit; block
  # nesting below is reconstructed — confirm against upstream.
  t0 = 1 * new Date()
  rqid = "RQ##{server[ 'info' ][ 'request-count' ]}"
  #.........................................................................................................
  parsed_url = njs_url.parse url, true
  route = parsed_url[ 'pathname' ]
  last_query = parsed_url[ 'query' ]
  q = last_query[ 'q' ] ? null
  # Δ is the chunk writer: every call sends one chunk of the response body.
  Δ = response.write.bind response
  all_ds_infos = DSREGISTRY[ 'ds-infos' ]
  dsids = {}
  gids = {}
  ds_infos = []
  do_search_db = last_query[ 'db' ] is 'db'
  #.........................................................................................................
  if request[ 'headers' ][ 'cookie' ]? then cookie = COOKIE.parse request[ 'headers' ][ 'cookie' ]
  else cookie = {}
  #.........................................................................................................
  help '©34x', rqid, 'cookie:', cookie
  help '©34x', rqid, 'url: ', url
  help '©34x', rqid, 'route: ', route
  # "tainted" because it comes straight from the client cookie, unvalidated.
  tainted_languagecode = cookie[ 'languagecode' ] ? 'en_US'
  # #.........................................................................................................
  # ### TAINT kludge ###
  # if route is '/missing-guides'
  #   warn "not implemented: #{rpr route}"
  # else
  #.........................................................................................................
  # Collect the requested data-source infos; unknown dsids are silently skipped.
  do ( dsid = null, ds_info = null, ds = null ) =>
    ds = last_query[ 'ds' ] ? []
    ds = [ ds, ] unless TYPES.isa_list ds
    for dsid in ds
      ds_info = all_ds_infos[ dsid ]
      # log TRM.yellow '©32s', dsid, ds_info
      continue unless ds_info?
      dsids[ dsid ] = 1
      ds_infos.push ds_info
  #.........................................................................................................
  ### TAINT code duplication ###
  # Same treatment for the requested data-source groups (gids).
  do ( gid = null, ds_info = null, dg = null ) =>
    dg = last_query[ 'dg' ] ? []
    dg = [ dg, ] unless TYPES.isa_list dg
    for gid in dg
      continue unless DSREGISTRY[ 'groupname-by-gid' ][ gid ]?
      gids[ gid ] = 1
      # ds_infos.push ds_info
  #.........................................................................................................
  log TRM.green '©23k', rqid, last_query
  #.........................................................................................................
  html = templates.main
    'rqid': rqid
    'title': "明快搜字機 MingKwai Type Tool"
    'headline': "明快搜字機<br>MingKwai Type Tool"
    'results': []
    'result-type': 'ag'
    'dt': 0
    'last-query': last_query
    'request-count': server[ 'info' ][ 'request-count' ]
    'db': do_search_db
    'dsids': dsids
    'gids': gids
    'cut-here-mark': @_cut_here_mark
    'languagecode': tainted_languagecode
  #.........................................................................................................
  # The rendered page is split at the cut mark so the front half can be sent immediately
  # and result rows streamed in between before the rear half closes the page.
  [ html_front
    html_rear ] = html.split @_cut_here_matcher
  #.........................................................................................................
  throw new Error "unable to split HTML: no cut mark found" unless html_rear?
  #.........................................................................................................
  result_count = 0
  buffer = []
  #.........................................................................................................
  # Flush buffered result rows plus a client-side result-count update; returns the
  # number of rows that were flushed.
  send_buffer = =>
    unless ( R = buffer.length ) is 0
      Δ buffer.join '\n'
      buffer.length = 0
      Δ templates.update_result_count
        result_nr: result_count
        result_count: result_count
    return R
  #.........................................................................................................
  # Flush remaining rows, send the rear half of the page, and end the response.
  finalize_response = =>
    send_buffer()
    Δ html_rear
    response.end()
    warn '©23k', rqid, 'finished'
  #.........................................................................................................
  @write_http_head response, 200, cookie
  Δ html_front
  #.........................................................................................................
  last_idx = ds_infos.length - 1
  #.........................................................................................................
  if last_idx < 0
    warn '©34w', rqid, "no data sources specified"
    # log ds_infos
    return finalize_response()
  #.........................................................................................................
  else
    log TRM.green '©34w', rqid, "searching in #{ds_infos.length} sources"
  ###
  #.......................................................................................................
  search_db = ( async_handler ) =>
    id = "glyph:#{q}"
    debug '©27t', id
    #.....................................................................................................
    MOJIKURA.get db, id, null, ( error, glyph_entry ) =>
      return async_handler error if error?
      # debug '©23w', JSON.stringify glyph_entry
      #...................................................................................................
      if glyph_entry?
        result_count += 1
        #.................................................................................................
        buffer.push templates.result_row
          'rqid': rqid
          'result': [ glyph_entry, ]
          'result-type': 'db'
          'dsid': 'db'
          'result-nr': result_count
          'result-count': result_count
          'languagecode': tainted_languagecode
        # #...............................................................................................
        # send_buffer() if buffer.length >= 2
      #...................................................................................................
      async_handler null, null
  ###
  #.......................................................................................................
  # Search one data-source file; buffered rows are flushed in pairs to keep chunks small.
  # NOTE(review): on the success path this callback never appears to invoke
  # `async_handler`, which would leave ASYNC.eachLimit waiting forever; also the
  # error branch logs but does not return — confirm whether lines were lost here.
  search_ds_file = ( ds_info, async_handler ) =>
    dsid = ds_info[ 'id' ]
    ds_route = ds_info[ 'route' ]
    ds_name = ds_info[ 'name' ]
    #.....................................................................................................
    # debug '©88z', rpr q
    FILESEARCHER.search ds_route, q, ( error, result ) =>
      debug 'XXXX' + error[ 'message' ] if error?
      #...................................................................................................
      return async_handler null, null if result is null
      # log TRM.blue data_route, result.join ' '
      #...................................................................................................
      result_count += 1
      #...................................................................................................
      buffer.push templates.result_row
        'rqid': rqid
        'result': result
        'result-type': 'ds'
        'dsid': dsid
        'result-nr': result_count
        'result-count': result_count
        'languagecode': tainted_languagecode
      #...................................................................................................
      send_buffer() if buffer.length >= 2
  #.......................................................................................................
  tasks = []
  tasks.push ( handler ) =>
    ASYNC.eachLimit ds_infos, async_limit, search_ds_file, ( error ) =>
      handler error, null
  #.......................................................................................................
  if do_search_db
    warn "searching in MojiKuraDB currently not possible"
    # debug "searching in MojiKuraDB"
    # tasks.push ( handler ) =>
    #   search_db ( error ) =>
    #     handler error, null
  #.......................................................................................................
  ASYNC.parallel tasks, finalize_response
  #.........................................................................................................
  return null
#===========================================================================================================
# EVENT HANDLING
#-----------------------------------------------------------------------------------------------------------
# Wire the HTTP server and process-level event handlers.
server.on 'request', @distribute.bind @
#-----------------------------------------------------------------------------------------------------------
server.on 'close', =>
  warn "server #{server_address} closed"
#-----------------------------------------------------------------------------------------------------------
server.on 'error', ( error ) =>
  alert "when trying to start serving on #{server_info[ 'address' ]}, an error was encountered:"
  alert rpr error[ 'message' ]
  # Ports below 1024 need elevated privileges on Unix, so give an actionable hint.
  if error[ 'message' ] is "listen EACCES" and server_info[ 'port' ] < 1024
    help "since the configured port is below 1024, you should probably"
    help "try and start the server using `sudo`"
  throw error
#-----------------------------------------------------------------------------------------------------------
# Last-resort handler: announce the crash, then rethrow to terminate the process.
process.on 'uncaughtException', ( error ) ->
  alert 'uncaughtException'
  throw error
############################################################################################################
# MAKE IT SO
#-----------------------------------------------------------------------------------------------------------
@start = ( handler ) ->
  # Start listening; collects the static routes into `server_info` for reporting and
  # calls `handler null, server_info` once the server is up.
  server_info[ 'routes' ].push route for route of static_router.routeMap
  # FIXME(review): the callback parameter list `( error, P... )` looks garbled (likely a
  # mangled paste); `server.listen` callbacks receive no arguments — confirm intent.
  server.listen server_info[ 'port' ], server_info[ 'host' ], ( error, P... ) =>
    server[ 'info' ][ 'started' ] = new Date()
    handler null, server_info if handler?
# # curl -H "X-Forwarded-For: 172.16.58.3" http://192.168.178.25
|
[
{
"context": "roid: false\n\n Production: false\n\n DisplayName: \"Trivially Pro\"\n\n kMaxRemotePlayers : 35\n\n kMaxPlayerNameL",
"end": 220,
"score": 0.6868947148323059,
"start": 211,
"tag": "NAME",
"value": "Trivially"
}
] | app-src/config.coffee | hyperbotic/crowdgame-trivially-pro | 0 | # ==================================================================================================================
# Central application configuration for the Trivially Pro console app (Titanium / iOS).
# NOTE(review): the original indentation of this chunk was lost in transit; the nesting
# below is reconstructed from the `Hy.Config.*` references at the bottom of the file
# (e.g. Version.Console, Commerce.StoreKit, PlayerNetwork.registerAPI) — confirm against
# the upstream file for sections with no such reference (ActivityMonitor, NetworkService).
Hy.Config =
  AppId: "an id"
  platformAndroid: false
  Production: false # non-production builds flip several Trace/Commerce settings below
  DisplayName: "Trivially Pro"
  kMaxRemotePlayers : 35
  kMaxPlayerNameLength: 8
  kHelpPage : "??"
  PlayerStage:
    kMaxNumPlayers : 36 # kMaxRemotePlayers + 1 (for Console Player)
    kPadding : 5
  # Gameplay timing and limits.
  Dynamics:
    panicAnswerTime: 3
    revealAnswerTime: 5
    maxNumQuestions: 50 # Max number of questions that can be played at a time
  Version:
    copyright: "Copyright© 2017"
    copyright1: "Copyright© 2017"
    copyright2: ""
    Console:
      kConsoleMajorVersion : 1
      kConsoleMinorVersion : 4
      kConsoleMinor2Version : 0
      kVersionMoniker : ""
    Remote:
      kMinRemoteMajorVersion : 1
      kMinRemoteMinorVersion : 0
    # Returns true when running on iOS 4 or later (false on non-iPhone platforms).
    isiOS4Plus: ()->
      result = false
      # add iphone specific tests
      if Ti.Platform.name is 'iPhone OS'
        version = Ti.Platform.version.split "."
        major = parseInt(version[0])
        # can only test this support on a 3.2+ device
        result = major >= 4
      result
  # In-app purchase / StoreKit settings.
  Commerce:
    kReceiptDirectory : Ti.Filesystem.applicationDataDirectory + "/receipts"
    kPurchaseLogFile : Ti.Filesystem.applicationDataDirectory + "/purchases.txt"
    kReceiptTimeout : 60 * 1000
    kPurchaseTimeout : 2 * 60 * 1000
    kRestoreTimeout : 30 * 1000
    kPurchaseTEST_isPurchased: false # Set to true to short-circuit "buy" options
    kPurchaseTEST_dontShowBuy: false # Set to true to short-circuit "buy" options
    StoreKit:
      kUseSandbox : false
      kVerifyReceipt: true
  PlayerNetwork:
    kSingleUserModeOverride: true # Set true to force app into single user mode
    kMaxNumInitializationAttempts : 5
    kTimeBetweenInitializationAttempts: 5 * 1000
    # We present a fatal error if the player network hasnt hit the ready state in time
    kServiceStartupTimeout: 20 * 1000
    kHelpPage : "http:??"
    kFirebaseRootURL: "??"
    kFirebaseAppRootURL: "??"
    registerAPI : "??"
    ActivityMonitor:
      kRemotePingInterval: 30 * 1000 # This is here just for reference. See main.coffee.
      kCheckInterval : (60*1000) + 10 # How often we check the status of connections.
      kThresholdActive : (60*1000) + 10 # A client is "active" if we hear from it at least this often.
                                        # This is set to a value that's more than
                                        # double the interval that clients are actually sending pings at, so that a client can
                                        # miss a ping but still be counted as "active"
                                        #
      kThresholdAlive : 120*1000 + 10 # A client is dead if we don't hear from it within this timeframe.
                                      # We set it to greater than 4 ping cycles.
                                      #
  NetworkService:
    kQueueImmediateInterval : 1 * 1000
    kQueueBackgroundInterval : 10 * 1000
    kDefaultEventTimeout : 25 * 1000 # v1.0.7
  Rendezvous:
    URL : "??"
    URLDisplayName : "??"
    API : "??"
    MinConsoleUpdateInterval : 5 * 60 * 1000 # 5 minutes
  Update:
    kUpdateBaseURL : "??"
    # Changed protocol for naming the update manifest, as of 2.3:
    # Now there's one manifest per shipped version of the app
    #
    kUpdateCheckInterval : 20*60*1000 # 20 minutes - changed for Pro v1.0.1
    kRateAppReminderFileName : Titanium.Filesystem.applicationDataDirectory + "/AppReminderLog"
  DownloadManager:
    kCacheDirectoryPath : Ti.Filesystem.applicationDataDirectory + "/downloadCache"
    kCacheDirectoryName : "Documents/downloadCache"
    kMaxSimultaneousDownloads: 1
  # Logging / diagnostics; all three flags are forced on for non-production builds below.
  Trace:
    messagesOn : false
    memoryLogging : false
    uiTrace : false
    # HACK, as "applicationDirectory" seems to be returning a path with "Applications" at the end
    LogFileDirectory : Titanium.Filesystem.applicationDataDirectory + "../tmp"
    MarkerFilename: Titanium.Filesystem.applicationDataDirectory + "../tmp" + "/MARKER.txt"
  # Content-pack locations, limits, and validation thresholds.
  Content:
    kContentMajorVersionSupported : "001"
    kUsageDatabaseName : "CrowdGame_Trivially_Usage_database"
    kUsageDatabaseVersion : "001"
    # This is the "documents" directory
    kUpdateDirectory : Ti.Filesystem.applicationDataDirectory
    kThirdPartyContentDirectory : Ti.Filesystem.applicationDataDirectory + "/third-party"
    kShippedDirectory : Ti.Filesystem.resourcesDirectory + "/data"
    kDefaultIconDirectory : Ti.Filesystem.resourcesDirectory + "/data"
    kInventoryInterval : 60 * 1000
    kInventoryTimeout : 30 * 1000
    kContentPackMaxNameLength : 50
    kContentPackMaxLongDescriptionLength : 175
    kContentPackMaxIconSpecLength : 30
    kContentPackMaxQuestionLength : 120
    kContentPackMaxAnswerLength : 55
    kContentPackMaxAuthorVersionInfoLength : 10
    kContentPackMaxAuthorContactInfoLength : (64 + 1 + 255) #http://askville.amazon.com/maximum-length-allowed-email-address/AnswerViewer.do?requestId=1166932
    kContentPackWithHeaderMaxNumHeaderProps : 150
    kThirdPartyContentPackMinNumRecords : 5
    kThirdPartyContentPackMaxNumRecords : 200
    kAppStoreProductInfo_CustomTriviaPackFeature_1: "custom_trivia_pack_feature_1"
    kHelpPage : "??"
    kSamplesPage : "??"
    kContentPackMaxBytes : -1
    kThirdPartyContentPackMaxBytes : 1024 * 1024 # 1MB
    kThirdPartyContentBuyText: "buy"
    kThirdPartyContentNewText: "new"
    kThirdPartyContentInfoText: "info"
    kSampleCustomContestsDefault : [
      {name: "Simple Gray Template", url: "a url here"},
      {name: "Gamer Template", url: "another here"}
    ]
  Analytics:
    active : true
    Namespace : "Hy.Analytics.TriviallyPro"
    Version : "Pro-v1.1.0"
    Google:
      accountID : "your-id-here"
  Support:
    email : "??"
    contactUs : "??"
  UI:
    kTouchAndHoldDuration: 900
    kTouchAndHoldDurationStarting : 300
    kTouchAndHoldDismissDuration: 2000 # Amount of time the menu stays up after touch event has fired
# Derived setting: one update manifest per shipped console version.
Hy.Config.Update.kUpdateFilename = "trivially-pro-update-manifest--v-#{Hy.Config.Version.Console.kConsoleMajorVersion}-#{Hy.Config.Version.Console.kConsoleMinorVersion}-#{Hy.Config.Version.Console.kConsoleMinor2Version}.json"
# Non-production overrides: verbose tracing, StoreKit sandbox, faster update checks,
# longer network-startup timeout, and short-circuited purchase checks.
if not Hy.Config.Production
  Hy.Config.Trace.messagesOn = true
  Hy.Config.Trace.memoryLogging = true
  Hy.Config.Trace.uiTrace = true
  Hy.Config.Commerce.StoreKit.kUseSandbox = true
  Hy.Config.Update.kUpdateCheckInterval = 1 * 60 * 1000
  Hy.Config.PlayerNetwork.kServiceStartupTimeout = 60 * 1000
  Hy.Config.PlayerNetwork.registerAPI = "a url"
  Hy.Config.Commerce.kPurchaseTEST_isPurchased = true # HACK
  Hy.Config.Commerce.kPurchaseTEST_dontShowBuy = true # HACK
| 168022 | # ==================================================================================================================
Hy.Config =
AppId: "an id"
platformAndroid: false
Production: false
DisplayName: "<NAME> Pro"
kMaxRemotePlayers : 35
kMaxPlayerNameLength: 8
kHelpPage : "??"
PlayerStage:
kMaxNumPlayers : 36 # kMaxRemotePlayers + 1 (for Console Player)
kPadding : 5
Dynamics:
panicAnswerTime: 3
revealAnswerTime: 5
maxNumQuestions: 50 # Max number of questions that can be played at a time
Version:
copyright: "Copyright© 2017"
copyright1: "Copyright© 2017"
copyright2: ""
Console:
kConsoleMajorVersion : 1
kConsoleMinorVersion : 4
kConsoleMinor2Version : 0
kVersionMoniker : ""
Remote:
kMinRemoteMajorVersion : 1
kMinRemoteMinorVersion : 0
isiOS4Plus: ()->
result = false
# add iphone specific tests
if Ti.Platform.name is 'iPhone OS'
version = Ti.Platform.version.split "."
major = parseInt(version[0])
# can only test this support on a 3.2+ device
result = major >= 4
result
Commerce:
kReceiptDirectory : Ti.Filesystem.applicationDataDirectory + "/receipts"
kPurchaseLogFile : Ti.Filesystem.applicationDataDirectory + "/purchases.txt"
kReceiptTimeout : 60 * 1000
kPurchaseTimeout : 2 * 60 * 1000
kRestoreTimeout : 30 * 1000
kPurchaseTEST_isPurchased: false # Set to true to short-circuit "buy" options
kPurchaseTEST_dontShowBuy: false # Set to true to short-circuit "buy" options
StoreKit:
kUseSandbox : false
kVerifyReceipt: true
PlayerNetwork:
kSingleUserModeOverride: true # Set true to force app into single user mode
kMaxNumInitializationAttempts : 5
kTimeBetweenInitializationAttempts: 5 * 1000
# We present a fatal error if the player network hasnt hit the ready state in time
kServiceStartupTimeout: 20 * 1000
kHelpPage : "http:??"
kFirebaseRootURL: "??"
kFirebaseAppRootURL: "??"
registerAPI : "??"
ActivityMonitor:
kRemotePingInterval: 30 * 1000 # This is here just for reference. See main.coffee.
kCheckInterval : (60*1000) + 10 # How often we check the status of connections.
kThresholdActive : (60*1000) + 10 # A client is "active" if we hear from it at least this often.
# This is set to a value that's more than
# double the interval that clients are actually sending pings at, so that a client can
# miss a ping but still be counted as "active"
#
kThresholdAlive : 120*1000 + 10 # A client is dead if we don't hear from it within this timeframe.
# We set it to greater than 4 ping cycles.
#
NetworkService:
kQueueImmediateInterval : 1 * 1000
kQueueBackgroundInterval : 10 * 1000
kDefaultEventTimeout : 25 * 1000 # v1.0.7
Rendezvous:
URL : "??"
URLDisplayName : "??"
API : "??"
MinConsoleUpdateInterval : 5 * 60 * 1000 # 5 minutes
Update:
kUpdateBaseURL : "??"
# Changed protocol for naming the update manifest, as of 2.3:
# Now there's one manifest per shipped version of the app
#
kUpdateCheckInterval : 20*60*1000 # 20 minutes - changed for Pro v1.0.1
kRateAppReminderFileName : Titanium.Filesystem.applicationDataDirectory + "/AppReminderLog"
DownloadManager:
kCacheDirectoryPath : Ti.Filesystem.applicationDataDirectory + "/downloadCache"
kCacheDirectoryName : "Documents/downloadCache"
kMaxSimultaneousDownloads: 1
Trace:
messagesOn : false
memoryLogging : false
uiTrace : false
# HACK, as "applicationDirectory" seems to be returning a path with "Applications" at the end
LogFileDirectory : Titanium.Filesystem.applicationDataDirectory + "../tmp"
MarkerFilename: Titanium.Filesystem.applicationDataDirectory + "../tmp" + "/MARKER.txt"
Content:
kContentMajorVersionSupported : "001"
kUsageDatabaseName : "CrowdGame_Trivially_Usage_database"
kUsageDatabaseVersion : "001"
# This is the "documents" directory
kUpdateDirectory : Ti.Filesystem.applicationDataDirectory
kThirdPartyContentDirectory : Ti.Filesystem.applicationDataDirectory + "/third-party"
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kDefaultIconDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kInventoryInterval : 60 * 1000
kInventoryTimeout : 30 * 1000
kContentPackMaxNameLength : 50
kContentPackMaxLongDescriptionLength : 175
kContentPackMaxIconSpecLength : 30
kContentPackMaxQuestionLength : 120
kContentPackMaxAnswerLength : 55
kContentPackMaxAuthorVersionInfoLength : 10
kContentPackMaxAuthorContactInfoLength : (64 + 1 + 255) #http://askville.amazon.com/maximum-length-allowed-email-address/AnswerViewer.do?requestId=1166932
kContentPackWithHeaderMaxNumHeaderProps : 150
kThirdPartyContentPackMinNumRecords : 5
kThirdPartyContentPackMaxNumRecords : 200
kAppStoreProductInfo_CustomTriviaPackFeature_1: "custom_trivia_pack_feature_1"
kHelpPage : "??"
kSamplesPage : "??"
kContentPackMaxBytes : -1
kThirdPartyContentPackMaxBytes : 1024 * 1024 # 1MB
kThirdPartyContentBuyText: "buy"
kThirdPartyContentNewText: "new"
kThirdPartyContentInfoText: "info"
kSampleCustomContestsDefault : [
{name: "Simple Gray Template", url: "a url here"},
{name: "Gamer Template", url: "another here"}
]
Analytics:
active : true
Namespace : "Hy.Analytics.TriviallyPro"
Version : "Pro-v1.1.0"
Google:
accountID : "your-id-here"
Support:
email : "??"
contactUs : "??"
UI:
kTouchAndHoldDuration: 900
kTouchAndHoldDurationStarting : 300
kTouchAndHoldDismissDuration: 2000 # Amount of time the menu stays up after touch event has fired
Hy.Config.Update.kUpdateFilename = "trivially-pro-update-manifest--v-#{Hy.Config.Version.Console.kConsoleMajorVersion}-#{Hy.Config.Version.Console.kConsoleMinorVersion}-#{Hy.Config.Version.Console.kConsoleMinor2Version}.json"
if not Hy.Config.Production
Hy.Config.Trace.messagesOn = true
Hy.Config.Trace.memoryLogging = true
Hy.Config.Trace.uiTrace = true
Hy.Config.Commerce.StoreKit.kUseSandbox = true
Hy.Config.Update.kUpdateCheckInterval = 1 * 60 * 1000
Hy.Config.PlayerNetwork.kServiceStartupTimeout = 60 * 1000
Hy.Config.PlayerNetwork.registerAPI = "a url"
Hy.Config.Commerce.kPurchaseTEST_isPurchased = true # HACK
Hy.Config.Commerce.kPurchaseTEST_dontShowBuy = true # HACK
| true | # ==================================================================================================================
Hy.Config =
AppId: "an id"
platformAndroid: false
Production: false
DisplayName: "PI:NAME:<NAME>END_PI Pro"
kMaxRemotePlayers : 35
kMaxPlayerNameLength: 8
kHelpPage : "??"
PlayerStage:
kMaxNumPlayers : 36 # kMaxRemotePlayers + 1 (for Console Player)
kPadding : 5
Dynamics:
panicAnswerTime: 3
revealAnswerTime: 5
maxNumQuestions: 50 # Max number of questions that can be played at a time
Version:
copyright: "Copyright© 2017"
copyright1: "Copyright© 2017"
copyright2: ""
Console:
kConsoleMajorVersion : 1
kConsoleMinorVersion : 4
kConsoleMinor2Version : 0
kVersionMoniker : ""
Remote:
kMinRemoteMajorVersion : 1
kMinRemoteMinorVersion : 0
isiOS4Plus: ()->
result = false
# add iphone specific tests
if Ti.Platform.name is 'iPhone OS'
version = Ti.Platform.version.split "."
major = parseInt(version[0])
# can only test this support on a 3.2+ device
result = major >= 4
result
Commerce:
kReceiptDirectory : Ti.Filesystem.applicationDataDirectory + "/receipts"
kPurchaseLogFile : Ti.Filesystem.applicationDataDirectory + "/purchases.txt"
kReceiptTimeout : 60 * 1000
kPurchaseTimeout : 2 * 60 * 1000
kRestoreTimeout : 30 * 1000
kPurchaseTEST_isPurchased: false # Set to true to short-circuit "buy" options
kPurchaseTEST_dontShowBuy: false # Set to true to short-circuit "buy" options
StoreKit:
kUseSandbox : false
kVerifyReceipt: true
PlayerNetwork:
kSingleUserModeOverride: true # Set true to force app into single user mode
kMaxNumInitializationAttempts : 5
kTimeBetweenInitializationAttempts: 5 * 1000
# We present a fatal error if the player network hasnt hit the ready state in time
kServiceStartupTimeout: 20 * 1000
kHelpPage : "http:??"
kFirebaseRootURL: "??"
kFirebaseAppRootURL: "??"
registerAPI : "??"
ActivityMonitor:
kRemotePingInterval: 30 * 1000 # This is here just for reference. See main.coffee.
kCheckInterval : (60*1000) + 10 # How often we check the status of connections.
kThresholdActive : (60*1000) + 10 # A client is "active" if we hear from it at least this often.
# This is set to a value that's more than
# double the interval that clients are actually sending pings at, so that a client can
# miss a ping but still be counted as "active"
#
kThresholdAlive : 120*1000 + 10 # A client is dead if we don't hear from it within this timeframe.
# We set it to greater than 4 ping cycles.
#
NetworkService:
kQueueImmediateInterval : 1 * 1000
kQueueBackgroundInterval : 10 * 1000
kDefaultEventTimeout : 25 * 1000 # v1.0.7
Rendezvous:
URL : "??"
URLDisplayName : "??"
API : "??"
MinConsoleUpdateInterval : 5 * 60 * 1000 # 5 minutes
Update:
kUpdateBaseURL : "??"
# Changed protocol for naming the update manifest, as of 2.3:
# Now there's one manifest per shipped version of the app
#
kUpdateCheckInterval : 20*60*1000 # 20 minutes - changed for Pro v1.0.1
kRateAppReminderFileName : Titanium.Filesystem.applicationDataDirectory + "/AppReminderLog"
DownloadManager:
kCacheDirectoryPath : Ti.Filesystem.applicationDataDirectory + "/downloadCache"
kCacheDirectoryName : "Documents/downloadCache"
kMaxSimultaneousDownloads: 1
Trace:
messagesOn : false
memoryLogging : false
uiTrace : false
# HACK, as "applicationDirectory" seems to be returning a path with "Applications" at the end
LogFileDirectory : Titanium.Filesystem.applicationDataDirectory + "../tmp"
MarkerFilename: Titanium.Filesystem.applicationDataDirectory + "../tmp" + "/MARKER.txt"
Content:
kContentMajorVersionSupported : "001"
kUsageDatabaseName : "CrowdGame_Trivially_Usage_database"
kUsageDatabaseVersion : "001"
# This is the "documents" directory
kUpdateDirectory : Ti.Filesystem.applicationDataDirectory
kThirdPartyContentDirectory : Ti.Filesystem.applicationDataDirectory + "/third-party"
kShippedDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kDefaultIconDirectory : Ti.Filesystem.resourcesDirectory + "/data"
kInventoryInterval : 60 * 1000
kInventoryTimeout : 30 * 1000
kContentPackMaxNameLength : 50
kContentPackMaxLongDescriptionLength : 175
kContentPackMaxIconSpecLength : 30
kContentPackMaxQuestionLength : 120
kContentPackMaxAnswerLength : 55
kContentPackMaxAuthorVersionInfoLength : 10
kContentPackMaxAuthorContactInfoLength : (64 + 1 + 255) #http://askville.amazon.com/maximum-length-allowed-email-address/AnswerViewer.do?requestId=1166932
kContentPackWithHeaderMaxNumHeaderProps : 150
kThirdPartyContentPackMinNumRecords : 5
kThirdPartyContentPackMaxNumRecords : 200
kAppStoreProductInfo_CustomTriviaPackFeature_1: "custom_trivia_pack_feature_1"
kHelpPage : "??"
kSamplesPage : "??"
kContentPackMaxBytes : -1
kThirdPartyContentPackMaxBytes : 1024 * 1024 # 1MB
kThirdPartyContentBuyText: "buy"
kThirdPartyContentNewText: "new"
kThirdPartyContentInfoText: "info"
kSampleCustomContestsDefault : [
{name: "Simple Gray Template", url: "a url here"},
{name: "Gamer Template", url: "another here"}
]
Analytics:
active : true
Namespace : "Hy.Analytics.TriviallyPro"
Version : "Pro-v1.1.0"
Google:
accountID : "your-id-here"
Support:
email : "??"
contactUs : "??"
UI:
kTouchAndHoldDuration: 900
kTouchAndHoldDurationStarting : 300
kTouchAndHoldDismissDuration: 2000 # Amount of time the menu stays up after touch event has fired
Hy.Config.Update.kUpdateFilename = "trivially-pro-update-manifest--v-#{Hy.Config.Version.Console.kConsoleMajorVersion}-#{Hy.Config.Version.Console.kConsoleMinorVersion}-#{Hy.Config.Version.Console.kConsoleMinor2Version}.json"
if not Hy.Config.Production
Hy.Config.Trace.messagesOn = true
Hy.Config.Trace.memoryLogging = true
Hy.Config.Trace.uiTrace = true
Hy.Config.Commerce.StoreKit.kUseSandbox = true
Hy.Config.Update.kUpdateCheckInterval = 1 * 60 * 1000
Hy.Config.PlayerNetwork.kServiceStartupTimeout = 60 * 1000
Hy.Config.PlayerNetwork.registerAPI = "a url"
Hy.Config.Commerce.kPurchaseTEST_isPurchased = true # HACK
Hy.Config.Commerce.kPurchaseTEST_dontShowBuy = true # HACK
|
[
{
"context": "\\[\\]/\"'\\*=~\\-\\u2013\\u2014])|$)///gi\n\t,\n\t\tosis: [\"1John\"]\n\t\tregexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-",
"end": 12485,
"score": 0.5226914882659912,
"start": 12481,
"tag": "NAME",
"value": "John"
},
{
"context": "\\[\\]/\"'\\*=~\\-\\u2013\\u2014])|$)//... | lib/bible-tools/lib/Bible-Passage-Reference-Parser/src/af/regexps.coffee | saiba-mais/bible-lessons | 0 | bcv_parser::regexps.space = "[\\s\\xa0]"
# Building-block regexps for the Afrikaans ("af") Bible passage-reference parser.
# Convention: `\x1f` delimits book-index placeholders substituted in a pre-pass;
# `\x1e` marks translation sequences. (Heregexes: whitespace is insignificant and
# `#` starts an inline comment.)
bcv_parser::regexps.escaped_passage = ///
	(?:^ | [^\x1f\x1e\dA-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ] ) # Beginning of string or not in the middle of a word or immediately following another book. Only count a book if it's part of a sequence: `Matt5John3` is OK, but not `1Matt5John3`
	(
		# Start inverted book/chapter (cb)
		(?:
			(?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
				\d+ \s* (?: [\u2013\u2014\-] | through | thru | to) \s* \d+ \s*
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
			| (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
				\d+ \s*
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
			| (?: \d+ (?: th | nd | st ) \s*
				ch (?: apter | a?pt\.? | a?p?\.? )? \s* #no plurals here since it's a single chapter
				(?: from | of | in ) (?: \s+ the \s+ book \s+ of )? \s* )
		)? # End inverted book/chapter (cb)
		\x1f(\d+)(?:/\d+)?\x1f #book
		(?:
			/\d+\x1f #special Psalm chapters
			| [\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014]
			| title (?! [a-z] ) #could be followed by a number
			| Bybelvers | hoofstuk | asook | hfst | vers | tot | ff | en
			| [a-e] (?! \w ) #a-e allows 1:1a
			| $ #or the end of the string
		)+
	)
	///gi
# These are the only valid ways to end a potential passage match. The closing parenthesis allows for fully capturing parentheses surrounding translations (ESV**)**. The last one, `[\d\x1f]` needs not to be +; otherwise `Gen5ff` becomes `\x1f0\x1f5ff`, and `adjust_regexp_end` matches the `\x1f5` and incorrectly dangles the ff.
bcv_parser::regexps.match_end_split = ///
	\d \W* title
	| \d \W* ff (?: [\s\xa0*]* \.)?
	| \d [\s\xa0*]* [a-e] (?! \w )
	| \x1e (?: [\s\xa0*]* [)\]\uff09] )? #ff09 is a full-width closing parenthesis
	| [\d\x1f]
	///gi
# Strips the control characters used as internal delimiters.
bcv_parser::regexps.control = /[\x1e\x1f]/g
# Character class for what may NOT precede a book name (mid-word guard).
bcv_parser::regexps.pre_book = "[^A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ]"
# Ordinal book prefixes: "1"/"I", "2"/"II", "3"/"III", each with optional dot and spacing.
bcv_parser::regexps.first = "(?:1|I)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.second = "(?:2|II)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.third = "(?:3|III)\\.?#{bcv_parser::regexps.space}*"
# Range connectors: `range_and` joins sequences ("en", "asook"); `range_only` joins true ranges ("tot").
bcv_parser::regexps.range_and = "(?:[&\u2013\u2014-]|(?:en|asook)|tot)"
bcv_parser::regexps.range_only = "(?:[\u2013\u2014-]|tot)"
# Each book regexp should return two parenthesized objects: an optional preliminary character and the book itself.
bcv_parser::regexps.get_books = (include_apocrypha, case_sensitive) ->
books = [
osis: ["Ps"]
apocrypha: true
extra: "2"
regexp: ///(\b)( # Don't match a preceding \d like usual because we only want to match a valid OSIS, which will never have a preceding digit.
Ps151
# Always follwed by ".1"; the regular Psalms parser can handle `Ps151` on its own.
)(?=\.1)///g # Case-sensitive because we only want to match a valid OSIS.
,
osis: ["Gen"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Gen(?:esis)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Exod"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:ks(?:odus)?|xod(?:us)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bel"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Bel)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Lev(?:[i\xED]tikus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Num"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:N(?:\xFAmeri|um(?:eri)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sir"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sir)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Wis"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Wis)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lam"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Klaagl(?:iedere)?|Lam)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["EpJer"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:EpJer)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Op(?:enbaring)?|Rev)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrMan"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:PrMan)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Deut"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Deut(?:eron[o\xF3]mium)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Josh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jos(?:ua|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Judg"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Rig(?:ters)?|Judg)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ruth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ruth?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1Esd)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2Esd)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Isa"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jes(?:aja)?|Isa)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Sam(?:uel)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Sam(?:uel)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Kon(?:ings)?|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Kon(?:ings)?|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Kron(?:ieke)?|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Kron(?:ieke)?|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezra"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E[sz]ra)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Neh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Neh(?:emia)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["GkEsth"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:GkEsth)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Esth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Est(?:er|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Job"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Job)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ps"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ps(?:alms?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrAzar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:PrAzar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Prov"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Spr(?:euke)?|Prov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eccl"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Pred(?:iker)?|Eccl)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["SgThree"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:SgThree)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Song"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hoogl(?:ied)?|Song)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jer"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jer(?:emia)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezek"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:seg(?:i[e\xEB]l)?|zek))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Dan"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Dan(?:i[e\xEB]l)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hos(?:ea)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Joel"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jo[e\xEB]l)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Amos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Am(?:os)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Obad"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ob(?:ad(?:ja)?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jonah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jonah?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mic"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mi(?:ga|c))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Nah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Nah(?:um)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hab"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hab(?:akuk)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zeph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sef(?:anja)?|Zeph)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hag"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hag(?:gai)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zech"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sag(?:aria)?|Zech)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mal(?:eagi)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Matt"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Matt(?:h[e\xE9][u\xFC]s|eus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mark"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mark(?:us)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Luke"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Luk(?:as|e)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1John)|(?:[1I](?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2John)|(?:II(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?)|2(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3John"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:3John)|(?:III(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?)|3(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["John"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Joh(?:annes|n)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Acts"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hand(?:elinge)?|Acts)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rom"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Rom(?:eine)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?)|2(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|Cor))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?)|1(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|Cor))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Gal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Gal(?:[a\xE1]si[e\xEB]rs)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:f(?:[e\xE9]si[e\xEB]rs)?|ph))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phil"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Fil(?:ippense)?|Phil)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Col"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Kol(?:ossense)?|Col)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess))|2(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|Thess))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess))|1(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|Thess))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?)|2(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?|Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?)|1(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?|Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Titus"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tit(?:us)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phlm"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Filem(?:on)?|Phlm)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Heb"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Heb(?:re[e\xEB]rs)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jas"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ja(?:k(?:obus)?|s))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?)|2(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?|Pet))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?)|1(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?|Pet))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jude"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jud(?:as|e)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Tob"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tob)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jdt"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jdt)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Bar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sus"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sus)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:3Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["4Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:4Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
]
# Short-circuit the look if we know we want all the books.
return books if include_apocrypha is true and case_sensitive is "none"
# Filter out books in the Apocrypha if we don't want them. `Array.map` isn't supported below IE9.
out = []
for book in books
continue if include_apocrypha is false and book.apocrypha? and book.apocrypha is true
if case_sensitive is "books"
book.regexp = new RegExp book.regexp.source, "g"
out.push book
out
# Default to not using the Apocrypha
bcv_parser::regexps.books = bcv_parser::regexps.get_books false, "none"
| 140342 | bcv_parser::regexps.space = "[\\s\\xa0]"
bcv_parser::regexps.escaped_passage = ///
(?:^ | [^\x1f\x1e\dA-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ] ) # Beginning of string or not in the middle of a word or immediately following another book. Only count a book if it's part of a sequence: `Matt5John3` is OK, but not `1Matt5John3`
(
# Start inverted book/chapter (cb)
(?:
(?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
\d+ \s* (?: [\u2013\u2014\-] | through | thru | to) \s* \d+ \s*
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
| (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
\d+ \s*
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
| (?: \d+ (?: th | nd | st ) \s*
ch (?: apter | a?pt\.? | a?p?\.? )? \s* #no plurals here since it's a single chapter
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )? \s* )
)? # End inverted book/chapter (cb)
\x1f(\d+)(?:/\d+)?\x1f #book
(?:
/\d+\x1f #special Psalm chapters
| [\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014]
| title (?! [a-z] ) #could be followed by a number
| Bybelvers | hoofstuk | asook | hfst | vers | tot | ff | en
| [a-e] (?! \w ) #a-e allows 1:1a
| $ #or the end of the string
)+
)
///gi
# These are the only valid ways to end a potential passage match. The closing parenthesis allows for fully capturing parentheses surrounding translations (ESV**)**. The last one, `[\d\x1f]` needs not to be +; otherwise `Gen5ff` becomes `\x1f0\x1f5ff`, and `adjust_regexp_end` matches the `\x1f5` and incorrectly dangles the ff.
bcv_parser::regexps.match_end_split = ///
\d \W* title
| \d \W* ff (?: [\s\xa0*]* \.)?
| \d [\s\xa0*]* [a-e] (?! \w )
| \x1e (?: [\s\xa0*]* [)\]\uff09] )? #ff09 is a full-width closing parenthesis
| [\d\x1f]
///gi
bcv_parser::regexps.control = /[\x1e\x1f]/g
bcv_parser::regexps.pre_book = "[^A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ]"
bcv_parser::regexps.first = "(?:1|I)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.second = "(?:2|II)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.third = "(?:3|III)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.range_and = "(?:[&\u2013\u2014-]|(?:en|asook)|tot)"
bcv_parser::regexps.range_only = "(?:[\u2013\u2014-]|tot)"
# Each book regexp should return two parenthesized objects: an optional preliminary character and the book itself.
bcv_parser::regexps.get_books = (include_apocrypha, case_sensitive) ->
books = [
osis: ["Ps"]
apocrypha: true
extra: "2"
regexp: ///(\b)( # Don't match a preceding \d like usual because we only want to match a valid OSIS, which will never have a preceding digit.
Ps151
# Always follwed by ".1"; the regular Psalms parser can handle `Ps151` on its own.
)(?=\.1)///g # Case-sensitive because we only want to match a valid OSIS.
,
osis: ["Gen"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Gen(?:esis)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Exod"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:ks(?:odus)?|xod(?:us)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bel"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Bel)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Lev(?:[i\xED]tikus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Num"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:N(?:\xFAmeri|um(?:eri)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sir"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sir)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Wis"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Wis)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lam"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Klaagl(?:iedere)?|Lam)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["EpJer"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:EpJer)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Op(?:enbaring)?|Rev)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrMan"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:PrMan)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Deut"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Deut(?:eron[o\xF3]mium)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Josh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jos(?:ua|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Judg"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Rig(?:ters)?|Judg)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ruth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ruth?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1Esd)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2Esd)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Isa"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jes(?:aja)?|Isa)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Sam(?:uel)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Sam(?:uel)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Kon(?:ings)?|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Kon(?:ings)?|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Kron(?:ieke)?|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Kron(?:ieke)?|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezra"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E[sz]ra)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Neh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Neh(?:emia)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["GkEsth"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:GkEsth)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Esth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Est(?:er|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Job"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Job)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ps"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ps(?:alms?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrAzar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:PrAzar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Prov"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Spr(?:euke)?|Prov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eccl"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Pred(?:iker)?|Eccl)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["SgThree"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:SgThree)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Song"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hoogl(?:ied)?|Song)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jer"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jer(?:emia)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezek"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:seg(?:i[e\xEB]l)?|zek))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Dan"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Dan(?:i[e\xEB]l)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hos(?:ea)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Joel"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jo[e\xEB]l)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Amos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Am(?:os)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Obad"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ob(?:ad(?:ja)?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jonah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jonah?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mic"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mi(?:ga|c))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Nah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Nah(?:um)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hab"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hab(?:akuk)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zeph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sef(?:anja)?|Zeph)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hag"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hag(?:gai)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zech"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sag(?:aria)?|Zech)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mal(?:eagi)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Matt"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Matt(?:h[e\xE9][u\xFC]s|eus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mark"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mark(?:us)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Luke"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Luk(?:as|e)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1<NAME>"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1John)|(?:[1I](?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2<NAME>"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2John)|(?:II(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?)|2(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3<NAME>"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:3<NAME>)|(?:III(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?)|3(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["John"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Joh(?:annes|n)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Acts"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hand(?:elinge)?|Acts)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rom"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Rom(?:eine)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?)|2(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|Cor))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?)|1(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|Cor))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Gal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Gal(?:[a\xE1]si[e\xEB]rs)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:f(?:[e\xE9]si[e\xEB]rs)?|ph))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phil"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Fil(?:ippense)?|Phil)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Col"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Kol(?:ossense)?|Col)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess))|2(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|Thess))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess))|1(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|Thess))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?)|2(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?|Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?)|1(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?|Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Titus"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tit(?:us)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phlm"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Filem(?:on)?|Phlm)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Heb"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Heb(?:re[e\xEB]rs)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jas"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ja(?:k(?:obus)?|s))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?)|2(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?|Pet))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?)|1(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?|Pet))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jude"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jud(?:as|e)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Tob"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tob)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jdt"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jdt)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Bar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sus"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sus)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:3Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["4Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:4Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
]
# Short-circuit the look if we know we want all the books.
return books if include_apocrypha is true and case_sensitive is "none"
# Filter out books in the Apocrypha if we don't want them. `Array.map` isn't supported below IE9.
out = []
for book in books
continue if include_apocrypha is false and book.apocrypha? and book.apocrypha is true
if case_sensitive is "books"
book.regexp = new RegExp book.regexp.source, "g"
out.push book
out
# Default to not using the Apocrypha
bcv_parser::regexps.books = bcv_parser::regexps.get_books false, "none"
| true | bcv_parser::regexps.space = "[\\s\\xa0]"
bcv_parser::regexps.escaped_passage = ///
(?:^ | [^\x1f\x1e\dA-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ] ) # Beginning of string or not in the middle of a word or immediately following another book. Only count a book if it's part of a sequence: `Matt5John3` is OK, but not `1Matt5John3`
(
# Start inverted book/chapter (cb)
(?:
(?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
\d+ \s* (?: [\u2013\u2014\-] | through | thru | to) \s* \d+ \s*
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
| (?: ch (?: apters? | a?pts?\.? | a?p?s?\.? )? \s*
\d+ \s*
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )?\s* )
| (?: \d+ (?: th | nd | st ) \s*
ch (?: apter | a?pt\.? | a?p?\.? )? \s* #no plurals here since it's a single chapter
(?: from | of | in ) (?: \s+ the \s+ book \s+ of )? \s* )
)? # End inverted book/chapter (cb)
\x1f(\d+)(?:/\d+)?\x1f #book
(?:
/\d+\x1f #special Psalm chapters
| [\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014]
| title (?! [a-z] ) #could be followed by a number
| Bybelvers | hoofstuk | asook | hfst | vers | tot | ff | en
| [a-e] (?! \w ) #a-e allows 1:1a
| $ #or the end of the string
)+
)
///gi
# These are the only valid ways to end a potential passage match. The closing parenthesis allows for fully capturing parentheses surrounding translations (ESV**)**. The last one, `[\d\x1f]` needs not to be +; otherwise `Gen5ff` becomes `\x1f0\x1f5ff`, and `adjust_regexp_end` matches the `\x1f5` and incorrectly dangles the ff.
bcv_parser::regexps.match_end_split = ///
\d \W* title
| \d \W* ff (?: [\s\xa0*]* \.)?
| \d [\s\xa0*]* [a-e] (?! \w )
| \x1e (?: [\s\xa0*]* [)\]\uff09] )? #ff09 is a full-width closing parenthesis
| [\d\x1f]
///gi
bcv_parser::regexps.control = /[\x1e\x1f]/g
bcv_parser::regexps.pre_book = "[^A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ]"
bcv_parser::regexps.first = "(?:1|I)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.second = "(?:2|II)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.third = "(?:3|III)\\.?#{bcv_parser::regexps.space}*"
bcv_parser::regexps.range_and = "(?:[&\u2013\u2014-]|(?:en|asook)|tot)"
bcv_parser::regexps.range_only = "(?:[\u2013\u2014-]|tot)"
# Each book regexp should return two parenthesized objects: an optional preliminary character and the book itself.
bcv_parser::regexps.get_books = (include_apocrypha, case_sensitive) ->
books = [
osis: ["Ps"]
apocrypha: true
extra: "2"
regexp: ///(\b)( # Don't match a preceding \d like usual because we only want to match a valid OSIS, which will never have a preceding digit.
Ps151
# Always follwed by ".1"; the regular Psalms parser can handle `Ps151` on its own.
)(?=\.1)///g # Case-sensitive because we only want to match a valid OSIS.
,
osis: ["Gen"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Gen(?:esis)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Exod"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:ks(?:odus)?|xod(?:us)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bel"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Bel)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Lev(?:[i\xED]tikus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Num"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:N(?:\xFAmeri|um(?:eri)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sir"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sir)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Wis"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Wis)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Lam"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Klaagl(?:iedere)?|Lam)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["EpJer"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:EpJer)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rev"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Op(?:enbaring)?|Rev)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrMan"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:PrMan)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Deut"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Deut(?:eron[o\xF3]mium)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Josh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jos(?:ua|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Judg"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Rig(?:ters)?|Judg)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ruth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ruth?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1Esd)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Esd"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2Esd)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Isa"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jes(?:aja)?|Isa)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Sam(?:uel)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Sam"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Sam(?:uel)?|Sam))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Kon(?:ings)?|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Kgs"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Kon(?:ings)?|Kgs))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2(?:[\s\xa0]*Kron(?:ieke)?|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Chr"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1(?:[\s\xa0]*Kron(?:ieke)?|Chr))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezra"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E[sz]ra)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Neh"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Neh(?:emia)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["GkEsth"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:GkEsth)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Esth"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Est(?:er|h)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Job"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Job)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ps"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ps(?:alms?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["PrAzar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:PrAzar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Prov"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Spr(?:euke)?|Prov)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eccl"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Pred(?:iker)?|Eccl)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["SgThree"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:SgThree)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Song"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hoogl(?:ied)?|Song)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jer"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jer(?:emia)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Ezek"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:seg(?:i[e\xEB]l)?|zek))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Dan"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Dan(?:i[e\xEB]l)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hos(?:ea)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Joel"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jo[e\xEB]l)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Amos"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Am(?:os)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Obad"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ob(?:ad(?:ja)?)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jonah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jonah?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mic"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mi(?:ga|c))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Nah"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Nah(?:um)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hab"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hab(?:akuk)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zeph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sef(?:anja)?|Zeph)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Hag"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hag(?:gai)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Zech"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sag(?:aria)?|Zech)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mal(?:eagi)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Matt"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Matt(?:h[e\xE9][u\xFC]s|eus)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Mark"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Mark(?:us)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Luke"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Luk(?:as|e)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1PI:NAME:<NAME>END_PI"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1John)|(?:[1I](?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2PI:NAME:<NAME>END_PI"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2John)|(?:II(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?)|2(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3PI:NAME:<NAME>END_PI"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:3PI:NAME:<NAME>END_PI)|(?:III(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?)|3(?:\.[\s\xa0]*Joh(?:annes)?|[\s\xa0]*Joh(?:annes)?))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["John"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Joh(?:annes|n)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Acts"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Hand(?:elinge)?|Acts)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Rom"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Rom(?:eine)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?)|2(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|Cor))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Cor"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?)|1(?:\.[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|[\s\xa0]*Kor(?:int(?:hi[e\xEB]|i[e\xEB])rs)?|Cor))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Gal"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Gal(?:[a\xE1]si[e\xEB]rs)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Eph"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:E(?:f(?:[e\xE9]si[e\xEB]rs)?|ph))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phil"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Fil(?:ippense)?|Phil)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Col"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Kol(?:ossense)?|Col)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess))|2(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|Thess))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Thess"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess))|1(?:\.[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|[\s\xa0]*T(?:ess(?:a(?:lonis|onic)ense)?|hess)|Thess))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?)|2(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?|Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Tim"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?)|1(?:\.[\s\xa0]*Tim(?:oteus)?|[\s\xa0]*Tim(?:oteus)?|Tim))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Titus"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tit(?:us)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Phlm"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Filem(?:on)?|Phlm)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Heb"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Heb(?:re[e\xEB]rs)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jas"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Ja(?:k(?:obus)?|s))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:II(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?)|2(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?|Pet))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Pet"]
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:I(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?)|1(?:\.[\s\xa0]*Pet(?:rus)?|[\s\xa0]*Pet(?:rus)?|Pet))
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jude"]
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jud(?:as|e)?)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Tob"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Tob)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Jdt"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Jdt)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Bar"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Bar)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["Sus"]
apocrypha: true
regexp: ///(^|#{bcv_parser::regexps.pre_book})(
(?:Sus)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["2Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:2Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["3Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:3Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["4Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:4Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
,
osis: ["1Macc"]
apocrypha: true
regexp: ///(^|[^0-9A-Za-zªµºÀ-ÖØ-öø-ɏḀ-ỿⱠ-ⱿꜢ-ꞈꞋ-ꞎꞐ-ꞓꞠ-Ɦꟸ-ꟿ])(
(?:1Macc)
)(?:(?=[\d\s\xa0.:,;\x1e\x1f&\(\)\uff08\uff09\[\]/"'\*=~\-\u2013\u2014])|$)///gi
]
# Short-circuit the look if we know we want all the books.
return books if include_apocrypha is true and case_sensitive is "none"
# Filter out books in the Apocrypha if we don't want them. `Array.map` isn't supported below IE9.
out = []
for book in books
continue if include_apocrypha is false and book.apocrypha? and book.apocrypha is true
if case_sensitive is "books"
book.regexp = new RegExp book.regexp.source, "g"
out.push book
out
# Default to not using the Apocrypha
bcv_parser::regexps.books = bcv_parser::regexps.get_books false, "none"
|
[
{
"context": "nput',\n group: 'from html'\n label: 'Sample'\n description: 'From html template'\n ",
"end": 203,
"score": 0.8798021078109741,
"start": 197,
"tag": "NAME",
"value": "Sample"
}
] | example/demo.coffee | 18601673727/angular-form-builder | 0 | angular.module 'app', ['builder', 'builder.components', 'validator.rules']
.run ['$builder', ($builder) ->
$builder.registerComponent 'sampleInput',
group: 'from html'
label: 'Sample'
description: 'From html template'
placeholder: 'placeholder'
required: no
validationOptions: [
{label: 'none', rule: '/.*/'}
{label: 'number', rule: '[number]'}
{label: 'email', rule: '[email]'}
{label: 'url', rule: '[url]'}
]
handleTemplate:
"""
<label class='fb-outline'>{{label}}</label>
"""
templateUrl: 'example/template.html'
popoverTemplateUrl: 'example/popoverTemplate.html'
# ----------------------------------------
# two text input
# ----------------------------------------
$builder.registerComponent 'name',
group: 'Default'
label: 'Name'
required: no
arrayToText: yes
handleTemplate:
"""
<label class='fb-outline'>{{label}}</label>
"""
template:
"""
<div class="form-group clearfix">
<label for="{{formName+index}}" class="col-md-4 control-label" ng-class="{'fb-required':required}">{{label}}</label>
<div class="col-md-8 clearfix">
<input type='hidden' ng-model="inputText" validator-required="{{required}}" validator-group="{{formName}}"/>
<div class="col-sm-6" style="padding-left: 0;">
<input type="text"
ng-model="inputArray[0]"
class="form-control" id="{{formName+index}}-0"/>
<p class='help-block'>First name</p>
</div>
<div class="col-sm-6" style="padding-left: 0;">
<input type="text"
ng-model="inputArray[1]"
class="form-control" id="{{formName+index}}-1"/>
<p class='help-block'>Last name</p>
</div>
</div>
</div>
"""
popoverTemplate:
"""
<form>
<div class="form-group">
<label class='control-label'>Label</label>
<input type='text' ng-model="label" validator="[required]" class='form-control'/>
</div>
<div class="checkbox">
<label>
<input type='checkbox' ng-model="required" />
Required
</label>
</div>
<hr/>
<div class='form-group'>
<input type='submit' ng-click="popover.save($event)" class='btn btn-primary' value='Save'/>
<input type='button' ng-click="popover.cancel($event)" class='btn btn-default' value='Cancel'/>
<input type='button' ng-click="popover.remove($event)" class='btn btn-danger' value='Delete'/>
</div>
</form>
"""
]
.controller 'DemoController', ['$scope', '$builder', '$validator', ($scope, $builder, $validator) ->
# ----------------------------------------
# builder
# ----------------------------------------
textbox = $builder.addFormObject 'default',
id: 'textbox'
component: 'textInput'
label: 'Name'
description: 'Your name'
placeholder: 'Your name'
required: yes
editable: no
checkbox = $builder.addFormObject 'default',
id: 'checkbox'
component: 'checkbox'
label: 'Pets'
description: 'Do you have any pets?'
options: ['Dog', 'Cat', 'Tiger']
$builder.addFormObject 'default',
component: 'sampleInput'
# formObjects
$scope.form = $builder.forms['default']
# ----------------------------------------
# form
# ----------------------------------------
# user input value
$scope.input = []
$scope.defaultValue = {}
# formObjectId: default value
$scope.defaultValue[textbox.id] = 'default value'
$scope.defaultValue[checkbox.id] = [yes, yes, no]
$scope.cleanComponent = () ->
$builder.cleanComponent('default')
$scope.submit = ->
$validator.validate $scope, 'default'
.success -> console.log 'success'
.error -> console.log 'error'
]
| 130279 | angular.module 'app', ['builder', 'builder.components', 'validator.rules']
.run ['$builder', ($builder) ->
$builder.registerComponent 'sampleInput',
group: 'from html'
label: '<NAME>'
description: 'From html template'
placeholder: 'placeholder'
required: no
validationOptions: [
{label: 'none', rule: '/.*/'}
{label: 'number', rule: '[number]'}
{label: 'email', rule: '[email]'}
{label: 'url', rule: '[url]'}
]
handleTemplate:
"""
<label class='fb-outline'>{{label}}</label>
"""
templateUrl: 'example/template.html'
popoverTemplateUrl: 'example/popoverTemplate.html'
# ----------------------------------------
# two text input
# ----------------------------------------
$builder.registerComponent 'name',
group: 'Default'
label: 'Name'
required: no
arrayToText: yes
handleTemplate:
"""
<label class='fb-outline'>{{label}}</label>
"""
template:
"""
<div class="form-group clearfix">
<label for="{{formName+index}}" class="col-md-4 control-label" ng-class="{'fb-required':required}">{{label}}</label>
<div class="col-md-8 clearfix">
<input type='hidden' ng-model="inputText" validator-required="{{required}}" validator-group="{{formName}}"/>
<div class="col-sm-6" style="padding-left: 0;">
<input type="text"
ng-model="inputArray[0]"
class="form-control" id="{{formName+index}}-0"/>
<p class='help-block'>First name</p>
</div>
<div class="col-sm-6" style="padding-left: 0;">
<input type="text"
ng-model="inputArray[1]"
class="form-control" id="{{formName+index}}-1"/>
<p class='help-block'>Last name</p>
</div>
</div>
</div>
"""
popoverTemplate:
"""
<form>
<div class="form-group">
<label class='control-label'>Label</label>
<input type='text' ng-model="label" validator="[required]" class='form-control'/>
</div>
<div class="checkbox">
<label>
<input type='checkbox' ng-model="required" />
Required
</label>
</div>
<hr/>
<div class='form-group'>
<input type='submit' ng-click="popover.save($event)" class='btn btn-primary' value='Save'/>
<input type='button' ng-click="popover.cancel($event)" class='btn btn-default' value='Cancel'/>
<input type='button' ng-click="popover.remove($event)" class='btn btn-danger' value='Delete'/>
</div>
</form>
"""
]
.controller 'DemoController', ['$scope', '$builder', '$validator', ($scope, $builder, $validator) ->
# ----------------------------------------
# builder
# ----------------------------------------
textbox = $builder.addFormObject 'default',
id: 'textbox'
component: 'textInput'
label: 'Name'
description: 'Your name'
placeholder: 'Your name'
required: yes
editable: no
checkbox = $builder.addFormObject 'default',
id: 'checkbox'
component: 'checkbox'
label: 'Pets'
description: 'Do you have any pets?'
options: ['Dog', 'Cat', 'Tiger']
$builder.addFormObject 'default',
component: 'sampleInput'
# formObjects
$scope.form = $builder.forms['default']
# ----------------------------------------
# form
# ----------------------------------------
# user input value
$scope.input = []
$scope.defaultValue = {}
# formObjectId: default value
$scope.defaultValue[textbox.id] = 'default value'
$scope.defaultValue[checkbox.id] = [yes, yes, no]
$scope.cleanComponent = () ->
$builder.cleanComponent('default')
$scope.submit = ->
$validator.validate $scope, 'default'
.success -> console.log 'success'
.error -> console.log 'error'
]
| true | angular.module 'app', ['builder', 'builder.components', 'validator.rules']
.run ['$builder', ($builder) ->
$builder.registerComponent 'sampleInput',
group: 'from html'
label: 'PI:NAME:<NAME>END_PI'
description: 'From html template'
placeholder: 'placeholder'
required: no
validationOptions: [
{label: 'none', rule: '/.*/'}
{label: 'number', rule: '[number]'}
{label: 'email', rule: '[email]'}
{label: 'url', rule: '[url]'}
]
handleTemplate:
"""
<label class='fb-outline'>{{label}}</label>
"""
templateUrl: 'example/template.html'
popoverTemplateUrl: 'example/popoverTemplate.html'
# ----------------------------------------
# two text input
# ----------------------------------------
$builder.registerComponent 'name',
group: 'Default'
label: 'Name'
required: no
arrayToText: yes
handleTemplate:
"""
<label class='fb-outline'>{{label}}</label>
"""
template:
"""
<div class="form-group clearfix">
<label for="{{formName+index}}" class="col-md-4 control-label" ng-class="{'fb-required':required}">{{label}}</label>
<div class="col-md-8 clearfix">
<input type='hidden' ng-model="inputText" validator-required="{{required}}" validator-group="{{formName}}"/>
<div class="col-sm-6" style="padding-left: 0;">
<input type="text"
ng-model="inputArray[0]"
class="form-control" id="{{formName+index}}-0"/>
<p class='help-block'>First name</p>
</div>
<div class="col-sm-6" style="padding-left: 0;">
<input type="text"
ng-model="inputArray[1]"
class="form-control" id="{{formName+index}}-1"/>
<p class='help-block'>Last name</p>
</div>
</div>
</div>
"""
popoverTemplate:
"""
<form>
<div class="form-group">
<label class='control-label'>Label</label>
<input type='text' ng-model="label" validator="[required]" class='form-control'/>
</div>
<div class="checkbox">
<label>
<input type='checkbox' ng-model="required" />
Required
</label>
</div>
<hr/>
<div class='form-group'>
<input type='submit' ng-click="popover.save($event)" class='btn btn-primary' value='Save'/>
<input type='button' ng-click="popover.cancel($event)" class='btn btn-default' value='Cancel'/>
<input type='button' ng-click="popover.remove($event)" class='btn btn-danger' value='Delete'/>
</div>
</form>
"""
]
.controller 'DemoController', ['$scope', '$builder', '$validator', ($scope, $builder, $validator) ->
# ----------------------------------------
# builder
# ----------------------------------------
textbox = $builder.addFormObject 'default',
id: 'textbox'
component: 'textInput'
label: 'Name'
description: 'Your name'
placeholder: 'Your name'
required: yes
editable: no
checkbox = $builder.addFormObject 'default',
id: 'checkbox'
component: 'checkbox'
label: 'Pets'
description: 'Do you have any pets?'
options: ['Dog', 'Cat', 'Tiger']
$builder.addFormObject 'default',
component: 'sampleInput'
# formObjects
$scope.form = $builder.forms['default']
# ----------------------------------------
# form
# ----------------------------------------
# user input value
$scope.input = []
$scope.defaultValue = {}
# formObjectId: default value
$scope.defaultValue[textbox.id] = 'default value'
$scope.defaultValue[checkbox.id] = [yes, yes, no]
$scope.cleanComponent = () ->
$builder.cleanComponent('default')
$scope.submit = ->
$validator.validate $scope, 'default'
.success -> console.log 'success'
.error -> console.log 'error'
]
|
[
{
"context": "st Suite\", ->\n TEST_RESPONSE = \"{\\\"author\\\": \\\"John Doe\\\",\\\"relativeDate\\\": \\\"2 Hours ago\\\",\\\"fullDate\\\":",
"end": 139,
"score": 0.9987518787384033,
"start": 131,
"tag": "NAME",
"value": "John Doe"
}
] | spec/git-history-view-spec.coffee | jakesankey/git-history | 30 | GitHistoryView = require '../lib/git-history-view'
describe "Git History View Test Suite", ->
TEST_RESPONSE = "{\"author\": \"John Doe\",\"relativeDate\": \"2 Hours ago\",\"fullDate\": \"2014-09-08\",\"message\": \"Foo Bar with \"quotes\" and {stuff}\",\"hash\": \"12345\"},"
it "should use 'message' as the filter key", ->
view = new GitHistoryView()
expect(view.getFilterKey()).toBe "message"
it "should load selected revision", ->
logItem = {hash: 12345}
view = new GitHistoryView()
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should load selected revision with diff", ->
logItem = {hash: 12345}
view = new GitHistoryView()
view._isDiffEnabled = ->
return yes
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should load selected revision with diff", ->
logItem = {hash: 12345}
view = new GitHistoryView()
view._isDiffEnabled = ->
return yes
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should parse comma delimited objects in string to separate items", ->
view = new GitHistoryView()
logItems = null
view._fetchFileHistory = (stdout, exit) ->
stdout TEST_RESPONSE + TEST_RESPONSE
exit 0
view.setItems = (items) ->
logItems = items
view._loadLogData()
expect(logItems.length).toBe 2
| 69478 | GitHistoryView = require '../lib/git-history-view'
describe "Git History View Test Suite", ->
TEST_RESPONSE = "{\"author\": \"<NAME>\",\"relativeDate\": \"2 Hours ago\",\"fullDate\": \"2014-09-08\",\"message\": \"Foo Bar with \"quotes\" and {stuff}\",\"hash\": \"12345\"},"
it "should use 'message' as the filter key", ->
view = new GitHistoryView()
expect(view.getFilterKey()).toBe "message"
it "should load selected revision", ->
logItem = {hash: 12345}
view = new GitHistoryView()
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should load selected revision with diff", ->
logItem = {hash: 12345}
view = new GitHistoryView()
view._isDiffEnabled = ->
return yes
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should load selected revision with diff", ->
logItem = {hash: 12345}
view = new GitHistoryView()
view._isDiffEnabled = ->
return yes
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should parse comma delimited objects in string to separate items", ->
view = new GitHistoryView()
logItems = null
view._fetchFileHistory = (stdout, exit) ->
stdout TEST_RESPONSE + TEST_RESPONSE
exit 0
view.setItems = (items) ->
logItems = items
view._loadLogData()
expect(logItems.length).toBe 2
| true | GitHistoryView = require '../lib/git-history-view'
describe "Git History View Test Suite", ->
TEST_RESPONSE = "{\"author\": \"PI:NAME:<NAME>END_PI\",\"relativeDate\": \"2 Hours ago\",\"fullDate\": \"2014-09-08\",\"message\": \"Foo Bar with \"quotes\" and {stuff}\",\"hash\": \"12345\"},"
it "should use 'message' as the filter key", ->
view = new GitHistoryView()
expect(view.getFilterKey()).toBe "message"
it "should load selected revision", ->
logItem = {hash: 12345}
view = new GitHistoryView()
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should load selected revision with diff", ->
logItem = {hash: 12345}
view = new GitHistoryView()
view._isDiffEnabled = ->
return yes
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should load selected revision with diff", ->
logItem = {hash: 12345}
view = new GitHistoryView()
view._isDiffEnabled = ->
return yes
passedItem = null
callbackCalled = no
view._loadRevision = (item) ->
passedItem = item
callbackCalled = yes
view.confirmed(logItem)
expect(passedItem).toEqual logItem.hash
expect(callbackCalled).toBe yes
it "should parse comma delimited objects in string to separate items", ->
view = new GitHistoryView()
logItems = null
view._fetchFileHistory = (stdout, exit) ->
stdout TEST_RESPONSE + TEST_RESPONSE
exit 0
view.setItems = (items) ->
logItems = items
view._loadLogData()
expect(logItems.length).toBe 2
|
[
{
"context": "s to have a shouldComponentUpdate method\n# @author Evgueni Naverniouk\n###\n'use strict'\n\nrule = require 'eslint-plugin-r",
"end": 113,
"score": 0.9998710751533508,
"start": 95,
"tag": "NAME",
"value": "Evgueni Naverniouk"
}
] | src/tests/rules/require-optimization.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Enforce React components to have a shouldComponentUpdate method
# @author Evgueni Naverniouk
###
'use strict'
rule = require 'eslint-plugin-react/lib/rules/require-optimization'
{RuleTester} = require 'eslint'
path = require 'path'
MESSAGE =
'Component is not optimized. Please add a shouldComponentUpdate method.'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'react-require-optimization', rule,
valid: [
code: '''
class A
'''
,
code: '''
import React from "react"
class YourComponent extends React.Component
shouldComponentUpdate : ->
'''
,
code: '''
import React, {Component} from "react"
class YourComponent extends Component
shouldComponentUpdate : ->
'''
,
# ,
# code: """
# import React, {Component} from "react"
# @reactMixin.decorate(PureRenderMixin)
# class YourComponent extends Component
# componetnDidMount : ->
# render: ->
# """
# parser: 'babel-eslint'
code: '''
import React from "react"
createReactClass({
shouldComponentUpdate: ->
})
'''
,
code: '''
import React from "react"
createReactClass({
mixins: [PureRenderMixin]
})
'''
,
# ,
# code: """
# @reactMixin.decorate(PureRenderMixin)
# class DecoratedComponent extends Component
# """
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) ->
<div />
'''
,
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) ->
return <div />
'''
,
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) =>
return <div />
'''
,
# parser: 'babel-eslint'
# ,
# code: """
# @bar
# @pureRender
# @foo
# class DecoratedComponent extends Component
# """
# parser: 'babel-eslint'
# options: [allowDecorators: ['renderPure', 'pureRender']]
code: '''
import React from "react"
class YourComponent extends React.PureComponent
'''
# parser: 'babel-eslint'
options: [allowDecorators: ['renderPure', 'pureRender']]
,
code: '''
import React, {PureComponent} from "react"
class YourComponent extends PureComponent
'''
# parser: 'babel-eslint'
options: [allowDecorators: ['renderPure', 'pureRender']]
,
code: '''
obj = { prop: [,,,,,] }
'''
]
invalid: [
code: '''
import React from "react"
class YourComponent extends React.Component
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
class YourComponent extends React.Component
handleClick: ->
render: ->
return <div onClick={this.handleClick}>123</div>
'''
# parser: 'babel-eslint'
errors: [message: MESSAGE]
,
code: '''
import React, {Component} from "react"
class YourComponent extends Component
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
createReactClass({})
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
createReactClass({
mixins: [RandomMixin]
})
'''
errors: [message: MESSAGE]
# ,
# code: """
# @reactMixin.decorate(SomeOtherMixin)
# class DecoratedComponent extends Component
# """
# errors: [message: MESSAGE]
# parser: 'babel-eslint'
# ,
# code: """
# @bar
# @pure
# @foo
# class DecoratedComponent extends Component
# """
# errors: [message: MESSAGE]
# parser: 'babel-eslint'
# options: [allowDecorators: ['renderPure', 'pureRender']]
]
| 34458 | ###*
# @fileoverview Enforce React components to have a shouldComponentUpdate method
# @author <NAME>
###
'use strict'
rule = require 'eslint-plugin-react/lib/rules/require-optimization'
{RuleTester} = require 'eslint'
path = require 'path'
MESSAGE =
'Component is not optimized. Please add a shouldComponentUpdate method.'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'react-require-optimization', rule,
valid: [
code: '''
class A
'''
,
code: '''
import React from "react"
class YourComponent extends React.Component
shouldComponentUpdate : ->
'''
,
code: '''
import React, {Component} from "react"
class YourComponent extends Component
shouldComponentUpdate : ->
'''
,
# ,
# code: """
# import React, {Component} from "react"
# @reactMixin.decorate(PureRenderMixin)
# class YourComponent extends Component
# componetnDidMount : ->
# render: ->
# """
# parser: 'babel-eslint'
code: '''
import React from "react"
createReactClass({
shouldComponentUpdate: ->
})
'''
,
code: '''
import React from "react"
createReactClass({
mixins: [PureRenderMixin]
})
'''
,
# ,
# code: """
# @reactMixin.decorate(PureRenderMixin)
# class DecoratedComponent extends Component
# """
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) ->
<div />
'''
,
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) ->
return <div />
'''
,
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) =>
return <div />
'''
,
# parser: 'babel-eslint'
# ,
# code: """
# @bar
# @pureRender
# @foo
# class DecoratedComponent extends Component
# """
# parser: 'babel-eslint'
# options: [allowDecorators: ['renderPure', 'pureRender']]
code: '''
import React from "react"
class YourComponent extends React.PureComponent
'''
# parser: 'babel-eslint'
options: [allowDecorators: ['renderPure', 'pureRender']]
,
code: '''
import React, {PureComponent} from "react"
class YourComponent extends PureComponent
'''
# parser: 'babel-eslint'
options: [allowDecorators: ['renderPure', 'pureRender']]
,
code: '''
obj = { prop: [,,,,,] }
'''
]
invalid: [
code: '''
import React from "react"
class YourComponent extends React.Component
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
class YourComponent extends React.Component
handleClick: ->
render: ->
return <div onClick={this.handleClick}>123</div>
'''
# parser: 'babel-eslint'
errors: [message: MESSAGE]
,
code: '''
import React, {Component} from "react"
class YourComponent extends Component
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
createReactClass({})
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
createReactClass({
mixins: [RandomMixin]
})
'''
errors: [message: MESSAGE]
# ,
# code: """
# @reactMixin.decorate(SomeOtherMixin)
# class DecoratedComponent extends Component
# """
# errors: [message: MESSAGE]
# parser: 'babel-eslint'
# ,
# code: """
# @bar
# @pure
# @foo
# class DecoratedComponent extends Component
# """
# errors: [message: MESSAGE]
# parser: 'babel-eslint'
# options: [allowDecorators: ['renderPure', 'pureRender']]
]
| true | ###*
# @fileoverview Enforce React components to have a shouldComponentUpdate method
# @author PI:NAME:<NAME>END_PI
###
'use strict'
rule = require 'eslint-plugin-react/lib/rules/require-optimization'
{RuleTester} = require 'eslint'
path = require 'path'
MESSAGE =
'Component is not optimized. Please add a shouldComponentUpdate method.'
ruleTester = new RuleTester parser: path.join __dirname, '../../..'
ruleTester.run 'react-require-optimization', rule,
valid: [
code: '''
class A
'''
,
code: '''
import React from "react"
class YourComponent extends React.Component
shouldComponentUpdate : ->
'''
,
code: '''
import React, {Component} from "react"
class YourComponent extends Component
shouldComponentUpdate : ->
'''
,
# ,
# code: """
# import React, {Component} from "react"
# @reactMixin.decorate(PureRenderMixin)
# class YourComponent extends Component
# componetnDidMount : ->
# render: ->
# """
# parser: 'babel-eslint'
code: '''
import React from "react"
createReactClass({
shouldComponentUpdate: ->
})
'''
,
code: '''
import React from "react"
createReactClass({
mixins: [PureRenderMixin]
})
'''
,
# ,
# code: """
# @reactMixin.decorate(PureRenderMixin)
# class DecoratedComponent extends Component
# """
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) ->
<div />
'''
,
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) ->
return <div />
'''
,
# parser: 'babel-eslint'
code: '''
FunctionalComponent = (props) =>
return <div />
'''
,
# parser: 'babel-eslint'
# ,
# code: """
# @bar
# @pureRender
# @foo
# class DecoratedComponent extends Component
# """
# parser: 'babel-eslint'
# options: [allowDecorators: ['renderPure', 'pureRender']]
code: '''
import React from "react"
class YourComponent extends React.PureComponent
'''
# parser: 'babel-eslint'
options: [allowDecorators: ['renderPure', 'pureRender']]
,
code: '''
import React, {PureComponent} from "react"
class YourComponent extends PureComponent
'''
# parser: 'babel-eslint'
options: [allowDecorators: ['renderPure', 'pureRender']]
,
code: '''
obj = { prop: [,,,,,] }
'''
]
invalid: [
code: '''
import React from "react"
class YourComponent extends React.Component
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
class YourComponent extends React.Component
handleClick: ->
render: ->
return <div onClick={this.handleClick}>123</div>
'''
# parser: 'babel-eslint'
errors: [message: MESSAGE]
,
code: '''
import React, {Component} from "react"
class YourComponent extends Component
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
createReactClass({})
'''
errors: [message: MESSAGE]
,
code: '''
import React from "react"
createReactClass({
mixins: [RandomMixin]
})
'''
errors: [message: MESSAGE]
# ,
# code: """
# @reactMixin.decorate(SomeOtherMixin)
# class DecoratedComponent extends Component
# """
# errors: [message: MESSAGE]
# parser: 'babel-eslint'
# ,
# code: """
# @bar
# @pure
# @foo
# class DecoratedComponent extends Component
# """
# errors: [message: MESSAGE]
# parser: 'babel-eslint'
# options: [allowDecorators: ['renderPure', 'pureRender']]
]
|
[
{
"context": "r _id\", ->\n indicatorAttributes = _id: 5, name: 'hat'\n\n section = new Backbone.Models.Section(indicat",
"end": 692,
"score": 0.6977673768997192,
"start": 689,
"tag": "NAME",
"value": "hat"
},
{
"context": "(\n narrative:\n _id: 23423\n content: 'bees'... | client/test/src/models/section_model.coffee | unepwcmc/NRT | 0 | assert = chai.assert
suite('Section Model')
test("When intialising a section with a nested visualisation,
visualisation.toJSON should return section: as section._id", ->
indicator = Factory.indicator()
visualisation = new Backbone.Models.Visualisation(indicator: indicator)
sectionAttributes =
indicator: indicator
visualisation: visualisation
section = Factory.section(sectionAttributes)
assert.strictEqual section.get('visualisation').toJSON().section, sectionAttributes._id
)
test("When calling .toJSON on a section with an indicator model attribute,
the indicator model should be deserialized to the indicator _id", ->
indicatorAttributes = _id: 5, name: 'hat'
section = new Backbone.Models.Section(indicator: indicatorAttributes)
assert.equal section.toJSON().indicator, indicatorAttributes._id
)
test('.toJSON should not include a visualisation attribute
(as visualisations save themselves with the section._id)', ->
section = new Backbone.Models.Section(
visualisation:
_id: 23423
indicator: {}
)
assert.isUndefined section.toJSON().visualisation
)
test('.toJSON should not include a narrative attribute
(as narratives save themselves with the section._id)', ->
section = new Backbone.Models.Section(
narrative:
_id: 23423
content: 'bees'
)
assert.isUndefined section.toJSON().narrative
)
test(".hasTitleOrIndicator returns false if there is no title and no indicator assigned", ->
section = new Backbone.Models.Section()
assert.notOk section.hasTitleOrIndicator()
)
test(".hasTitleOrIndicator returns true if there is a title present", ->
section = new Backbone.Models.Section(title: 'title')
assert.ok section.hasTitleOrIndicator()
)
test(".hasTitleOrIndicator returns true if there is an indicator present", ->
section = new Backbone.Models.Section(indicator: {name: 'an indicator'})
assert.ok section.hasTitleOrIndicator()
)
test("When initialised with visualisation attributes,
it creates a Backbone.Models.Visualisation model in the visualisation attribute", ->
visualisationAttributes = data: {some: 'data'}, indicator: Factory.indicator()
section = new Backbone.Models.Section(visualisation: visualisationAttributes)
assert.equal section.get('visualisation').constructor.name, 'Visualisation'
assert.equal section.get('visualisation').get('data'), visualisationAttributes.data
)
test("When initialised with narrative attributes,
it creates a Backbone.Models.Narrative model in the narrative attribute", ->
narrativeAttributes = content: "I'm narrative text"
section = new Backbone.Models.Section(narrative: narrativeAttributes)
assert.equal section.get('narrative').constructor.name, 'Narrative'
assert.equal section.get('narrative').get('content'), narrativeAttributes.content
)
test("When setting 'indicator' with indicator attributes,
it creates a Backbone.Models.Indicator model in the indicator attribute", ->
indicatorAttributes = name: "I'm an indicator"
section = new Backbone.Models.Section()
section.set('indicator', indicatorAttributes)
assert.equal section.get('indicator').constructor.name, 'Indicator'
assert.equal section.get('indicator').get('name'), indicatorAttributes.name
)
test(".save should actually call save on the parent page model", (done)->
page = new Backbone.Models.Page(
sections: [{
title: 'dat title'
}]
)
section = page.get('sections').at(0)
pageSaveSpy = sinon.stub(page, 'save', (attributes, options)->
options.success(page, 200, options)
)
section.save(null,
success: (model, response, options) ->
assert.ok pageSaveSpy.calledOnce, "page save not called"
done()
error: ->
throw 'Section saved failed'
)
)
test('.getPage should get the parent page', ->
page = Factory.page(sections: [{title: 'A section'}])
section = page.get('sections').at(0)
assert.strictEqual(
section.getPage().cid,
page.cid,
"Expected the Section page to be the same as page"
)
)
test('EditModeMixin is mixed in', ->
section = new Backbone.Models.Section()
assert.isDefined section.isEditable, 'Expected Section to have method .isEditable'
)
| 220375 | assert = chai.assert
suite('Section Model')
test("When intialising a section with a nested visualisation,
visualisation.toJSON should return section: as section._id", ->
indicator = Factory.indicator()
visualisation = new Backbone.Models.Visualisation(indicator: indicator)
sectionAttributes =
indicator: indicator
visualisation: visualisation
section = Factory.section(sectionAttributes)
assert.strictEqual section.get('visualisation').toJSON().section, sectionAttributes._id
)
test("When calling .toJSON on a section with an indicator model attribute,
the indicator model should be deserialized to the indicator _id", ->
indicatorAttributes = _id: 5, name: '<NAME>'
section = new Backbone.Models.Section(indicator: indicatorAttributes)
assert.equal section.toJSON().indicator, indicatorAttributes._id
)
test('.toJSON should not include a visualisation attribute
(as visualisations save themselves with the section._id)', ->
section = new Backbone.Models.Section(
visualisation:
_id: 23423
indicator: {}
)
assert.isUndefined section.toJSON().visualisation
)
test('.toJSON should not include a narrative attribute
(as narratives save themselves with the section._id)', ->
section = new Backbone.Models.Section(
narrative:
_id: 23423
content: '<NAME>es'
)
assert.isUndefined section.toJSON().narrative
)
test(".hasTitleOrIndicator returns false if there is no title and no indicator assigned", ->
section = new Backbone.Models.Section()
assert.notOk section.hasTitleOrIndicator()
)
test(".hasTitleOrIndicator returns true if there is a title present", ->
section = new Backbone.Models.Section(title: 'title')
assert.ok section.hasTitleOrIndicator()
)
test(".hasTitleOrIndicator returns true if there is an indicator present", ->
section = new Backbone.Models.Section(indicator: {name: 'an indicator'})
assert.ok section.hasTitleOrIndicator()
)
test("When initialised with visualisation attributes,
it creates a Backbone.Models.Visualisation model in the visualisation attribute", ->
visualisationAttributes = data: {some: 'data'}, indicator: Factory.indicator()
section = new Backbone.Models.Section(visualisation: visualisationAttributes)
assert.equal section.get('visualisation').constructor.name, 'Visualisation'
assert.equal section.get('visualisation').get('data'), visualisationAttributes.data
)
test("When initialised with narrative attributes,
it creates a Backbone.Models.Narrative model in the narrative attribute", ->
narrativeAttributes = content: "I'm narrative text"
section = new Backbone.Models.Section(narrative: narrativeAttributes)
assert.equal section.get('narrative').constructor.name, 'Narrative'
assert.equal section.get('narrative').get('content'), narrativeAttributes.content
)
test("When setting 'indicator' with indicator attributes,
it creates a Backbone.Models.Indicator model in the indicator attribute", ->
indicatorAttributes = name: "I'm an indicator"
section = new Backbone.Models.Section()
section.set('indicator', indicatorAttributes)
assert.equal section.get('indicator').constructor.name, 'Indicator'
assert.equal section.get('indicator').get('name'), indicatorAttributes.name
)
test(".save should actually call save on the parent page model", (done)->
page = new Backbone.Models.Page(
sections: [{
title: 'dat title'
}]
)
section = page.get('sections').at(0)
pageSaveSpy = sinon.stub(page, 'save', (attributes, options)->
options.success(page, 200, options)
)
section.save(null,
success: (model, response, options) ->
assert.ok pageSaveSpy.calledOnce, "page save not called"
done()
error: ->
throw 'Section saved failed'
)
)
test('.getPage should get the parent page', ->
page = Factory.page(sections: [{title: 'A section'}])
section = page.get('sections').at(0)
assert.strictEqual(
section.getPage().cid,
page.cid,
"Expected the Section page to be the same as page"
)
)
test('EditModeMixin is mixed in', ->
section = new Backbone.Models.Section()
assert.isDefined section.isEditable, 'Expected Section to have method .isEditable'
)
| true | assert = chai.assert
suite('Section Model')
test("When intialising a section with a nested visualisation,
visualisation.toJSON should return section: as section._id", ->
indicator = Factory.indicator()
visualisation = new Backbone.Models.Visualisation(indicator: indicator)
sectionAttributes =
indicator: indicator
visualisation: visualisation
section = Factory.section(sectionAttributes)
assert.strictEqual section.get('visualisation').toJSON().section, sectionAttributes._id
)
test("When calling .toJSON on a section with an indicator model attribute,
the indicator model should be deserialized to the indicator _id", ->
indicatorAttributes = _id: 5, name: 'PI:NAME:<NAME>END_PI'
section = new Backbone.Models.Section(indicator: indicatorAttributes)
assert.equal section.toJSON().indicator, indicatorAttributes._id
)
test('.toJSON should not include a visualisation attribute
(as visualisations save themselves with the section._id)', ->
section = new Backbone.Models.Section(
visualisation:
_id: 23423
indicator: {}
)
assert.isUndefined section.toJSON().visualisation
)
test('.toJSON should not include a narrative attribute
(as narratives save themselves with the section._id)', ->
section = new Backbone.Models.Section(
narrative:
_id: 23423
content: 'PI:NAME:<NAME>END_PIes'
)
assert.isUndefined section.toJSON().narrative
)
test(".hasTitleOrIndicator returns false if there is no title and no indicator assigned", ->
section = new Backbone.Models.Section()
assert.notOk section.hasTitleOrIndicator()
)
test(".hasTitleOrIndicator returns true if there is a title present", ->
section = new Backbone.Models.Section(title: 'title')
assert.ok section.hasTitleOrIndicator()
)
test(".hasTitleOrIndicator returns true if there is an indicator present", ->
section = new Backbone.Models.Section(indicator: {name: 'an indicator'})
assert.ok section.hasTitleOrIndicator()
)
test("When initialised with visualisation attributes,
it creates a Backbone.Models.Visualisation model in the visualisation attribute", ->
visualisationAttributes = data: {some: 'data'}, indicator: Factory.indicator()
section = new Backbone.Models.Section(visualisation: visualisationAttributes)
assert.equal section.get('visualisation').constructor.name, 'Visualisation'
assert.equal section.get('visualisation').get('data'), visualisationAttributes.data
)
test("When initialised with narrative attributes,
it creates a Backbone.Models.Narrative model in the narrative attribute", ->
narrativeAttributes = content: "I'm narrative text"
section = new Backbone.Models.Section(narrative: narrativeAttributes)
assert.equal section.get('narrative').constructor.name, 'Narrative'
assert.equal section.get('narrative').get('content'), narrativeAttributes.content
)
test("When setting 'indicator' with indicator attributes,
it creates a Backbone.Models.Indicator model in the indicator attribute", ->
indicatorAttributes = name: "I'm an indicator"
section = new Backbone.Models.Section()
section.set('indicator', indicatorAttributes)
assert.equal section.get('indicator').constructor.name, 'Indicator'
assert.equal section.get('indicator').get('name'), indicatorAttributes.name
)
test(".save should actually call save on the parent page model", (done)->
page = new Backbone.Models.Page(
sections: [{
title: 'dat title'
}]
)
section = page.get('sections').at(0)
pageSaveSpy = sinon.stub(page, 'save', (attributes, options)->
options.success(page, 200, options)
)
section.save(null,
success: (model, response, options) ->
assert.ok pageSaveSpy.calledOnce, "page save not called"
done()
error: ->
throw 'Section saved failed'
)
)
test('.getPage should get the parent page', ->
page = Factory.page(sections: [{title: 'A section'}])
section = page.get('sections').at(0)
assert.strictEqual(
section.getPage().cid,
page.cid,
"Expected the Section page to be the same as page"
)
)
test('EditModeMixin is mixed in', ->
section = new Backbone.Models.Section()
assert.isDefined section.isEditable, 'Expected Section to have method .isEditable'
)
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999145269393921,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/form-error.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @FormError
constructor: ->
$(document).on 'ajax:error', '.js-form-error', @showError
$(document).on 'ajax:success', '.js-form-error', @clearError
clearError: (e) =>
@setError e.target
flattenData: (data, prefixes = []) =>
flat = {}
for own key, value of data
if Array.isArray(value)
flatKey = ''
for prefix, i in prefixes
flatKey +=
if i == 0
prefix
else
"[#{prefix}]"
flatKey +=
if flatKey == ''
key
else
"[#{key}]"
flat[flatKey] = value
else if value instanceof Object
_.merge flat, @flattenData(value, prefixes.concat(key))
flat
showError: (e, xhr) =>
data = xhr.responseJSON?.form_error
return osu.ajaxError(xhr) if !data?
@setError e.target, @flattenData(data)
setError: (form, data = {}) =>
$(form)
.find '[name]'
.each (_i, el) =>
@setOneError el, data[el.name]
setOneError: (el, errors = []) =>
state = if errors.length > 0 then 'error' else ''
$(el)
.closest 'label, .js-form-error--field'
.attr 'data-form-error-state', state
.find '.js-form-error--error'
.text errors.join(' ')
| 87217 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @FormError
constructor: ->
$(document).on 'ajax:error', '.js-form-error', @showError
$(document).on 'ajax:success', '.js-form-error', @clearError
clearError: (e) =>
@setError e.target
flattenData: (data, prefixes = []) =>
flat = {}
for own key, value of data
if Array.isArray(value)
flatKey = ''
for prefix, i in prefixes
flatKey +=
if i == 0
prefix
else
"[#{prefix}]"
flatKey +=
if flatKey == ''
key
else
"[#{key}]"
flat[flatKey] = value
else if value instanceof Object
_.merge flat, @flattenData(value, prefixes.concat(key))
flat
showError: (e, xhr) =>
data = xhr.responseJSON?.form_error
return osu.ajaxError(xhr) if !data?
@setError e.target, @flattenData(data)
setError: (form, data = {}) =>
$(form)
.find '[name]'
.each (_i, el) =>
@setOneError el, data[el.name]
setOneError: (el, errors = []) =>
state = if errors.length > 0 then 'error' else ''
$(el)
.closest 'label, .js-form-error--field'
.attr 'data-form-error-state', state
.find '.js-form-error--error'
.text errors.join(' ')
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @FormError
constructor: ->
$(document).on 'ajax:error', '.js-form-error', @showError
$(document).on 'ajax:success', '.js-form-error', @clearError
clearError: (e) =>
@setError e.target
flattenData: (data, prefixes = []) =>
flat = {}
for own key, value of data
if Array.isArray(value)
flatKey = ''
for prefix, i in prefixes
flatKey +=
if i == 0
prefix
else
"[#{prefix}]"
flatKey +=
if flatKey == ''
key
else
"[#{key}]"
flat[flatKey] = value
else if value instanceof Object
_.merge flat, @flattenData(value, prefixes.concat(key))
flat
showError: (e, xhr) =>
data = xhr.responseJSON?.form_error
return osu.ajaxError(xhr) if !data?
@setError e.target, @flattenData(data)
setError: (form, data = {}) =>
$(form)
.find '[name]'
.each (_i, el) =>
@setOneError el, data[el.name]
setOneError: (el, errors = []) =>
state = if errors.length > 0 then 'error' else ''
$(el)
.closest 'label, .js-form-error--field'
.attr 'data-form-error-state', state
.find '.js-form-error--error'
.text errors.join(' ')
|
[
{
"context": "ersation\n#\n# $> markMessageLines(['answer', 'From: foo@bar.com', '', '> question'])\n# 'tsem'\n#\nexports.markMe",
"end": 5964,
"score": 0.9999286532402039,
"start": 5953,
"tag": "EMAIL",
"value": "foo@bar.com"
},
{
"context": "ge lines.\n# $> processMarkedLines(['... | src/planer.coffee | tbenton/planer | 0 | htmlPlaner = require './htmlPlaner'
REGEXES = require './regexes'
SPLITTER_MAX_LINES = 4
MAX_LINES_COUNT = 1000
MAX_LINE_LENGTH = 200000
# Extract actual message from email.
#
# Will use provided `contentType` to decide which algorithm to use (plain text or html).
#
# @param msgBody [String] the html content of the email
# @param contentType [String] the contentType of the email. Only `text/plain` and `text/html` are supported.
# @param dom [Document] the document object to use for html parsing.
# @return [String] the text/html of the actual message without quotations
exports.extractFrom = (msgBody, contentType= 'text/plain', dom = null) ->
if contentType == 'text/plain'
return exports.extractFromPlain msgBody
else if contentType == 'text/html'
return exports.extractFromHtml msgBody, dom
else
console.warn('Unknown contentType', contentType)
return msgBody
# Extract actual message from provided textual email.
#
# Store delimiter used by the email (\n or \r\n),
# split the email into lines,
# use regexes to mark each line as either part of the message or quotation,
# remove lines that are part of the quotation,
# put message back together using the saved delimeter,
# remove changes made by algorithm.
#
# @param msgBody [String] the html content of the email
# @return [String] the text of the message without quotations
exports.extractFromPlain = (msgBody) ->
delimiter = getDelimiter msgBody
msgBody = preprocess msgBody, delimiter
lines = msgBody.split delimiter, MAX_LINES_COUNT
markers = exports.markMessageLines lines
lines = exports.processMarkedLines lines, markers
msgBody = lines.join delimiter
msgBody = postprocess msgBody
return msgBody
# Extract actual message from provided html message body
# using tags and plain text algorithm.
#
# Cut out the 'blockquote', 'gmail_quote' tags.
# Cut out Microsoft (Outlook, Windows mail) quotations.
#
# Then use plain text algorithm to cut out splitter or
# leftover quotation.
# This works by adding checkpoint text to all html tags,
# then converting html to text,
# then extracting quotations from text,
# then checking deleted checkpoints,
# then deleting necessary tags.
#
# Will use the document provided to create a new document using:
# Document.implementation.createHTMLDocument()
#
# @param msgBody [String] the html content of the email
# @param dom [Document] a document object or equivalent implementation.
# Must respond to `DOMImplementation.createHTMLDocument()`.
# @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/createHTMLDocument
exports.extractFromHtml = (msgBody, dom) ->
unless dom?
console.error("No dom provided to parse html.")
return msgBody
if msgBody.trim() == ''
return msgBody
[msgBody, crlfReplaced] = _CRLF_to_LF msgBody
emailDocument = htmlPlaner.createEmailDocument msgBody, dom
# handle cases of emails between various email providers by running all checks instead of
# stopping at whichever check returns positive first
haveCutQuotationsGMail = htmlPlaner.cutGmailQuote(emailDocument)
haveCutQuotationsBlock = htmlPlaner.cutBlockQuote(emailDocument)
haveCutQuotationsMicrosoft = htmlPlaner.cutMicrosoftQuote(emailDocument)
haveCutQuotationsById = htmlPlaner.cutById(emailDocument)
haveCutQuotationsFromBlock = htmlPlaner.cutFromBlock(emailDocument)
haveCutQuotations = (
haveCutQuotationsGMail ||
haveCutQuotationsBlock ||
haveCutQuotationsMicrosoft ||
haveCutQuotationsById ||
haveCutQuotationsFromBlock
)
# Create unaltered copy of email document
emailDocumentCopy = htmlPlaner.createEmailDocument emailDocument.documentElement.outerHTML, dom
# Add checkpoints to html document
numberOfCheckpoints = htmlPlaner.addCheckpoints emailDocument.body, 0
quotationCheckpoints = Array.apply(null, Array(numberOfCheckpoints)).map(-> false)
# Get plain text version to put through plain text algorithm
htmlPlaner.replaceBreakTagsWithLineFeeds(emailDocument)
plainTextMsg = emailDocument.body.textContent
plainTextMsg = preprocess plainTextMsg, "\n", 'text/html'
lines = plainTextMsg.split '\n'
if lines.length > MAX_LINES_COUNT
return msgBody
# Collect checkpoints for each line
lineCheckpoints = new Array(lines.length)
for line, index in lines
matches = line.match(htmlPlaner.CHECKPOINT_PATTERN) || []
lineCheckpoints[index] = matches.map((match) -> parseInt(match.slice(4, -4)))
# Remove checkpoints from lines to pass through plain text algorithm
lines = lines.map((line) -> line.replace(htmlPlaner.CHECKPOINT_PATTERN, ''))
markers = exports.markMessageLines lines
returnFlags = {}
exports.processMarkedLines(lines, markers, returnFlags)
# No lines deleted by plain text algorithm, ready to return
if !returnFlags.wereLinesDeleted
if haveCutQuotations
# If we cut a quotation element out of the html, return the html output of the copied document.
return _restore_CRLF(emailDocumentCopy.documentElement.outerHTML, crlfReplaced)
else
# There was nothing to remove, return original message.
return msgBody
# Set quotationCheckpoints to true for checkpoints on lines that were removed
for i in [returnFlags.firstLine..returnFlags.lastLine]
continue unless lineCheckpoints[i]
for checkpoint in lineCheckpoints[i]
quotationCheckpoints[checkpoint] = true
# Remove the element that have been identified as part of the quoted message
htmlPlaner.deleteQuotationTags emailDocumentCopy.body, 0, quotationCheckpoints
return emailDocumentCopy.documentElement.outerHTML
# Mark message lines with markers to distinguish quotation lines.
#
# Markers:
# * e - empty line
# * f - Forwarded message line, see REGEXES.FWD
# * m - line that starts with quotation marker '>'
# * s - splitter line
# * t - presumably lines from the last message in the conversation
#
# $> markMessageLines(['answer', 'From: foo@bar.com', '', '> question'])
# 'tsem'
#
exports.markMessageLines = (lines) ->
markers = []
i = 0
while i < lines.length
if lines[i].trim() == ''
markers[i] = 'e' # empty line
else if REGEXES.QUOT_PATTERN.test(lines[i])
markers[i] = 'm' # line with quotation marker
else if REGEXES.FWD.test(lines[i])
markers[i] = 'f' # ---- Forwarded message ----
else
splitter = isSplitter(lines.slice(i, i + SPLITTER_MAX_LINES).join("\n"))
if splitter
# splitter[0] is the entire match
splitterLines = splitter[0].split("\n")
for j in [0..splitterLines.length]
markers[i + j] = 's'
i += (splitterLines.length - 1)
else
markers[i] = 't'
i++
return markers.join('')
# Check the line for each splitter regex.
isSplitter = (line) ->
return null if line.length > MAX_LINE_LENGTH
for pattern in REGEXES.SPLITTER_PATTERNS
matchArray = pattern.exec line
if matchArray && matchArray.index == 0
return matchArray
return null
# Run regexes against message's marked lines to strip quotations.
#
# Return only last message lines.
# $> processMarkedLines(['Hello', 'From: foo@bar.com', '', '> Hi'], 'tsem'])
# ['Hello']
#
# Will also modify the provided returnFlags object and set the following properties:
# returnFlags = { wereLinesDeleted: (true|false), firstLine: (Number), lastLine: (Number) }
# @see setReturnFlags
exports.processMarkedLines = (lines, markers, returnFlags = {}) ->
# If there are no splitters there should be no markers
if markers.indexOf('s') < 0 && !/(me*){3}/.test(markers)
markers = markers.replace(/m/g, 't')
# If the message is a forward do nothing.
if /^[te]*f/.test(markers)
setReturnFlags returnFlags, false, -1, -1
return lines
# Find inline replies (tm's following the first m in markers string)
inlineMatchRegex = new RegExp('m(?=e*((?:t+e*)+)m)', 'g')
while inlineReplyMatch = inlineMatchRegex.exec(lines)
inlineReplyIndex = markers.indexOf(inlineReplyMatch[1], inlineReplyMatch.index)
isInlineReplyLink = false
if inlineReplyIndex > -1
isInlineReplyLink =
(REGEXES.PARENTHESIS_LINK.test(lines[inlineReplyIndex - 1]) ||
lines[inlineReplyIndex].trim().search(REGEXES.PARENTHESIS_LINK) == 0)
if !isInlineReplyLink
setReturnFlags returnFlags, false, -1, -1
return lines
# Cut out text lines coming after splitter if there are no markers there
quotationMatch = new RegExp('(se*)+((t|f)+e*)+', 'g').exec(markers)
if quotationMatch
setReturnFlags returnFlags, true, quotationMatch.index, lines.length
return lines.slice(0, quotationMatch.index)
# Handle the case with markers
quotationMatch = REGEXES.QUOTATION.exec(markers) || REGEXES.EMPTY_QUOTATION.exec(markers)
if quotationMatch
quotationEnd = quotationMatch.index + quotationMatch[1].length
setReturnFlags returnFlags, true, quotationMatch.index, quotationEnd
return lines.slice(0, quotationMatch.index).concat(lines.slice(quotationEnd))
setReturnFlags returnFlags, false, -1, -1
return lines
setReturnFlags = (returnFlags, wereLinesDeleted, firstLine, lastLine) ->
returnFlags.wereLinesDeleted = wereLinesDeleted
returnFlags.firstLine = firstLine
returnFlags.lastLine = lastLine
# Prepares msgBody for being stripped.
#
# Replaces link brackets so that they couldn't be taken for quotation marker.
# Splits line in two if splitter pattern preceded by some text on the same
# line (done only for 'On <date> <person> wrote:' pattern).
#
preprocess = (msgBody, delimiter, contentType = 'text/plain') ->
# Normalize links i.e. replace '<', '>' wrapping the link with some symbols
# so that '>' closing the link couldn't be mistakenly taken for quotation
# marker.
# REGEXES.LINK has 1 captured group
msgBody = msgBody.replace REGEXES.LINK, (entireMatch, groupMatch1, matchIndex) ->
# Look for closest newline character
newLineIndex = msgBody.lastIndexOf("\n", matchIndex)
# If the new current line starts with a '>' quotation marker, don't mess with the link
if newLineIndex > 0 && msgBody[newLineIndex + 1] == '>'
return entireMatch
else
return "@@#{ groupMatch1 }@@"
if contentType == 'text/plain' && msgBody.length < MAX_LINE_LENGTH
# ON_DATE_SMB_WROTE has 4 captured groups
msgBody = msgBody.replace REGEXES.ON_DATE_SMB_WROTE, (entireMatch, groupMatch1, groupMatch2, groupMatch3, groupMatch4, matchIndex) ->
if matchIndex && msgBody[matchIndex - 1] != "\n"
return "#{ delimiter }#{ entireMatch }"
else
return entireMatch
return msgBody
# Make up for changes done at preprocessing message.
# Replace link brackets back to '<' and '>'.
postprocess = (msgBody) ->
return msgBody.replace(REGEXES.NORMALIZED_LINK, '<$1>').trim()
CONTENT_CHUNK_SIZE = 100
getDelimiter = (msgBody) ->
contentLength = msgBody.length
currentIndex = 0
bodyChunk = msgBody.substr(currentIndex, CONTENT_CHUNK_SIZE)
while !(delimiterMatch = REGEXES.DELIMITER.exec(bodyChunk)) && currentIndex < contentLength
currentIndex += CONTENT_CHUNK_SIZE
bodyChunk = msgBody.substr(currentIndex, CONTENT_CHUNK_SIZE)
if delimiterMatch
return delimiterMatch[0]
else
return "\n"
_CRLF_to_LF = (msgBody) ->
delimiter = getDelimiter msgBody
if delimiter == '\r\n'
return [msgBody.replace(new RegExp(delimiter, 'g'), '\n'), true]
return [msgBody, false]
_restore_CRLF = (msgBody, replaced = true) ->
if replaced
return msgBody.replace(new RegExp('\n', 'g'), '\r\n')
return msgBody
| 222937 | htmlPlaner = require './htmlPlaner'
REGEXES = require './regexes'
SPLITTER_MAX_LINES = 4
MAX_LINES_COUNT = 1000
MAX_LINE_LENGTH = 200000
# Extract actual message from email.
#
# Will use provided `contentType` to decide which algorithm to use (plain text or html).
#
# @param msgBody [String] the html content of the email
# @param contentType [String] the contentType of the email. Only `text/plain` and `text/html` are supported.
# @param dom [Document] the document object to use for html parsing.
# @return [String] the text/html of the actual message without quotations
exports.extractFrom = (msgBody, contentType= 'text/plain', dom = null) ->
if contentType == 'text/plain'
return exports.extractFromPlain msgBody
else if contentType == 'text/html'
return exports.extractFromHtml msgBody, dom
else
console.warn('Unknown contentType', contentType)
return msgBody
# Extract actual message from provided textual email.
#
# Store delimiter used by the email (\n or \r\n),
# split the email into lines,
# use regexes to mark each line as either part of the message or quotation,
# remove lines that are part of the quotation,
# put message back together using the saved delimeter,
# remove changes made by algorithm.
#
# @param msgBody [String] the html content of the email
# @return [String] the text of the message without quotations
exports.extractFromPlain = (msgBody) ->
delimiter = getDelimiter msgBody
msgBody = preprocess msgBody, delimiter
lines = msgBody.split delimiter, MAX_LINES_COUNT
markers = exports.markMessageLines lines
lines = exports.processMarkedLines lines, markers
msgBody = lines.join delimiter
msgBody = postprocess msgBody
return msgBody
# Extract actual message from provided html message body
# using tags and plain text algorithm.
#
# Cut out the 'blockquote', 'gmail_quote' tags.
# Cut out Microsoft (Outlook, Windows mail) quotations.
#
# Then use plain text algorithm to cut out splitter or
# leftover quotation.
# This works by adding checkpoint text to all html tags,
# then converting html to text,
# then extracting quotations from text,
# then checking deleted checkpoints,
# then deleting necessary tags.
#
# Will use the document provided to create a new document using:
# Document.implementation.createHTMLDocument()
#
# @param msgBody [String] the html content of the email
# @param dom [Document] a document object or equivalent implementation.
# Must respond to `DOMImplementation.createHTMLDocument()`.
# @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/createHTMLDocument
exports.extractFromHtml = (msgBody, dom) ->
unless dom?
console.error("No dom provided to parse html.")
return msgBody
if msgBody.trim() == ''
return msgBody
[msgBody, crlfReplaced] = _CRLF_to_LF msgBody
emailDocument = htmlPlaner.createEmailDocument msgBody, dom
# handle cases of emails between various email providers by running all checks instead of
# stopping at whichever check returns positive first
haveCutQuotationsGMail = htmlPlaner.cutGmailQuote(emailDocument)
haveCutQuotationsBlock = htmlPlaner.cutBlockQuote(emailDocument)
haveCutQuotationsMicrosoft = htmlPlaner.cutMicrosoftQuote(emailDocument)
haveCutQuotationsById = htmlPlaner.cutById(emailDocument)
haveCutQuotationsFromBlock = htmlPlaner.cutFromBlock(emailDocument)
haveCutQuotations = (
haveCutQuotationsGMail ||
haveCutQuotationsBlock ||
haveCutQuotationsMicrosoft ||
haveCutQuotationsById ||
haveCutQuotationsFromBlock
)
# Create unaltered copy of email document
emailDocumentCopy = htmlPlaner.createEmailDocument emailDocument.documentElement.outerHTML, dom
# Add checkpoints to html document
numberOfCheckpoints = htmlPlaner.addCheckpoints emailDocument.body, 0
quotationCheckpoints = Array.apply(null, Array(numberOfCheckpoints)).map(-> false)
# Get plain text version to put through plain text algorithm
htmlPlaner.replaceBreakTagsWithLineFeeds(emailDocument)
plainTextMsg = emailDocument.body.textContent
plainTextMsg = preprocess plainTextMsg, "\n", 'text/html'
lines = plainTextMsg.split '\n'
if lines.length > MAX_LINES_COUNT
return msgBody
# Collect checkpoints for each line
lineCheckpoints = new Array(lines.length)
for line, index in lines
matches = line.match(htmlPlaner.CHECKPOINT_PATTERN) || []
lineCheckpoints[index] = matches.map((match) -> parseInt(match.slice(4, -4)))
# Remove checkpoints from lines to pass through plain text algorithm
lines = lines.map((line) -> line.replace(htmlPlaner.CHECKPOINT_PATTERN, ''))
markers = exports.markMessageLines lines
returnFlags = {}
exports.processMarkedLines(lines, markers, returnFlags)
# No lines deleted by plain text algorithm, ready to return
if !returnFlags.wereLinesDeleted
if haveCutQuotations
# If we cut a quotation element out of the html, return the html output of the copied document.
return _restore_CRLF(emailDocumentCopy.documentElement.outerHTML, crlfReplaced)
else
# There was nothing to remove, return original message.
return msgBody
# Set quotationCheckpoints to true for checkpoints on lines that were removed
for i in [returnFlags.firstLine..returnFlags.lastLine]
continue unless lineCheckpoints[i]
for checkpoint in lineCheckpoints[i]
quotationCheckpoints[checkpoint] = true
# Remove the element that have been identified as part of the quoted message
htmlPlaner.deleteQuotationTags emailDocumentCopy.body, 0, quotationCheckpoints
return emailDocumentCopy.documentElement.outerHTML
# Mark message lines with markers to distinguish quotation lines.
#
# Markers:
# * e - empty line
# * f - Forwarded message line, see REGEXES.FWD
# * m - line that starts with quotation marker '>'
# * s - splitter line
# * t - presumably lines from the last message in the conversation
#
# $> markMessageLines(['answer', 'From: <EMAIL>', '', '> question'])
# 'tsem'
#
exports.markMessageLines = (lines) ->
markers = []
i = 0
while i < lines.length
if lines[i].trim() == ''
markers[i] = 'e' # empty line
else if REGEXES.QUOT_PATTERN.test(lines[i])
markers[i] = 'm' # line with quotation marker
else if REGEXES.FWD.test(lines[i])
markers[i] = 'f' # ---- Forwarded message ----
else
splitter = isSplitter(lines.slice(i, i + SPLITTER_MAX_LINES).join("\n"))
if splitter
# splitter[0] is the entire match
splitterLines = splitter[0].split("\n")
for j in [0..splitterLines.length]
markers[i + j] = 's'
i += (splitterLines.length - 1)
else
markers[i] = 't'
i++
return markers.join('')
# Check the line for each splitter regex.
isSplitter = (line) ->
return null if line.length > MAX_LINE_LENGTH
for pattern in REGEXES.SPLITTER_PATTERNS
matchArray = pattern.exec line
if matchArray && matchArray.index == 0
return matchArray
return null
# Run regexes against message's marked lines to strip quotations.
#
# Return only last message lines.
# $> processMarkedLines(['Hello', 'From: <EMAIL>', '', '> Hi'], 'tsem'])
# ['Hello']
#
# Will also modify the provided returnFlags object and set the following properties:
# returnFlags = { wereLinesDeleted: (true|false), firstLine: (Number), lastLine: (Number) }
# @see setReturnFlags
exports.processMarkedLines = (lines, markers, returnFlags = {}) ->
# If there are no splitters there should be no markers
if markers.indexOf('s') < 0 && !/(me*){3}/.test(markers)
markers = markers.replace(/m/g, 't')
# If the message is a forward do nothing.
if /^[te]*f/.test(markers)
setReturnFlags returnFlags, false, -1, -1
return lines
# Find inline replies (tm's following the first m in markers string)
inlineMatchRegex = new RegExp('m(?=e*((?:t+e*)+)m)', 'g')
while inlineReplyMatch = inlineMatchRegex.exec(lines)
inlineReplyIndex = markers.indexOf(inlineReplyMatch[1], inlineReplyMatch.index)
isInlineReplyLink = false
if inlineReplyIndex > -1
isInlineReplyLink =
(REGEXES.PARENTHESIS_LINK.test(lines[inlineReplyIndex - 1]) ||
lines[inlineReplyIndex].trim().search(REGEXES.PARENTHESIS_LINK) == 0)
if !isInlineReplyLink
setReturnFlags returnFlags, false, -1, -1
return lines
# Cut out text lines coming after splitter if there are no markers there
quotationMatch = new RegExp('(se*)+((t|f)+e*)+', 'g').exec(markers)
if quotationMatch
setReturnFlags returnFlags, true, quotationMatch.index, lines.length
return lines.slice(0, quotationMatch.index)
# Handle the case with markers
quotationMatch = REGEXES.QUOTATION.exec(markers) || REGEXES.EMPTY_QUOTATION.exec(markers)
if quotationMatch
quotationEnd = quotationMatch.index + quotationMatch[1].length
setReturnFlags returnFlags, true, quotationMatch.index, quotationEnd
return lines.slice(0, quotationMatch.index).concat(lines.slice(quotationEnd))
setReturnFlags returnFlags, false, -1, -1
return lines
setReturnFlags = (returnFlags, wereLinesDeleted, firstLine, lastLine) ->
returnFlags.wereLinesDeleted = wereLinesDeleted
returnFlags.firstLine = firstLine
returnFlags.lastLine = lastLine
# Prepares msgBody for being stripped.
#
# Replaces link brackets so that they couldn't be taken for quotation marker.
# Splits line in two if splitter pattern preceded by some text on the same
# line (done only for 'On <date> <person> wrote:' pattern).
#
preprocess = (msgBody, delimiter, contentType = 'text/plain') ->
# Normalize links i.e. replace '<', '>' wrapping the link with some symbols
# so that '>' closing the link couldn't be mistakenly taken for quotation
# marker.
# REGEXES.LINK has 1 captured group
msgBody = msgBody.replace REGEXES.LINK, (entireMatch, groupMatch1, matchIndex) ->
# Look for closest newline character
newLineIndex = msgBody.lastIndexOf("\n", matchIndex)
# If the new current line starts with a '>' quotation marker, don't mess with the link
if newLineIndex > 0 && msgBody[newLineIndex + 1] == '>'
return entireMatch
else
return "@@#{ groupMatch1 }@@"
if contentType == 'text/plain' && msgBody.length < MAX_LINE_LENGTH
# ON_DATE_SMB_WROTE has 4 captured groups
msgBody = msgBody.replace REGEXES.ON_DATE_SMB_WROTE, (entireMatch, groupMatch1, groupMatch2, groupMatch3, groupMatch4, matchIndex) ->
if matchIndex && msgBody[matchIndex - 1] != "\n"
return "#{ delimiter }#{ entireMatch }"
else
return entireMatch
return msgBody
# Make up for changes done at preprocessing message.
# Replace link brackets back to '<' and '>'.
postprocess = (msgBody) ->
return msgBody.replace(REGEXES.NORMALIZED_LINK, '<$1>').trim()
CONTENT_CHUNK_SIZE = 100
getDelimiter = (msgBody) ->
contentLength = msgBody.length
currentIndex = 0
bodyChunk = msgBody.substr(currentIndex, CONTENT_CHUNK_SIZE)
while !(delimiterMatch = REGEXES.DELIMITER.exec(bodyChunk)) && currentIndex < contentLength
currentIndex += CONTENT_CHUNK_SIZE
bodyChunk = msgBody.substr(currentIndex, CONTENT_CHUNK_SIZE)
if delimiterMatch
return delimiterMatch[0]
else
return "\n"
_CRLF_to_LF = (msgBody) ->
delimiter = getDelimiter msgBody
if delimiter == '\r\n'
return [msgBody.replace(new RegExp(delimiter, 'g'), '\n'), true]
return [msgBody, false]
_restore_CRLF = (msgBody, replaced = true) ->
if replaced
return msgBody.replace(new RegExp('\n', 'g'), '\r\n')
return msgBody
| true | htmlPlaner = require './htmlPlaner'
REGEXES = require './regexes'
SPLITTER_MAX_LINES = 4
MAX_LINES_COUNT = 1000
MAX_LINE_LENGTH = 200000
# Extract actual message from email.
#
# Will use provided `contentType` to decide which algorithm to use (plain text or html).
#
# @param msgBody [String] the html content of the email
# @param contentType [String] the contentType of the email. Only `text/plain` and `text/html` are supported.
# @param dom [Document] the document object to use for html parsing.
# @return [String] the text/html of the actual message without quotations
exports.extractFrom = (msgBody, contentType= 'text/plain', dom = null) ->
if contentType == 'text/plain'
return exports.extractFromPlain msgBody
else if contentType == 'text/html'
return exports.extractFromHtml msgBody, dom
else
console.warn('Unknown contentType', contentType)
return msgBody
# Extract actual message from provided textual email.
#
# Store delimiter used by the email (\n or \r\n),
# split the email into lines,
# use regexes to mark each line as either part of the message or quotation,
# remove lines that are part of the quotation,
# put message back together using the saved delimeter,
# remove changes made by algorithm.
#
# @param msgBody [String] the html content of the email
# @return [String] the text of the message without quotations
exports.extractFromPlain = (msgBody) ->
delimiter = getDelimiter msgBody
msgBody = preprocess msgBody, delimiter
lines = msgBody.split delimiter, MAX_LINES_COUNT
markers = exports.markMessageLines lines
lines = exports.processMarkedLines lines, markers
msgBody = lines.join delimiter
msgBody = postprocess msgBody
return msgBody
# Extract actual message from provided html message body
# using tags and plain text algorithm.
#
# Cut out the 'blockquote', 'gmail_quote' tags.
# Cut out Microsoft (Outlook, Windows mail) quotations.
#
# Then use plain text algorithm to cut out splitter or
# leftover quotation.
# This works by adding checkpoint text to all html tags,
# then converting html to text,
# then extracting quotations from text,
# then checking deleted checkpoints,
# then deleting necessary tags.
#
# Will use the document provided to create a new document using:
# Document.implementation.createHTMLDocument()
#
# @param msgBody [String] the html content of the email
# @param dom [Document] a document object or equivalent implementation.
# Must respond to `DOMImplementation.createHTMLDocument()`.
# @see https://developer.mozilla.org/en-US/docs/Web/API/DOMImplementation/createHTMLDocument
exports.extractFromHtml = (msgBody, dom) ->
unless dom?
console.error("No dom provided to parse html.")
return msgBody
if msgBody.trim() == ''
return msgBody
[msgBody, crlfReplaced] = _CRLF_to_LF msgBody
emailDocument = htmlPlaner.createEmailDocument msgBody, dom
# handle cases of emails between various email providers by running all checks instead of
# stopping at whichever check returns positive first
haveCutQuotationsGMail = htmlPlaner.cutGmailQuote(emailDocument)
haveCutQuotationsBlock = htmlPlaner.cutBlockQuote(emailDocument)
haveCutQuotationsMicrosoft = htmlPlaner.cutMicrosoftQuote(emailDocument)
haveCutQuotationsById = htmlPlaner.cutById(emailDocument)
haveCutQuotationsFromBlock = htmlPlaner.cutFromBlock(emailDocument)
haveCutQuotations = (
haveCutQuotationsGMail ||
haveCutQuotationsBlock ||
haveCutQuotationsMicrosoft ||
haveCutQuotationsById ||
haveCutQuotationsFromBlock
)
# Create unaltered copy of email document
emailDocumentCopy = htmlPlaner.createEmailDocument emailDocument.documentElement.outerHTML, dom
# Add checkpoints to html document
numberOfCheckpoints = htmlPlaner.addCheckpoints emailDocument.body, 0
quotationCheckpoints = Array.apply(null, Array(numberOfCheckpoints)).map(-> false)
# Get plain text version to put through plain text algorithm
htmlPlaner.replaceBreakTagsWithLineFeeds(emailDocument)
plainTextMsg = emailDocument.body.textContent
plainTextMsg = preprocess plainTextMsg, "\n", 'text/html'
lines = plainTextMsg.split '\n'
if lines.length > MAX_LINES_COUNT
return msgBody
# Collect checkpoints for each line
lineCheckpoints = new Array(lines.length)
for line, index in lines
matches = line.match(htmlPlaner.CHECKPOINT_PATTERN) || []
lineCheckpoints[index] = matches.map((match) -> parseInt(match.slice(4, -4)))
# Remove checkpoints from lines to pass through plain text algorithm
lines = lines.map((line) -> line.replace(htmlPlaner.CHECKPOINT_PATTERN, ''))
markers = exports.markMessageLines lines
returnFlags = {}
exports.processMarkedLines(lines, markers, returnFlags)
# No lines deleted by plain text algorithm, ready to return
if !returnFlags.wereLinesDeleted
if haveCutQuotations
# If we cut a quotation element out of the html, return the html output of the copied document.
return _restore_CRLF(emailDocumentCopy.documentElement.outerHTML, crlfReplaced)
else
# There was nothing to remove, return original message.
return msgBody
# Set quotationCheckpoints to true for checkpoints on lines that were removed
for i in [returnFlags.firstLine..returnFlags.lastLine]
continue unless lineCheckpoints[i]
for checkpoint in lineCheckpoints[i]
quotationCheckpoints[checkpoint] = true
# Remove the element that have been identified as part of the quoted message
htmlPlaner.deleteQuotationTags emailDocumentCopy.body, 0, quotationCheckpoints
return emailDocumentCopy.documentElement.outerHTML
# Mark message lines with markers to distinguish quotation lines.
#
# Markers:
# * e - empty line
# * f - Forwarded message line, see REGEXES.FWD
# * m - line that starts with quotation marker '>'
# * s - splitter line
# * t - presumably lines from the last message in the conversation
#
# $> markMessageLines(['answer', 'From: PI:EMAIL:<EMAIL>END_PI', '', '> question'])
# 'tsem'
#
exports.markMessageLines = (lines) ->
markers = []
i = 0
while i < lines.length
if lines[i].trim() == ''
markers[i] = 'e' # empty line
else if REGEXES.QUOT_PATTERN.test(lines[i])
markers[i] = 'm' # line with quotation marker
else if REGEXES.FWD.test(lines[i])
markers[i] = 'f' # ---- Forwarded message ----
else
splitter = isSplitter(lines.slice(i, i + SPLITTER_MAX_LINES).join("\n"))
if splitter
# splitter[0] is the entire match
splitterLines = splitter[0].split("\n")
for j in [0..splitterLines.length]
markers[i + j] = 's'
i += (splitterLines.length - 1)
else
markers[i] = 't'
i++
return markers.join('')
# Check the line for each splitter regex.
isSplitter = (line) ->
return null if line.length > MAX_LINE_LENGTH
for pattern in REGEXES.SPLITTER_PATTERNS
matchArray = pattern.exec line
if matchArray && matchArray.index == 0
return matchArray
return null
# Run regexes against message's marked lines to strip quotations.
#
# Return only last message lines.
# $> processMarkedLines(['Hello', 'From: PI:EMAIL:<EMAIL>END_PI', '', '> Hi'], 'tsem'])
# ['Hello']
#
# Will also modify the provided returnFlags object and set the following properties:
# returnFlags = { wereLinesDeleted: (true|false), firstLine: (Number), lastLine: (Number) }
# @see setReturnFlags
exports.processMarkedLines = (lines, markers, returnFlags = {}) ->
# If there are no splitters there should be no markers
if markers.indexOf('s') < 0 && !/(me*){3}/.test(markers)
markers = markers.replace(/m/g, 't')
# If the message is a forward do nothing.
if /^[te]*f/.test(markers)
setReturnFlags returnFlags, false, -1, -1
return lines
# Find inline replies (tm's following the first m in markers string)
inlineMatchRegex = new RegExp('m(?=e*((?:t+e*)+)m)', 'g')
while inlineReplyMatch = inlineMatchRegex.exec(lines)
inlineReplyIndex = markers.indexOf(inlineReplyMatch[1], inlineReplyMatch.index)
isInlineReplyLink = false
if inlineReplyIndex > -1
isInlineReplyLink =
(REGEXES.PARENTHESIS_LINK.test(lines[inlineReplyIndex - 1]) ||
lines[inlineReplyIndex].trim().search(REGEXES.PARENTHESIS_LINK) == 0)
if !isInlineReplyLink
setReturnFlags returnFlags, false, -1, -1
return lines
# Cut out text lines coming after splitter if there are no markers there
quotationMatch = new RegExp('(se*)+((t|f)+e*)+', 'g').exec(markers)
if quotationMatch
setReturnFlags returnFlags, true, quotationMatch.index, lines.length
return lines.slice(0, quotationMatch.index)
# Handle the case with markers
quotationMatch = REGEXES.QUOTATION.exec(markers) || REGEXES.EMPTY_QUOTATION.exec(markers)
if quotationMatch
quotationEnd = quotationMatch.index + quotationMatch[1].length
setReturnFlags returnFlags, true, quotationMatch.index, quotationEnd
return lines.slice(0, quotationMatch.index).concat(lines.slice(quotationEnd))
setReturnFlags returnFlags, false, -1, -1
return lines
setReturnFlags = (returnFlags, wereLinesDeleted, firstLine, lastLine) ->
returnFlags.wereLinesDeleted = wereLinesDeleted
returnFlags.firstLine = firstLine
returnFlags.lastLine = lastLine
# Prepares msgBody for being stripped.
#
# Replaces link brackets so that they couldn't be taken for quotation marker.
# Splits line in two if splitter pattern preceded by some text on the same
# line (done only for 'On <date> <person> wrote:' pattern).
#
preprocess = (msgBody, delimiter, contentType = 'text/plain') ->
# Normalize links i.e. replace '<', '>' wrapping the link with some symbols
# so that '>' closing the link couldn't be mistakenly taken for quotation
# marker.
# REGEXES.LINK has 1 captured group
msgBody = msgBody.replace REGEXES.LINK, (entireMatch, groupMatch1, matchIndex) ->
# Look for closest newline character
newLineIndex = msgBody.lastIndexOf("\n", matchIndex)
# If the new current line starts with a '>' quotation marker, don't mess with the link
if newLineIndex > 0 && msgBody[newLineIndex + 1] == '>'
return entireMatch
else
return "@@#{ groupMatch1 }@@"
if contentType == 'text/plain' && msgBody.length < MAX_LINE_LENGTH
# ON_DATE_SMB_WROTE has 4 captured groups
msgBody = msgBody.replace REGEXES.ON_DATE_SMB_WROTE, (entireMatch, groupMatch1, groupMatch2, groupMatch3, groupMatch4, matchIndex) ->
if matchIndex && msgBody[matchIndex - 1] != "\n"
return "#{ delimiter }#{ entireMatch }"
else
return entireMatch
return msgBody
# Make up for changes done at preprocessing message.
# Replace link brackets back to '<' and '>'.
postprocess = (msgBody) ->
return msgBody.replace(REGEXES.NORMALIZED_LINK, '<$1>').trim()
CONTENT_CHUNK_SIZE = 100
getDelimiter = (msgBody) ->
contentLength = msgBody.length
currentIndex = 0
bodyChunk = msgBody.substr(currentIndex, CONTENT_CHUNK_SIZE)
while !(delimiterMatch = REGEXES.DELIMITER.exec(bodyChunk)) && currentIndex < contentLength
currentIndex += CONTENT_CHUNK_SIZE
bodyChunk = msgBody.substr(currentIndex, CONTENT_CHUNK_SIZE)
if delimiterMatch
return delimiterMatch[0]
else
return "\n"
_CRLF_to_LF = (msgBody) ->
delimiter = getDelimiter msgBody
if delimiter == '\r\n'
return [msgBody.replace(new RegExp(delimiter, 'g'), '\n'), true]
return [msgBody, false]
_restore_CRLF = (msgBody, replaced = true) ->
if replaced
return msgBody.replace(new RegExp('\n', 'g'), '\r\n')
return msgBody
|
[
{
"context": "erface and experience for your app.\n *\n * @author André Lademann <vergissberlin@googlemail.com>\n * @link https:",
"end": 465,
"score": 0.9998817443847656,
"start": 451,
"tag": "NAME",
"value": "André Lademann"
},
{
"context": "ence for your app.\n *\n * @author ... | src/firebase-login-email.coffee | programmerqeu/firebase-login-simple | 1 | ###*
* FirebaseLoginEmail
*
* Authenticating Users with Email & Password
* Firebase makes it easy to integrate email and password authentication
* into your app. Firebase automatically stores your users' credentials
* securely (using bcrypt) and redundantly (daily off-site backups).
* This separates sensitive user credentials from your application data,
* and lets you focus on the user interface and experience for your app.
*
* @author André Lademann <vergissberlin@googlemail.com>
* @link https://www.firebase.com/docs/web/guide/login/password.html
* @param {*} ref Firebase object reference
* @param {*} data Authentication object with email and password
* @param {*} callback Callback function
###
class FirebaseLoginEmail
constructor: (ref = {}, data = {}, callback = {}) ->
# Validation
if typeof data.email isnt "string"
throw new Error "Data object must have an \"email\" field!"
if typeof data.password isnt "string"
throw new Error "Data object must have an \"password\" field!"
ref.authWithPassword {email: data.email, password: data.password}
, (error, authData)->
if error
switch error.code
when "INVALID_EMAIL"
error = "The specified user account email is invalid."
when "INVALID_PASSWORD"
error = "The specified user account password is incorrect."
when "INVALID_USER"
error = "The specified user account does not exist."
else
error = "Error logging user in: " + error.toString()
callback error, authData
module.exports = FirebaseLoginEmail
| 118877 | ###*
* FirebaseLoginEmail
*
* Authenticating Users with Email & Password
* Firebase makes it easy to integrate email and password authentication
* into your app. Firebase automatically stores your users' credentials
* securely (using bcrypt) and redundantly (daily off-site backups).
* This separates sensitive user credentials from your application data,
* and lets you focus on the user interface and experience for your app.
*
* @author <NAME> <<EMAIL>>
* @link https://www.firebase.com/docs/web/guide/login/password.html
* @param {*} ref Firebase object reference
* @param {*} data Authentication object with email and password
* @param {*} callback Callback function
###
class FirebaseLoginEmail
constructor: (ref = {}, data = {}, callback = {}) ->
# Validation
if typeof data.email isnt "string"
throw new Error "Data object must have an \"email\" field!"
if typeof data.password isnt "string"
throw new Error "Data object must have an \"password\" field!"
ref.authWithPassword {email: data.email, password: data.password}
, (error, authData)->
if error
switch error.code
when "INVALID_EMAIL"
error = "The specified user account email is invalid."
when "INVALID_PASSWORD"
error = "The specified user account password is incorrect."
when "INVALID_USER"
error = "The specified user account does not exist."
else
error = "Error logging user in: " + error.toString()
callback error, authData
module.exports = FirebaseLoginEmail
| true | ###*
* FirebaseLoginEmail
*
* Authenticating Users with Email & Password
* Firebase makes it easy to integrate email and password authentication
* into your app. Firebase automatically stores your users' credentials
* securely (using bcrypt) and redundantly (daily off-site backups).
* This separates sensitive user credentials from your application data,
* and lets you focus on the user interface and experience for your app.
*
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* @link https://www.firebase.com/docs/web/guide/login/password.html
* @param {*} ref Firebase object reference
* @param {*} data Authentication object with email and password
* @param {*} callback Callback function
###
class FirebaseLoginEmail
constructor: (ref = {}, data = {}, callback = {}) ->
# Validation
if typeof data.email isnt "string"
throw new Error "Data object must have an \"email\" field!"
if typeof data.password isnt "string"
throw new Error "Data object must have an \"password\" field!"
ref.authWithPassword {email: data.email, password: data.password}
, (error, authData)->
if error
switch error.code
when "INVALID_EMAIL"
error = "The specified user account email is invalid."
when "INVALID_PASSWORD"
error = "The specified user account password is incorrect."
when "INVALID_USER"
error = "The specified user account does not exist."
else
error = "Error logging user in: " + error.toString()
callback error, authData
module.exports = FirebaseLoginEmail
|
[
{
"context": "'''\r\n/*!\r\n * typescript-after-extends\r\n * (C) 2014 Pine Mizune / MIT License\r\n */\r\n'''\r\n\r\ngulp.task 'lint', ->\r\n",
"end": 391,
"score": 0.9998726844787598,
"start": 380,
"tag": "NAME",
"value": "Pine Mizune"
}
] | gulpfile.coffee | pine/typescript-after-extends | 0 | gulp = require 'gulp'
rename = require 'gulp-rename'
header = require 'gulp-header'
uglify = require 'gulp-uglify'
typescript = require 'gulp-tsc'
webpack = require 'gulp-webpack'
plumber = require 'gulp-plumber'
jshint = require 'gulp-jshint'
tslint = require 'gulp-tslint'
pkg = require './package.json'
banner = '''
/*!
* typescript-after-extends
* (C) 2014 Pine Mizune / MIT License
*/
'''
gulp.task 'lint', ->
gulp.src ['index.js', 'lib/**/*.js']
.pipe jshint()
.pipe jshint.reporter('jshint-stylish')
gulp.src 'test/**/*.ts'
.pipe tslint()
.pipe tslint.report('prose')
gulp.task 'webpack', ->
gulp.src 'index.js'
.pipe webpack
output:
library: 'TypeScriptAfterExtends'
libraryTarget: 'umd'
filename: pkg.name + '.js'
.pipe header(banner)
.pipe gulp.dest('dist/')
.pipe uglify()
.pipe header(banner)
.pipe rename( suffix: '.min' )
.pipe gulp.dest('dist/')
gulp.task 'build-tests', ->
gulp.src 'test/**/*.ts'
.pipe plumber()
.pipe typescript
target: 'ES5'
module: 'commonjs'
noImplicitAny: true
.pipe gulp.dest('test/')
gulp.task 'watch', ->
gulp.watch 'test/**/*.ts', ['build-tests']
gulp.watch ['index.js', 'lib/**/*.js'], ['lint']
gulp.task 'default', ['build']
gulp.task 'build', ['webpack', 'lint'] | 174712 | gulp = require 'gulp'
rename = require 'gulp-rename'
header = require 'gulp-header'
uglify = require 'gulp-uglify'
typescript = require 'gulp-tsc'
webpack = require 'gulp-webpack'
plumber = require 'gulp-plumber'
jshint = require 'gulp-jshint'
tslint = require 'gulp-tslint'
pkg = require './package.json'
banner = '''
/*!
* typescript-after-extends
* (C) 2014 <NAME> / MIT License
*/
'''
gulp.task 'lint', ->
gulp.src ['index.js', 'lib/**/*.js']
.pipe jshint()
.pipe jshint.reporter('jshint-stylish')
gulp.src 'test/**/*.ts'
.pipe tslint()
.pipe tslint.report('prose')
gulp.task 'webpack', ->
gulp.src 'index.js'
.pipe webpack
output:
library: 'TypeScriptAfterExtends'
libraryTarget: 'umd'
filename: pkg.name + '.js'
.pipe header(banner)
.pipe gulp.dest('dist/')
.pipe uglify()
.pipe header(banner)
.pipe rename( suffix: '.min' )
.pipe gulp.dest('dist/')
gulp.task 'build-tests', ->
gulp.src 'test/**/*.ts'
.pipe plumber()
.pipe typescript
target: 'ES5'
module: 'commonjs'
noImplicitAny: true
.pipe gulp.dest('test/')
gulp.task 'watch', ->
gulp.watch 'test/**/*.ts', ['build-tests']
gulp.watch ['index.js', 'lib/**/*.js'], ['lint']
gulp.task 'default', ['build']
gulp.task 'build', ['webpack', 'lint'] | true | gulp = require 'gulp'
rename = require 'gulp-rename'
header = require 'gulp-header'
uglify = require 'gulp-uglify'
typescript = require 'gulp-tsc'
webpack = require 'gulp-webpack'
plumber = require 'gulp-plumber'
jshint = require 'gulp-jshint'
tslint = require 'gulp-tslint'
pkg = require './package.json'
banner = '''
/*!
* typescript-after-extends
* (C) 2014 PI:NAME:<NAME>END_PI / MIT License
*/
'''
gulp.task 'lint', ->
gulp.src ['index.js', 'lib/**/*.js']
.pipe jshint()
.pipe jshint.reporter('jshint-stylish')
gulp.src 'test/**/*.ts'
.pipe tslint()
.pipe tslint.report('prose')
gulp.task 'webpack', ->
gulp.src 'index.js'
.pipe webpack
output:
library: 'TypeScriptAfterExtends'
libraryTarget: 'umd'
filename: pkg.name + '.js'
.pipe header(banner)
.pipe gulp.dest('dist/')
.pipe uglify()
.pipe header(banner)
.pipe rename( suffix: '.min' )
.pipe gulp.dest('dist/')
gulp.task 'build-tests', ->
gulp.src 'test/**/*.ts'
.pipe plumber()
.pipe typescript
target: 'ES5'
module: 'commonjs'
noImplicitAny: true
.pipe gulp.dest('test/')
gulp.task 'watch', ->
gulp.watch 'test/**/*.ts', ['build-tests']
gulp.watch ['index.js', 'lib/**/*.js'], ['lint']
gulp.task 'default', ['build']
gulp.task 'build', ['webpack', 'lint'] |
[
{
"context": " users: {\n test: {\n password: 'testpw'\n } \n }\n})\n\n# => CORS (Cross-Origin R",
"end": 511,
"score": 0.9992567896842957,
"start": 505,
"tag": "PASSWORD",
"value": "testpw"
},
{
"context": "\n user = router.req.body.user\n pass =... | examples/example.coffee | Burnett01/sys-api | 6 | API = require 'sys-api'
api = new API({
#'plugins.root' : ['/plugins/', '/home/user/plugins/']
#'plugins.autoload' : true,
#'logger' : 'dev'
})
# Optionally pass an object to restify's createServer-function
# http://restify.com/#creating-a-server
# example: api = new API({ restify: { name: 'MyApp' } })
# ´´´´´´´ SETUP ´´´´´´´
# => Authorization
api.auth({
enabled: false,
method: 'basic',
bcrypt: true,
anon: false,
users: {
test: {
password: 'testpw'
}
}
})
# => CORS (Cross-Origin Resource Sharing)
api.cors({
enabled: false
})
# => BodyParser
api.bodyParser({
enabled: true
})
# ´´´´´´´ DEMO ROUTES ´´´´´´´
#<-- Desc: Health-Status | Path: /heartbeat -->#
# Simple GET-Response
api.get('/heartbeat', "dub")
# Simple POST-Response
# Access POST-values via req.body.value
api.post('/postman', (router) ->
router.send(router.req.body)
)
# ´´´´´´´ ADDON ROUTES ´´´´´´´
# Each route correspondens to an addon
# Check the src/addon/ folder for more information
#<-- Addon: Net | Path: /net -->#
api.get('/net/isip/:ip', api.net.isIP)
api.get('/net/isv4/:ip', api.net.isIPv4)
api.get('/net/isv6/:ip', api.net.isIPv6)
#<-- Addon: FS | Path: /fs -->#
api.post('/fs/readfile', (router) ->
api.fs.readFile(router.req.body.path, (err, content) ->
router.next.ifError(err)
router.send(content)
)
)
#<-- Addon: OS | Path: /os/users -->#
api.get('/os/users/all', (router) ->
api.os.users.all((err, users) ->
router.next.ifError(err)
router.send(users)
)
)
api.get('/os/users/get/:user', (router) ->
api.os.users.get(router.req.params.user, (err, user) ->
router.next.ifError(err)
router.send(user)
)
)
api.post('/os/users/add', (router) ->
opts = {
createHome: false,
sudo: true
}
user = router.req.body.user
pass = router.req.body.pass
api.os.users.add(user, pass, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/lock', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.lock(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/unlock', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.unlock(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/del', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.del(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
#<-- Addon: OS | Path: /os/groups -->#
api.get('/os/groups/all', (router) ->
api.os.groups.all((err, groups) ->
router.next.ifError(err)
router.send(groups)
)
)
api.get('/os/group/get/:group', (router) ->
api.os.group.get(router.req.params.group, (err, group) ->
router.next.ifError(err)
router.send(group)
)
)
api.post('/os/groups/add', (router) ->
opts = {
#system: false,
sudo: true
}
group = router.req.body.group
api.os.groups.add(group, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/groups/del', (router) ->
opts = {
sudo: true
}
group = router.req.body.group
api.os.groups.del(group, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
#<-- Addon: OS | Path: /os/system -->#
api.get('/os/system/all', {
"hostname": api.os.system.hostname(),
"type": api.os.system.type(),
"platform": api.os.system.platform(),
"arch": api.os.system.arch(),
"release": api.os.system.release(),
"eol": api.os.system.eol,
"uptime": api.os.system.uptime(),
"loadavg": api.os.system.loadavg(),
"memory": {
"total": api.os.system.memory.total(),
"free": api.os.system.memory.free()
},
"cpus" : api.os.system.cpus(),
"networkInterfaces" : api.os.system.networkInterfaces()
})
api.get('/os/system/hostname', api.os.system.hostname())
api.get('/os/system/type', api.os.system.type())
api.get('/os/system/platform', api.os.system.platform())
api.get('/os/system/arch', api.os.system.arch())
api.get('/os/system/release', api.os.system.release())
api.get('/os/system/eol', api.os.system.eol)
api.get('/os/system/uptime', api.os.system.uptime())
api.get('/os/system/loadavg', api.os.system.loadavg())
api.get('/os/system/memory/total', api.os.system.memory.total())
api.get('/os/system/memory/free', api.os.system.memory.free())
api.get('/os/system/cpus', api.os.system.cpus())
api.get('/os/system/networkInterfaces', api.os.system.networkInterfaces())
# ´´´´´´´ HIT IT UP! ´´´´´´´
api.listen(8080)
| 54920 | API = require 'sys-api'
api = new API({
#'plugins.root' : ['/plugins/', '/home/user/plugins/']
#'plugins.autoload' : true,
#'logger' : 'dev'
})
# Optionally pass an object to restify's createServer-function
# http://restify.com/#creating-a-server
# example: api = new API({ restify: { name: 'MyApp' } })
# ´´´´´´´ SETUP ´´´´´´´
# => Authorization
api.auth({
enabled: false,
method: 'basic',
bcrypt: true,
anon: false,
users: {
test: {
password: '<PASSWORD>'
}
}
})
# => CORS (Cross-Origin Resource Sharing)
api.cors({
enabled: false
})
# => BodyParser
api.bodyParser({
enabled: true
})
# ´´´´´´´ DEMO ROUTES ´´´´´´´
#<-- Desc: Health-Status | Path: /heartbeat -->#
# Simple GET-Response
api.get('/heartbeat', "dub")
# Simple POST-Response
# Access POST-values via req.body.value
api.post('/postman', (router) ->
router.send(router.req.body)
)
# ´´´´´´´ ADDON ROUTES ´´´´´´´
# Each route correspondens to an addon
# Check the src/addon/ folder for more information
#<-- Addon: Net | Path: /net -->#
api.get('/net/isip/:ip', api.net.isIP)
api.get('/net/isv4/:ip', api.net.isIPv4)
api.get('/net/isv6/:ip', api.net.isIPv6)
#<-- Addon: FS | Path: /fs -->#
api.post('/fs/readfile', (router) ->
api.fs.readFile(router.req.body.path, (err, content) ->
router.next.ifError(err)
router.send(content)
)
)
#<-- Addon: OS | Path: /os/users -->#
api.get('/os/users/all', (router) ->
api.os.users.all((err, users) ->
router.next.ifError(err)
router.send(users)
)
)
api.get('/os/users/get/:user', (router) ->
api.os.users.get(router.req.params.user, (err, user) ->
router.next.ifError(err)
router.send(user)
)
)
api.post('/os/users/add', (router) ->
opts = {
createHome: false,
sudo: true
}
user = router.req.body.user
pass = router.<PASSWORD>
api.os.users.add(user, pass, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/lock', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.lock(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/unlock', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.unlock(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/del', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.del(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
#<-- Addon: OS | Path: /os/groups -->#
api.get('/os/groups/all', (router) ->
api.os.groups.all((err, groups) ->
router.next.ifError(err)
router.send(groups)
)
)
api.get('/os/group/get/:group', (router) ->
api.os.group.get(router.req.params.group, (err, group) ->
router.next.ifError(err)
router.send(group)
)
)
api.post('/os/groups/add', (router) ->
opts = {
#system: false,
sudo: true
}
group = router.req.body.group
api.os.groups.add(group, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/groups/del', (router) ->
opts = {
sudo: true
}
group = router.req.body.group
api.os.groups.del(group, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
#<-- Addon: OS | Path: /os/system -->#
api.get('/os/system/all', {
"hostname": api.os.system.hostname(),
"type": api.os.system.type(),
"platform": api.os.system.platform(),
"arch": api.os.system.arch(),
"release": api.os.system.release(),
"eol": api.os.system.eol,
"uptime": api.os.system.uptime(),
"loadavg": api.os.system.loadavg(),
"memory": {
"total": api.os.system.memory.total(),
"free": api.os.system.memory.free()
},
"cpus" : api.os.system.cpus(),
"networkInterfaces" : api.os.system.networkInterfaces()
})
api.get('/os/system/hostname', api.os.system.hostname())
api.get('/os/system/type', api.os.system.type())
api.get('/os/system/platform', api.os.system.platform())
api.get('/os/system/arch', api.os.system.arch())
api.get('/os/system/release', api.os.system.release())
api.get('/os/system/eol', api.os.system.eol)
api.get('/os/system/uptime', api.os.system.uptime())
api.get('/os/system/loadavg', api.os.system.loadavg())
api.get('/os/system/memory/total', api.os.system.memory.total())
api.get('/os/system/memory/free', api.os.system.memory.free())
api.get('/os/system/cpus', api.os.system.cpus())
api.get('/os/system/networkInterfaces', api.os.system.networkInterfaces())
# ´´´´´´´ HIT IT UP! ´´´´´´´
api.listen(8080)
| true | API = require 'sys-api'
api = new API({
#'plugins.root' : ['/plugins/', '/home/user/plugins/']
#'plugins.autoload' : true,
#'logger' : 'dev'
})
# Optionally pass an object to restify's createServer-function
# http://restify.com/#creating-a-server
# example: api = new API({ restify: { name: 'MyApp' } })
# ´´´´´´´ SETUP ´´´´´´´
# => Authorization
api.auth({
enabled: false,
method: 'basic',
bcrypt: true,
anon: false,
users: {
test: {
password: 'PI:PASSWORD:<PASSWORD>END_PI'
}
}
})
# => CORS (Cross-Origin Resource Sharing)
api.cors({
enabled: false
})
# => BodyParser
api.bodyParser({
enabled: true
})
# ´´´´´´´ DEMO ROUTES ´´´´´´´
#<-- Desc: Health-Status | Path: /heartbeat -->#
# Simple GET-Response
api.get('/heartbeat', "dub")
# Simple POST-Response
# Access POST-values via req.body.value
api.post('/postman', (router) ->
router.send(router.req.body)
)
# ´´´´´´´ ADDON ROUTES ´´´´´´´
# Each route correspondens to an addon
# Check the src/addon/ folder for more information
#<-- Addon: Net | Path: /net -->#
api.get('/net/isip/:ip', api.net.isIP)
api.get('/net/isv4/:ip', api.net.isIPv4)
api.get('/net/isv6/:ip', api.net.isIPv6)
#<-- Addon: FS | Path: /fs -->#
api.post('/fs/readfile', (router) ->
api.fs.readFile(router.req.body.path, (err, content) ->
router.next.ifError(err)
router.send(content)
)
)
#<-- Addon: OS | Path: /os/users -->#
api.get('/os/users/all', (router) ->
api.os.users.all((err, users) ->
router.next.ifError(err)
router.send(users)
)
)
api.get('/os/users/get/:user', (router) ->
api.os.users.get(router.req.params.user, (err, user) ->
router.next.ifError(err)
router.send(user)
)
)
api.post('/os/users/add', (router) ->
opts = {
createHome: false,
sudo: true
}
user = router.req.body.user
pass = router.PI:PASSWORD:<PASSWORD>END_PI
api.os.users.add(user, pass, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/lock', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.lock(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/unlock', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.unlock(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/users/del', (router) ->
opts = {
sudo: true
}
user = router.req.body.user
api.os.users.del(user, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
#<-- Addon: OS | Path: /os/groups -->#
api.get('/os/groups/all', (router) ->
api.os.groups.all((err, groups) ->
router.next.ifError(err)
router.send(groups)
)
)
api.get('/os/group/get/:group', (router) ->
api.os.group.get(router.req.params.group, (err, group) ->
router.next.ifError(err)
router.send(group)
)
)
api.post('/os/groups/add', (router) ->
opts = {
#system: false,
sudo: true
}
group = router.req.body.group
api.os.groups.add(group, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
api.post('/os/groups/del', (router) ->
opts = {
sudo: true
}
group = router.req.body.group
api.os.groups.del(group, opts, (err, status) ->
router.next.ifError(err)
router.send(status)
)
)
#<-- Addon: OS | Path: /os/system -->#
api.get('/os/system/all', {
"hostname": api.os.system.hostname(),
"type": api.os.system.type(),
"platform": api.os.system.platform(),
"arch": api.os.system.arch(),
"release": api.os.system.release(),
"eol": api.os.system.eol,
"uptime": api.os.system.uptime(),
"loadavg": api.os.system.loadavg(),
"memory": {
"total": api.os.system.memory.total(),
"free": api.os.system.memory.free()
},
"cpus" : api.os.system.cpus(),
"networkInterfaces" : api.os.system.networkInterfaces()
})
api.get('/os/system/hostname', api.os.system.hostname())
api.get('/os/system/type', api.os.system.type())
api.get('/os/system/platform', api.os.system.platform())
api.get('/os/system/arch', api.os.system.arch())
api.get('/os/system/release', api.os.system.release())
api.get('/os/system/eol', api.os.system.eol)
api.get('/os/system/uptime', api.os.system.uptime())
api.get('/os/system/loadavg', api.os.system.loadavg())
api.get('/os/system/memory/total', api.os.system.memory.total())
api.get('/os/system/memory/free', api.os.system.memory.free())
api.get('/os/system/cpus', api.os.system.cpus())
api.get('/os/system/networkInterfaces', api.os.system.networkInterfaces())
# ´´´´´´´ HIT IT UP! ´´´´´´´
api.listen(8080)
|
[
{
"context": "plyEmblems\"\n\n\t@isKeyworded: true\n\t@modifierName: \"Destiny\"\n\t@description: null\n\t@keywordDefinition: \"Summon",
"end": 278,
"score": 0.9939844608306885,
"start": 271,
"tag": "NAME",
"value": "Destiny"
}
] | app/sdk/modifiers/modifierOnSummonFromHandApplyEmblems.coffee | willroberts/duelyst | 5 | ModifierOnSummonFromHand = require './modifierOnSummonFromHand'
class ModifierOnSummonFromHandApplyEmblems extends ModifierOnSummonFromHand
type:"ModifierOnSummonFromHandApplyEmblems"
@type:"ModifierOnSummonFromHandApplyEmblems"
@isKeyworded: true
@modifierName: "Destiny"
@description: null
@keywordDefinition: "Summon to gain a permanent game-changing effect."
emblems: null #player modifiers for the emblem's ongoing effect
applyToSelf: true
applyToEnemy: false
@createContextObject: (emblems, applyToSelf=true, applyToEnemy=false, options) ->
contextObject = super(options)
contextObject.emblems = emblems
contextObject.applyToSelf = applyToSelf
contextObject.applyToEnemy = applyToEnemy
return contextObject
onSummonFromHand: () ->
if @emblems?
general = @getCard().getGameSession().getGeneralForPlayerId(@getCard().getOwnerId())
enemyGeneral = @getCard().getGameSession().getGeneralForOpponentOfPlayerId(@getCard().getOwnerId())
for emblem in @emblems
emblem.isRemovable = false
if emblem?
if @applyToSelf
@getGameSession().applyModifierContextObject(emblem, general)
if @applyToEnemy
@getGameSession().applyModifierContextObject(emblem, enemyGeneral)
module.exports = ModifierOnSummonFromHandApplyEmblems
| 100447 | ModifierOnSummonFromHand = require './modifierOnSummonFromHand'
class ModifierOnSummonFromHandApplyEmblems extends ModifierOnSummonFromHand
type:"ModifierOnSummonFromHandApplyEmblems"
@type:"ModifierOnSummonFromHandApplyEmblems"
@isKeyworded: true
@modifierName: "<NAME>"
@description: null
@keywordDefinition: "Summon to gain a permanent game-changing effect."
emblems: null #player modifiers for the emblem's ongoing effect
applyToSelf: true
applyToEnemy: false
@createContextObject: (emblems, applyToSelf=true, applyToEnemy=false, options) ->
contextObject = super(options)
contextObject.emblems = emblems
contextObject.applyToSelf = applyToSelf
contextObject.applyToEnemy = applyToEnemy
return contextObject
onSummonFromHand: () ->
if @emblems?
general = @getCard().getGameSession().getGeneralForPlayerId(@getCard().getOwnerId())
enemyGeneral = @getCard().getGameSession().getGeneralForOpponentOfPlayerId(@getCard().getOwnerId())
for emblem in @emblems
emblem.isRemovable = false
if emblem?
if @applyToSelf
@getGameSession().applyModifierContextObject(emblem, general)
if @applyToEnemy
@getGameSession().applyModifierContextObject(emblem, enemyGeneral)
module.exports = ModifierOnSummonFromHandApplyEmblems
| true | ModifierOnSummonFromHand = require './modifierOnSummonFromHand'
class ModifierOnSummonFromHandApplyEmblems extends ModifierOnSummonFromHand
type:"ModifierOnSummonFromHandApplyEmblems"
@type:"ModifierOnSummonFromHandApplyEmblems"
@isKeyworded: true
@modifierName: "PI:NAME:<NAME>END_PI"
@description: null
@keywordDefinition: "Summon to gain a permanent game-changing effect."
emblems: null #player modifiers for the emblem's ongoing effect
applyToSelf: true
applyToEnemy: false
@createContextObject: (emblems, applyToSelf=true, applyToEnemy=false, options) ->
contextObject = super(options)
contextObject.emblems = emblems
contextObject.applyToSelf = applyToSelf
contextObject.applyToEnemy = applyToEnemy
return contextObject
onSummonFromHand: () ->
if @emblems?
general = @getCard().getGameSession().getGeneralForPlayerId(@getCard().getOwnerId())
enemyGeneral = @getCard().getGameSession().getGeneralForOpponentOfPlayerId(@getCard().getOwnerId())
for emblem in @emblems
emblem.isRemovable = false
if emblem?
if @applyToSelf
@getGameSession().applyModifierContextObject(emblem, general)
if @applyToEnemy
@getGameSession().applyModifierContextObject(emblem, enemyGeneral)
module.exports = ModifierOnSummonFromHandApplyEmblems
|
[
{
"context": "er, a drive-in, or a dive. There I met TV food guy Guy Fieri. Between mouthfuls of cheese and enthusiasm, he p",
"end": 1723,
"score": 0.9837857484817505,
"start": 1714,
"tag": "NAME",
"value": "Guy Fieri"
}
] | stories/around-the-world.coffee | STRd6/zine | 12 | module.exports = """
# Around the World in 20 Years and a Weekend
The history of human progress is rife with us entering things. [Tombs](https://en.wikipedia.org/wiki/Raiders_of_the_Lost_Ark), [dragons](https://en.wikipedia.org/wiki/Enter_the_Dragon), [voids](https://en.wikipedia.org/wiki/Enter_the_Void), even [gungeons](http://dodgeroll.com/gungeon/).
But have you ever stopped to think about what fuels this endless urge to ingress? Rather fearlessly, I decided to find out.

It's now 20 years later. I've waded through the jungles of the Amazon, skied the slopes of Norway, swam the Ganges, and braved long lines. I've lost a lot too. The years have aged me dramatically - I'm a hot mess. And I lost my phone a month ago, so my friends and family are dead to me now.
Here's what I've learned on my journey,

The Freudian says we want to go in things for obvious sex reasons. Freudian? More like ‘Fraud’-ian, right? Har har.

My Peyote spirit animal is a horse. Under the desert sky he whispers in my ear that what we really want is to enter the stable of the mind. I'm not so sure though. I don't even really like normal earthly stables.

After the peyote trip, things get a little fuzzy. I stumbled into a place. It was either a diner, a drive-in, or a dive. There I met TV food guy Guy Fieri. Between mouthfuls of cheese and enthusiasm, he proclaimed that we enter so that we may exit.
“We’re all searching for something that’s everything we imagine it to be.”
...
If I'm honest, this odyssey only really took a weekend. After that I didn't really feel like going back to work, so I just hung out for a while.

[🐸 cool art made by cool people at [Frog Feels](http://frogfeels.com)]
"""
| 102141 | module.exports = """
# Around the World in 20 Years and a Weekend
The history of human progress is rife with us entering things. [Tombs](https://en.wikipedia.org/wiki/Raiders_of_the_Lost_Ark), [dragons](https://en.wikipedia.org/wiki/Enter_the_Dragon), [voids](https://en.wikipedia.org/wiki/Enter_the_Void), even [gungeons](http://dodgeroll.com/gungeon/).
But have you ever stopped to think about what fuels this endless urge to ingress? Rather fearlessly, I decided to find out.

It's now 20 years later. I've waded through the jungles of the Amazon, skied the slopes of Norway, swam the Ganges, and braved long lines. I've lost a lot too. The years have aged me dramatically - I'm a hot mess. And I lost my phone a month ago, so my friends and family are dead to me now.
Here's what I've learned on my journey,

The Freudian says we want to go in things for obvious sex reasons. Freudian? More like ‘Fraud’-ian, right? Har har.

My Peyote spirit animal is a horse. Under the desert sky he whispers in my ear that what we really want is to enter the stable of the mind. I'm not so sure though. I don't even really like normal earthly stables.

After the peyote trip, things get a little fuzzy. I stumbled into a place. It was either a diner, a drive-in, or a dive. There I met TV food guy <NAME>. Between mouthfuls of cheese and enthusiasm, he proclaimed that we enter so that we may exit.
“We’re all searching for something that’s everything we imagine it to be.”
...
If I'm honest, this odyssey only really took a weekend. After that I didn't really feel like going back to work, so I just hung out for a while.

[🐸 cool art made by cool people at [Frog Feels](http://frogfeels.com)]
"""
| true | module.exports = """
# Around the World in 20 Years and a Weekend
The history of human progress is rife with us entering things. [Tombs](https://en.wikipedia.org/wiki/Raiders_of_the_Lost_Ark), [dragons](https://en.wikipedia.org/wiki/Enter_the_Dragon), [voids](https://en.wikipedia.org/wiki/Enter_the_Void), even [gungeons](http://dodgeroll.com/gungeon/).
But have you ever stopped to think about what fuels this endless urge to ingress? Rather fearlessly, I decided to find out.

It's now 20 years later. I've waded through the jungles of the Amazon, skied the slopes of Norway, swam the Ganges, and braved long lines. I've lost a lot too. The years have aged me dramatically - I'm a hot mess. And I lost my phone a month ago, so my friends and family are dead to me now.
Here's what I've learned on my journey,

The Freudian says we want to go in things for obvious sex reasons. Freudian? More like ‘Fraud’-ian, right? Har har.

My Peyote spirit animal is a horse. Under the desert sky he whispers in my ear that what we really want is to enter the stable of the mind. I'm not so sure though. I don't even really like normal earthly stables.

After the peyote trip, things get a little fuzzy. I stumbled into a place. It was either a diner, a drive-in, or a dive. There I met TV food guy PI:NAME:<NAME>END_PI. Between mouthfuls of cheese and enthusiasm, he proclaimed that we enter so that we may exit.
“We’re all searching for something that’s everything we imagine it to be.”
...
If I'm honest, this odyssey only really took a weekend. After that I didn't really feel like going back to work, so I just hung out for a while.

[🐸 cool art made by cool people at [Frog Feels](http://frogfeels.com)]
"""
|
[
{
"context": "Run new server with specified name\n#\n# Author:\n# Nagaev Maksim\n\ngeneratePassword = require 'password-generator'\n",
"end": 1691,
"score": 0.9998642206192017,
"start": 1678,
"tag": "NAME",
"value": "Nagaev Maksim"
},
{
"context": "rt\": true,\n \"name\": server... | scripts/hubot-vscale.coffee | menemy/hubot-vscale | 0 | # Description:
# Interact with vscale API using token
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_VSCALE_LOCATION - datacenter location spb0 or msk0
# HUBOT_VSCALE_RPLAN - type of instance small or medium or large or huge or monster
# HUBOT_VSCALE_OS - OS to use
# debian_8.1_64_001_master
# centos_7.1_64_001_master
# ubuntu_14.04_64_002_master
# centos_6.7_64_001_master
# debian_7_64_001_master
# opensuse_13.2_64_001_preseed
# fedora_23_64_001_master
# centos_7.2_64_001_master
# ubuntu_16.04_64_001_master
# ubuntu_14.04_64_001_ajenti
# ubuntu_14.04_64_001_vesta
# debian_8.1_64_001_master
# ubuntu_14.04_64_002_master
# centos_7.1_64_001_master
# debian_7_64_001_master
# opensuse_13.2_64_001_preseed
# centos_6.7_64_001_master
# fedora_23_64_001_master
# centos_7.2_64_001_master
# ubuntu_16.04_64_001_master
# ubuntu_14.04_64_001_ajenti
# ubuntu_14.04_64_001_vesta
#
# Commands:
# hubot vscale set auth <apitoken> - Set vscale credentials (get token from https://vscale.io/panel/settings/tokens/)
# hubot vscale list - lists all your servers
# hubot vscale describe <serverId> - Describe the server with specified id
# hubot vscale start <serverId> - Start the server with specified id
# hubot vscale stop <serverId> - Stop the server with specified id
# hubot vscale delete <serverId> - Delete the server with specified id
# hubot vscale restart <serverId> - Restart the server with specified id
# hubot vscale run <serverName> - Run new server with specified name
#
# Author:
# Nagaev Maksim
generatePassword = require 'password-generator'
querystring = require 'querystring'
default_location = process.env.HUBOT_VSCALE_LOCATION || "spb0"
default_rplan = process.env.HUBOT_VSCALE_RPLAN || "small"
default_os = process.env.HUBOT_VSCALE_OS || "ubuntu_16.04_64_001_master"
api_url = "https://api.vscale.io/v1"
createSignedRequest = (url, msg) ->
user_id = msg.envelope.user.id
token = msg.robot.brain.data.users[user_id].vscale_auth
if !token
msg.send "Please set auth token first"
return false
req = msg.robot.http(url)
req.header('X-Token', token)
req.header('Accept', 'application/json')
req.header('Content-Type', 'application/json;charset=UTF-8')
return req
vscaleRun = (msg) ->
serverName = querystring.escape msg.match[1]
password = generatePassword()
req = createSignedRequest("#{api_url}/scalets", msg)
if req == false
return
dataObj = {
"make_from": default_os,
"rplan": default_rplan,
"do_start": true,
"name": serverName,
"password": password,
"location": default_location
}
req.post(JSON.stringify(dataObj)) (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
response = ""
content = JSON.parse(body)
response += "Server created:\n"
response += "Please save password: #{password}\n\n"
response += "Status: #{content.status}\n"
response += "Rplan: #{content.rplan}\n"
response += "Locked: #{content.locked}\n"
response += "Name: #{content.name}\n"
response += "PublicIP: #{content.public_address.address}\n"
response += "Hostname: #{content.hostname}\n"
response += "Locations: #{content.locations}\n"
response += "Active: #{content.active}\n"
response += "Made From: #{content.made_from}\n"
msg.send response
catch error
msg.send error
vscaleAction = (msg, action) ->
ctid = querystring.escape msg.match[1]
safeAction = querystring.escape action
req = createSignedRequest("#{api_url}/scalets/"+ctid+"/"+safeAction, msg)
if req == false
return
data = JSON.stringify({
id: ctid
})
req.patch(data) (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
response = action+" successful"
msg.send response
catch error
msg.send error
vscaleDelete = (msg) ->
ctid = querystring.escape msg.match[1]
req = createSignedRequest("#{api_url}/scalets/"+ctid, msg)
if req == false
return
req.delete() (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
response = "Server deleted"
msg.send response
catch error
msg.send error
vscaleDescribe = (msg) ->
ctid = querystring.escape msg.match[1]
req = createSignedRequest("#{api_url}/scalets/"+ctid, msg)
if req == false
return
req.get() (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
response = ""
try
content = JSON.parse(body)
response += "Status: #{content.status}\n"
response += "Rplan: #{content.rplan}\n"
response += "Locked: #{content.locked}\n"
response += "Name: #{content.name}\n"
response += "PublicIP: #{content.public_address.address}\n"
response += "Hostname: #{content.hostname}\n"
response += "Locations: #{content.locations}\n"
response += "Active: #{content.active}\n"
response += "Made From: #{content.made_from}\n"
msg.send response
catch error
msg.send error
vscaleAuth = (msg) ->
user_id = msg.envelope.user.id
credentials = msg.match[1].trim()
msg.robot.brain.data.users[user_id].vscale_auth = credentials
msg.send "Saved vscale token for #{user_id}"
vscaleList = (msg) ->
req = createSignedRequest("#{api_url}/scalets", msg)
if req == false
return
req.get() (err, res, body) ->
response = ""
response += "Name-------ID---------IP-------Status-------Plan\n"
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
for server in content
response += "#{server.name}-------#{server.ctid}---------#{server.public_address.address}-------#{server.status}-------#{server.rplan}\n"
msg.send response
catch error
msg.send error
printWelcome = () ->
process.stdout.write("\n"+
" .. \n"+
" .';::::;'. \n"+
" .;dkxdl::::::::,.. \n"+
" .,lxOOOOOko,..,:::::::;'. \n"+
" .cxOOOOOOkl, .';:::::::;' \n"+
" ;OOOOOxc. .';:::::. \n"+
" ;OOOOd ;::::. \n"+
" ;OOOOo ;::::. ====================================================\n"+
" ;OOOOo ;::::. == Vscale.io management initialized, see ==\n"+
" ;OOOOo ;::::. == scripts/hubot-vscale.coffee for documentation ==\n"+
" ;OOOOo ;::::. ====================================================\n"+
" ;OOOOd. ,:::::. \n"+
" ;OOOOOOd:. .;okOc::::. \n"+
" ,lkOOOOOOko;. .:xOOOOOc::,. \n"+
" .:dkOOOOOOxclkOOOOOOko'. \n"+
" 'cxOOOOOOOOOOxc' \n"+
" .;lkOOkl;. \n"+
" .. \n")
module.exports = (robot) ->
# Used for debug purposes
# proxy = require 'proxy-agent'
# process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
# robot.globalHttpOptions.httpAgent = proxy('http://127.0.0.1:8888', false)
# robot.globalHttpOptions.httpsAgent = proxy('http://127.0.0.1:8888', true)
printWelcome()
robot.respond /vs(?:cale)? list( (.+))?/i, (msg) ->
vscaleList(msg)
robot.respond /vs(?:cale)? describe (\d+)/i, (msg) ->
vscaleDescribe(msg)
robot.respond /vs(?:cale)? restart (\d+)/i, (msg) ->
vscaleAction(msg, "restart")
robot.respond /vs(?:cale)? start (\d+)/i, (msg) ->
vscaleAction(msg, "start")
robot.respond /vs(?:cale)? stop (\d+)/i, (msg) ->
vscaleAction(msg, "stop")
robot.respond /vs(?:cale)? delete (\d+)/i, (msg) ->
vscaleDelete(msg)
robot.respond /vs(?:cale)? set auth (.*)/i, (msg) ->
vscaleAuth(msg)
robot.respond /vs(?:cale)? run ([\w-_.]*)/i, (msg) ->
vscaleRun(msg) | 160069 | # Description:
# Interact with vscale API using token
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_VSCALE_LOCATION - datacenter location spb0 or msk0
# HUBOT_VSCALE_RPLAN - type of instance small or medium or large or huge or monster
# HUBOT_VSCALE_OS - OS to use
# debian_8.1_64_001_master
# centos_7.1_64_001_master
# ubuntu_14.04_64_002_master
# centos_6.7_64_001_master
# debian_7_64_001_master
# opensuse_13.2_64_001_preseed
# fedora_23_64_001_master
# centos_7.2_64_001_master
# ubuntu_16.04_64_001_master
# ubuntu_14.04_64_001_ajenti
# ubuntu_14.04_64_001_vesta
# debian_8.1_64_001_master
# ubuntu_14.04_64_002_master
# centos_7.1_64_001_master
# debian_7_64_001_master
# opensuse_13.2_64_001_preseed
# centos_6.7_64_001_master
# fedora_23_64_001_master
# centos_7.2_64_001_master
# ubuntu_16.04_64_001_master
# ubuntu_14.04_64_001_ajenti
# ubuntu_14.04_64_001_vesta
#
# Commands:
# hubot vscale set auth <apitoken> - Set vscale credentials (get token from https://vscale.io/panel/settings/tokens/)
# hubot vscale list - lists all your servers
# hubot vscale describe <serverId> - Describe the server with specified id
# hubot vscale start <serverId> - Start the server with specified id
# hubot vscale stop <serverId> - Stop the server with specified id
# hubot vscale delete <serverId> - Delete the server with specified id
# hubot vscale restart <serverId> - Restart the server with specified id
# hubot vscale run <serverName> - Run new server with specified name
#
# Author:
# <NAME>
generatePassword = require 'password-generator'
querystring = require 'querystring'
default_location = process.env.HUBOT_VSCALE_LOCATION || "spb0"
default_rplan = process.env.HUBOT_VSCALE_RPLAN || "small"
default_os = process.env.HUBOT_VSCALE_OS || "ubuntu_16.04_64_001_master"
api_url = "https://api.vscale.io/v1"
createSignedRequest = (url, msg) ->
user_id = msg.envelope.user.id
token = msg.robot.brain.data.users[user_id].vscale_auth
if !token
msg.send "Please set auth token first"
return false
req = msg.robot.http(url)
req.header('X-Token', token)
req.header('Accept', 'application/json')
req.header('Content-Type', 'application/json;charset=UTF-8')
return req
vscaleRun = (msg) ->
serverName = querystring.escape msg.match[1]
password = generatePassword()
req = createSignedRequest("#{api_url}/scalets", msg)
if req == false
return
dataObj = {
"make_from": default_os,
"rplan": default_rplan,
"do_start": true,
"name": serverName,
"password": <PASSWORD>,
"location": default_location
}
req.post(JSON.stringify(dataObj)) (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
response = ""
content = JSON.parse(body)
response += "Server created:\n"
response += "Please save password: #{password}\n\n"
response += "Status: #{content.status}\n"
response += "Rplan: #{content.rplan}\n"
response += "Locked: #{content.locked}\n"
response += "Name: #{content.name}\n"
response += "PublicIP: #{content.public_address.address}\n"
response += "Hostname: #{content.hostname}\n"
response += "Locations: #{content.locations}\n"
response += "Active: #{content.active}\n"
response += "Made From: #{content.made_from}\n"
msg.send response
catch error
msg.send error
vscaleAction = (msg, action) ->
ctid = querystring.escape msg.match[1]
safeAction = querystring.escape action
req = createSignedRequest("#{api_url}/scalets/"+ctid+"/"+safeAction, msg)
if req == false
return
data = JSON.stringify({
id: ctid
})
req.patch(data) (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
response = action+" successful"
msg.send response
catch error
msg.send error
vscaleDelete = (msg) ->
ctid = querystring.escape msg.match[1]
req = createSignedRequest("#{api_url}/scalets/"+ctid, msg)
if req == false
return
req.delete() (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
response = "Server deleted"
msg.send response
catch error
msg.send error
vscaleDescribe = (msg) ->
ctid = querystring.escape msg.match[1]
req = createSignedRequest("#{api_url}/scalets/"+ctid, msg)
if req == false
return
req.get() (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
response = ""
try
content = JSON.parse(body)
response += "Status: #{content.status}\n"
response += "Rplan: #{content.rplan}\n"
response += "Locked: #{content.locked}\n"
response += "Name: #{content.name}\n"
response += "PublicIP: #{content.public_address.address}\n"
response += "Hostname: #{content.hostname}\n"
response += "Locations: #{content.locations}\n"
response += "Active: #{content.active}\n"
response += "Made From: #{content.made_from}\n"
msg.send response
catch error
msg.send error
vscaleAuth = (msg) ->
user_id = msg.envelope.user.id
credentials = msg.match[1].trim()
msg.robot.brain.data.users[user_id].vscale_auth = credentials
msg.send "Saved vscale token for #{user_id}"
vscaleList = (msg) ->
req = createSignedRequest("#{api_url}/scalets", msg)
if req == false
return
req.get() (err, res, body) ->
response = ""
response += "Name-------ID---------IP-------Status-------Plan\n"
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
for server in content
response += "#{server.name}-------#{server.ctid}---------#{server.public_address.address}-------#{server.status}-------#{server.rplan}\n"
msg.send response
catch error
msg.send error
printWelcome = () ->
process.stdout.write("\n"+
" .. \n"+
" .';::::;'. \n"+
" .;dkxdl::::::::,.. \n"+
" .,lxOOOOOko,..,:::::::;'. \n"+
" .cxOOOOOOkl, .';:::::::;' \n"+
" ;OOOOOxc. .';:::::. \n"+
" ;OOOOd ;::::. \n"+
" ;OOOOo ;::::. ====================================================\n"+
" ;OOOOo ;::::. == Vscale.io management initialized, see ==\n"+
" ;OOOOo ;::::. == scripts/hubot-vscale.coffee for documentation ==\n"+
" ;OOOOo ;::::. ====================================================\n"+
" ;OOOOd. ,:::::. \n"+
" ;OOOOOOd:. .;okOc::::. \n"+
" ,lkOOOOOOko;. .:xOOOOOc::,. \n"+
" .:dkOOOOOOxclkOOOOOOko'. \n"+
" 'cxOOOOOOOOOOxc' \n"+
" .;lkOOkl;. \n"+
" .. \n")
module.exports = (robot) ->
# Used for debug purposes
# proxy = require 'proxy-agent'
# process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
# robot.globalHttpOptions.httpAgent = proxy('http://127.0.0.1:8888', false)
# robot.globalHttpOptions.httpsAgent = proxy('http://127.0.0.1:8888', true)
printWelcome()
robot.respond /vs(?:cale)? list( (.+))?/i, (msg) ->
vscaleList(msg)
robot.respond /vs(?:cale)? describe (\d+)/i, (msg) ->
vscaleDescribe(msg)
robot.respond /vs(?:cale)? restart (\d+)/i, (msg) ->
vscaleAction(msg, "restart")
robot.respond /vs(?:cale)? start (\d+)/i, (msg) ->
vscaleAction(msg, "start")
robot.respond /vs(?:cale)? stop (\d+)/i, (msg) ->
vscaleAction(msg, "stop")
robot.respond /vs(?:cale)? delete (\d+)/i, (msg) ->
vscaleDelete(msg)
robot.respond /vs(?:cale)? set auth (.*)/i, (msg) ->
vscaleAuth(msg)
robot.respond /vs(?:cale)? run ([\w-_.]*)/i, (msg) ->
vscaleRun(msg) | true | # Description:
# Interact with vscale API using token
#
# Dependencies:
# None
#
# Configuration:
# HUBOT_VSCALE_LOCATION - datacenter location spb0 or msk0
# HUBOT_VSCALE_RPLAN - type of instance small or medium or large or huge or monster
# HUBOT_VSCALE_OS - OS to use
# debian_8.1_64_001_master
# centos_7.1_64_001_master
# ubuntu_14.04_64_002_master
# centos_6.7_64_001_master
# debian_7_64_001_master
# opensuse_13.2_64_001_preseed
# fedora_23_64_001_master
# centos_7.2_64_001_master
# ubuntu_16.04_64_001_master
# ubuntu_14.04_64_001_ajenti
# ubuntu_14.04_64_001_vesta
# debian_8.1_64_001_master
# ubuntu_14.04_64_002_master
# centos_7.1_64_001_master
# debian_7_64_001_master
# opensuse_13.2_64_001_preseed
# centos_6.7_64_001_master
# fedora_23_64_001_master
# centos_7.2_64_001_master
# ubuntu_16.04_64_001_master
# ubuntu_14.04_64_001_ajenti
# ubuntu_14.04_64_001_vesta
#
# Commands:
# hubot vscale set auth <apitoken> - Set vscale credentials (get token from https://vscale.io/panel/settings/tokens/)
# hubot vscale list - lists all your servers
# hubot vscale describe <serverId> - Describe the server with specified id
# hubot vscale start <serverId> - Start the server with specified id
# hubot vscale stop <serverId> - Stop the server with specified id
# hubot vscale delete <serverId> - Delete the server with specified id
# hubot vscale restart <serverId> - Restart the server with specified id
# hubot vscale run <serverName> - Run new server with specified name
#
# Author:
# PI:NAME:<NAME>END_PI
generatePassword = require 'password-generator'
querystring = require 'querystring'
default_location = process.env.HUBOT_VSCALE_LOCATION || "spb0"
default_rplan = process.env.HUBOT_VSCALE_RPLAN || "small"
default_os = process.env.HUBOT_VSCALE_OS || "ubuntu_16.04_64_001_master"
api_url = "https://api.vscale.io/v1"
createSignedRequest = (url, msg) ->
user_id = msg.envelope.user.id
token = msg.robot.brain.data.users[user_id].vscale_auth
if !token
msg.send "Please set auth token first"
return false
req = msg.robot.http(url)
req.header('X-Token', token)
req.header('Accept', 'application/json')
req.header('Content-Type', 'application/json;charset=UTF-8')
return req
vscaleRun = (msg) ->
serverName = querystring.escape msg.match[1]
password = generatePassword()
req = createSignedRequest("#{api_url}/scalets", msg)
if req == false
return
dataObj = {
"make_from": default_os,
"rplan": default_rplan,
"do_start": true,
"name": serverName,
"password": PI:PASSWORD:<PASSWORD>END_PI,
"location": default_location
}
req.post(JSON.stringify(dataObj)) (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
response = ""
content = JSON.parse(body)
response += "Server created:\n"
response += "Please save password: #{password}\n\n"
response += "Status: #{content.status}\n"
response += "Rplan: #{content.rplan}\n"
response += "Locked: #{content.locked}\n"
response += "Name: #{content.name}\n"
response += "PublicIP: #{content.public_address.address}\n"
response += "Hostname: #{content.hostname}\n"
response += "Locations: #{content.locations}\n"
response += "Active: #{content.active}\n"
response += "Made From: #{content.made_from}\n"
msg.send response
catch error
msg.send error
vscaleAction = (msg, action) ->
ctid = querystring.escape msg.match[1]
safeAction = querystring.escape action
req = createSignedRequest("#{api_url}/scalets/"+ctid+"/"+safeAction, msg)
if req == false
return
data = JSON.stringify({
id: ctid
})
req.patch(data) (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
response = action+" successful"
msg.send response
catch error
msg.send error
vscaleDelete = (msg) ->
ctid = querystring.escape msg.match[1]
req = createSignedRequest("#{api_url}/scalets/"+ctid, msg)
if req == false
return
req.delete() (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
response = "Server deleted"
msg.send response
catch error
msg.send error
vscaleDescribe = (msg) ->
ctid = querystring.escape msg.match[1]
req = createSignedRequest("#{api_url}/scalets/"+ctid, msg)
if req == false
return
req.get() (err, res, body) ->
if err
msg.send "Vscale says: #{err}"
else
response = ""
try
content = JSON.parse(body)
response += "Status: #{content.status}\n"
response += "Rplan: #{content.rplan}\n"
response += "Locked: #{content.locked}\n"
response += "Name: #{content.name}\n"
response += "PublicIP: #{content.public_address.address}\n"
response += "Hostname: #{content.hostname}\n"
response += "Locations: #{content.locations}\n"
response += "Active: #{content.active}\n"
response += "Made From: #{content.made_from}\n"
msg.send response
catch error
msg.send error
vscaleAuth = (msg) ->
user_id = msg.envelope.user.id
credentials = msg.match[1].trim()
msg.robot.brain.data.users[user_id].vscale_auth = credentials
msg.send "Saved vscale token for #{user_id}"
vscaleList = (msg) ->
req = createSignedRequest("#{api_url}/scalets", msg)
if req == false
return
req.get() (err, res, body) ->
response = ""
response += "Name-------ID---------IP-------Status-------Plan\n"
if err
msg.send "Vscale says: #{err}"
else
try
content = JSON.parse(body)
for server in content
response += "#{server.name}-------#{server.ctid}---------#{server.public_address.address}-------#{server.status}-------#{server.rplan}\n"
msg.send response
catch error
msg.send error
printWelcome = () ->
process.stdout.write("\n"+
" .. \n"+
" .';::::;'. \n"+
" .;dkxdl::::::::,.. \n"+
" .,lxOOOOOko,..,:::::::;'. \n"+
" .cxOOOOOOkl, .';:::::::;' \n"+
" ;OOOOOxc. .';:::::. \n"+
" ;OOOOd ;::::. \n"+
" ;OOOOo ;::::. ====================================================\n"+
" ;OOOOo ;::::. == Vscale.io management initialized, see ==\n"+
" ;OOOOo ;::::. == scripts/hubot-vscale.coffee for documentation ==\n"+
" ;OOOOo ;::::. ====================================================\n"+
" ;OOOOd. ,:::::. \n"+
" ;OOOOOOd:. .;okOc::::. \n"+
" ,lkOOOOOOko;. .:xOOOOOc::,. \n"+
" .:dkOOOOOOxclkOOOOOOko'. \n"+
" 'cxOOOOOOOOOOxc' \n"+
" .;lkOOkl;. \n"+
" .. \n")
module.exports = (robot) ->
# Used for debug purposes
# proxy = require 'proxy-agent'
# process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
# robot.globalHttpOptions.httpAgent = proxy('http://127.0.0.1:8888', false)
# robot.globalHttpOptions.httpsAgent = proxy('http://127.0.0.1:8888', true)
printWelcome()
robot.respond /vs(?:cale)? list( (.+))?/i, (msg) ->
vscaleList(msg)
robot.respond /vs(?:cale)? describe (\d+)/i, (msg) ->
vscaleDescribe(msg)
robot.respond /vs(?:cale)? restart (\d+)/i, (msg) ->
vscaleAction(msg, "restart")
robot.respond /vs(?:cale)? start (\d+)/i, (msg) ->
vscaleAction(msg, "start")
robot.respond /vs(?:cale)? stop (\d+)/i, (msg) ->
vscaleAction(msg, "stop")
robot.respond /vs(?:cale)? delete (\d+)/i, (msg) ->
vscaleDelete(msg)
robot.respond /vs(?:cale)? set auth (.*)/i, (msg) ->
vscaleAuth(msg)
robot.respond /vs(?:cale)? run ([\w-_.]*)/i, (msg) ->
vscaleRun(msg) |
[
{
"context": "walk the $cheagle today\"\n with $cheagle = \"Coco\"\n say \"walking\"\n\n when \"stroll\"\n ",
"end": 4883,
"score": 0.9878063797950745,
"start": 4879,
"tag": "NAME",
"value": "Coco"
},
{
"context": " \"walk the $poodle today\"\n with $poo... | tests/features/intents.spec.coffee | fboerncke/litexa | 1 | ###
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
###
assert = require 'assert'
debug = require('debug')('litexa')
{runSkill, expectParse, expectFailParse} = require '../preamble.coffee'
describe 'supports intent statements', ->
it 'runs the intents integration test', ->
runSkill 'intents'
it 'does not allow wrong indentation of intents', ->
expectParse """
waitForResponse
say "hello"
when AMAZON.YesIntent
say "yes"
when AMAZON.NoIntent
say "no"
say "post processor"
"""
expectFailParse """
waitForResponse
when AMAZON.YesIntent
say "hello"
when AMAZON.NoIntent
say "hi"
"""
expectFailParse """
waitForResponse
say "howdy."
when AMAZON.YesIntent
say "hello"
if 3 > 1
when AMAZON.YesIntent
say "meow"
when AMAZON.NoIntent
say "bork"
"""
expectFailParse """
waitForResponse
say "howdy."
when "nested level"
say "hello"
if 3 > 1
if 4 > 3
say "one more nested level"
when "another level"
say "meow"
when AMAZON.NoIntent
say "bork"
"""
expectFailParse """
someState
when "hi"
say "hi"
when "imposter state"
say "hello"
"""
it 'does not allow duplicate intents that are not event/name-specific in the same state', ->
expectParse """
someState
when "yes"
say "wahoo"
when AMAZON.NoIntent
say "aww"
anotherState
when "yes"
or "yea"
say "wahoo"
when AMAZON.NoIntent
say "aww"
"""
expectFailParse """
someState
when "meow"
or "mreow"
say "meow meow"
when AMAZON.NoIntent
say "aww"
when "Meow"
say "meow meow"
""", "redefine intent `MEOW`"
expectFailParse """
waitForAnswer
when "Yea"
or "yes"
say "You said"
when AMAZON.NoIntent
say "You said"
when AMAZON.NoIntent
say "no."
""", "redefine intent `AMAZON.NoIntent`"
expectParse """
global
when AMAZON.StopIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye."
END
when AMAZON.StartOverIntent
say "No."
END
"""
expectFailParse """
global
when AMAZON.StopIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye"
when AMAZON.StartOverIntent
say "No."
END
when AMAZON.CancelIntent
say "bye."
END
"""
expectFailParse """
global
when AMAZON.YesIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye"
when AMAZON.StartOverIntent
say "No."
END
when AMAZON.YesIntent
say "bye."
END
"""
it 'does allow multiple name-specific intents in the same state', ->
expectParse """
global
when Connections.Response
say "Connections.Response"
when Connections.Response "Buy"
say "upsell Connections.Response"
when Connections.Response "Cancel"
say "upsell Connections.Response"
when Connections.Response "Upsell"
say "upsell Connections.Response"
when Connections.Response "Unknown"
say "unknown Connections.Response"
"""
it 'does not allow plain utterances being reused in different intent handlers', ->
# when <utterance> + or <utterance>
expectFailParse """
global
when "walk"
or "run"
say "moving"
when "run"
say "running"
""", "utterance 'run' in the intent handler for 'RUN' was already handled by the intent
'WALK'"
# or <utterance> + or <utterance>
expectFailParse """
global
when "walk"
or "run"
say "moving"
when "sprint"
or "run"
say "running"
""", "utterance 'run' in the intent handler for 'SPRINT' was already handled by the intent
'WALK'"
# or <Utterance> + or <uTTERANCE>
expectFailParse """
global
when "walk"
or "Run"
say "moving"
when "sprint"
or "rUN"
say "running"
""", "utterance 'run' in the intent handler for 'SPRINT' was already handled by the intent
'WALK'"
it 'does allow reusing otherwise identical utterances with different slot variables', ->
expectParse """
global
when "wander"
or "walk the $cheagle today"
with $cheagle = "Coco"
say "walking"
when "stroll"
or "walk the $poodle today"
with $poodle = "Princess"
say "running"
"""
it 'does not allow reusing identical utterances with identical slot variables', ->
expectFailParse """
global
when "wander"
or "walk the $Dog today"
with $Dog = "Lassie"
say "walking"
when "stroll"
or "walk the $Dog today"
say "running"
""", "utterance 'walk the $Dog today' in the intent handler for 'STROLL' was already handled by
the intent 'WANDER'"
| 18843 | ###
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
###
assert = require 'assert'
debug = require('debug')('litexa')
{runSkill, expectParse, expectFailParse} = require '../preamble.coffee'
describe 'supports intent statements', ->
it 'runs the intents integration test', ->
runSkill 'intents'
it 'does not allow wrong indentation of intents', ->
expectParse """
waitForResponse
say "hello"
when AMAZON.YesIntent
say "yes"
when AMAZON.NoIntent
say "no"
say "post processor"
"""
expectFailParse """
waitForResponse
when AMAZON.YesIntent
say "hello"
when AMAZON.NoIntent
say "hi"
"""
expectFailParse """
waitForResponse
say "howdy."
when AMAZON.YesIntent
say "hello"
if 3 > 1
when AMAZON.YesIntent
say "meow"
when AMAZON.NoIntent
say "bork"
"""
expectFailParse """
waitForResponse
say "howdy."
when "nested level"
say "hello"
if 3 > 1
if 4 > 3
say "one more nested level"
when "another level"
say "meow"
when AMAZON.NoIntent
say "bork"
"""
expectFailParse """
someState
when "hi"
say "hi"
when "imposter state"
say "hello"
"""
it 'does not allow duplicate intents that are not event/name-specific in the same state', ->
expectParse """
someState
when "yes"
say "wahoo"
when AMAZON.NoIntent
say "aww"
anotherState
when "yes"
or "yea"
say "wahoo"
when AMAZON.NoIntent
say "aww"
"""
expectFailParse """
someState
when "meow"
or "mreow"
say "meow meow"
when AMAZON.NoIntent
say "aww"
when "Meow"
say "meow meow"
""", "redefine intent `MEOW`"
expectFailParse """
waitForAnswer
when "Yea"
or "yes"
say "You said"
when AMAZON.NoIntent
say "You said"
when AMAZON.NoIntent
say "no."
""", "redefine intent `AMAZON.NoIntent`"
expectParse """
global
when AMAZON.StopIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye."
END
when AMAZON.StartOverIntent
say "No."
END
"""
expectFailParse """
global
when AMAZON.StopIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye"
when AMAZON.StartOverIntent
say "No."
END
when AMAZON.CancelIntent
say "bye."
END
"""
expectFailParse """
global
when AMAZON.YesIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye"
when AMAZON.StartOverIntent
say "No."
END
when AMAZON.YesIntent
say "bye."
END
"""
it 'does allow multiple name-specific intents in the same state', ->
expectParse """
global
when Connections.Response
say "Connections.Response"
when Connections.Response "Buy"
say "upsell Connections.Response"
when Connections.Response "Cancel"
say "upsell Connections.Response"
when Connections.Response "Upsell"
say "upsell Connections.Response"
when Connections.Response "Unknown"
say "unknown Connections.Response"
"""
it 'does not allow plain utterances being reused in different intent handlers', ->
# when <utterance> + or <utterance>
expectFailParse """
global
when "walk"
or "run"
say "moving"
when "run"
say "running"
""", "utterance 'run' in the intent handler for 'RUN' was already handled by the intent
'WALK'"
# or <utterance> + or <utterance>
expectFailParse """
global
when "walk"
or "run"
say "moving"
when "sprint"
or "run"
say "running"
""", "utterance 'run' in the intent handler for 'SPRINT' was already handled by the intent
'WALK'"
# or <Utterance> + or <uTTERANCE>
expectFailParse """
global
when "walk"
or "Run"
say "moving"
when "sprint"
or "rUN"
say "running"
""", "utterance 'run' in the intent handler for 'SPRINT' was already handled by the intent
'WALK'"
it 'does allow reusing otherwise identical utterances with different slot variables', ->
expectParse """
global
when "wander"
or "walk the $cheagle today"
with $cheagle = "<NAME>"
say "walking"
when "stroll"
or "walk the $poodle today"
with $poodle = "<NAME>"
say "running"
"""
it 'does not allow reusing identical utterances with identical slot variables', ->
expectFailParse """
global
when "wander"
or "walk the $Dog today"
with $Dog = "<NAME>"
say "walking"
when "stroll"
or "walk the $Dog today"
say "running"
""", "utterance 'walk the $Dog today' in the intent handler for 'STROLL' was already handled by
the intent 'WANDER'"
| true | ###
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
###
assert = require 'assert'
debug = require('debug')('litexa')
{runSkill, expectParse, expectFailParse} = require '../preamble.coffee'
describe 'supports intent statements', ->
it 'runs the intents integration test', ->
runSkill 'intents'
it 'does not allow wrong indentation of intents', ->
expectParse """
waitForResponse
say "hello"
when AMAZON.YesIntent
say "yes"
when AMAZON.NoIntent
say "no"
say "post processor"
"""
expectFailParse """
waitForResponse
when AMAZON.YesIntent
say "hello"
when AMAZON.NoIntent
say "hi"
"""
expectFailParse """
waitForResponse
say "howdy."
when AMAZON.YesIntent
say "hello"
if 3 > 1
when AMAZON.YesIntent
say "meow"
when AMAZON.NoIntent
say "bork"
"""
expectFailParse """
waitForResponse
say "howdy."
when "nested level"
say "hello"
if 3 > 1
if 4 > 3
say "one more nested level"
when "another level"
say "meow"
when AMAZON.NoIntent
say "bork"
"""
expectFailParse """
someState
when "hi"
say "hi"
when "imposter state"
say "hello"
"""
it 'does not allow duplicate intents that are not event/name-specific in the same state', ->
expectParse """
someState
when "yes"
say "wahoo"
when AMAZON.NoIntent
say "aww"
anotherState
when "yes"
or "yea"
say "wahoo"
when AMAZON.NoIntent
say "aww"
"""
expectFailParse """
someState
when "meow"
or "mreow"
say "meow meow"
when AMAZON.NoIntent
say "aww"
when "Meow"
say "meow meow"
""", "redefine intent `MEOW`"
expectFailParse """
waitForAnswer
when "Yea"
or "yes"
say "You said"
when AMAZON.NoIntent
say "You said"
when AMAZON.NoIntent
say "no."
""", "redefine intent `AMAZON.NoIntent`"
expectParse """
global
when AMAZON.StopIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye."
END
when AMAZON.StartOverIntent
say "No."
END
"""
expectFailParse """
global
when AMAZON.StopIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye"
when AMAZON.StartOverIntent
say "No."
END
when AMAZON.CancelIntent
say "bye."
END
"""
expectFailParse """
global
when AMAZON.YesIntent
say "Goodbye."
END
when AMAZON.CancelIntent
say "Bye"
when AMAZON.StartOverIntent
say "No."
END
when AMAZON.YesIntent
say "bye."
END
"""
it 'does allow multiple name-specific intents in the same state', ->
expectParse """
global
when Connections.Response
say "Connections.Response"
when Connections.Response "Buy"
say "upsell Connections.Response"
when Connections.Response "Cancel"
say "upsell Connections.Response"
when Connections.Response "Upsell"
say "upsell Connections.Response"
when Connections.Response "Unknown"
say "unknown Connections.Response"
"""
it 'does not allow plain utterances being reused in different intent handlers', ->
# when <utterance> + or <utterance>
expectFailParse """
global
when "walk"
or "run"
say "moving"
when "run"
say "running"
""", "utterance 'run' in the intent handler for 'RUN' was already handled by the intent
'WALK'"
# or <utterance> + or <utterance>
expectFailParse """
global
when "walk"
or "run"
say "moving"
when "sprint"
or "run"
say "running"
""", "utterance 'run' in the intent handler for 'SPRINT' was already handled by the intent
'WALK'"
# or <Utterance> + or <uTTERANCE>
expectFailParse """
global
when "walk"
or "Run"
say "moving"
when "sprint"
or "rUN"
say "running"
""", "utterance 'run' in the intent handler for 'SPRINT' was already handled by the intent
'WALK'"
it 'does allow reusing otherwise identical utterances with different slot variables', ->
expectParse """
global
when "wander"
or "walk the $cheagle today"
with $cheagle = "PI:NAME:<NAME>END_PI"
say "walking"
when "stroll"
or "walk the $poodle today"
with $poodle = "PI:NAME:<NAME>END_PI"
say "running"
"""
it 'does not allow reusing identical utterances with identical slot variables', ->
expectFailParse """
global
when "wander"
or "walk the $Dog today"
with $Dog = "PI:NAME:<NAME>END_PI"
say "walking"
when "stroll"
or "walk the $Dog today"
say "running"
""", "utterance 'walk the $Dog today' in the intent handler for 'STROLL' was already handled by
the intent 'WANDER'"
|
[
{
"context": "て答えるやつ\n#\n# Configuration:\n# None\n#\n# Author:\n# knjcode <knjcode@gmail.com>\n\nmodule.exports = (robot) ->\n",
"end": 97,
"score": 0.9996605515480042,
"start": 90,
"tag": "USERNAME",
"value": "knjcode"
},
{
"context": " Configuration:\n# None\n#\n# Author:\n#... | src/hubot-yamero-otsukare.coffee | prototype-cafe/hubot-yamero-otsukare | 0 | # Description
# 『やめろ』って聞こえたら『お疲れ様です』って答えるやつ
#
# Configuration:
# None
#
# Author:
# knjcode <knjcode@gmail.com>
module.exports = (robot) ->
robot.hear /やめろ/, (msg) ->
msg.send "お疲れ様です"
| 126651 | # Description
# 『やめろ』って聞こえたら『お疲れ様です』って答えるやつ
#
# Configuration:
# None
#
# Author:
# knjcode <<EMAIL>>
module.exports = (robot) ->
robot.hear /やめろ/, (msg) ->
msg.send "お疲れ様です"
| true | # Description
# 『やめろ』って聞こえたら『お疲れ様です』って答えるやつ
#
# Configuration:
# None
#
# Author:
# knjcode <PI:EMAIL:<EMAIL>END_PI>
module.exports = (robot) ->
robot.hear /やめろ/, (msg) ->
msg.send "お疲れ様です"
|
[
{
"context": "sets }) ->\n \n @keymode = derive.keymodes.WEB_PW unless @keymode?\n @fixed = {} unless @fixed?\n ",
"end": 2412,
"score": 0.7485347986221313,
"start": 2410,
"tag": "KEY",
"value": "PW"
}
] | src/lib/engine.iced | maxtaco/oneshallpass | 120 |
util = require './util.iced'
{config} = require './config.iced'
derive = require './derive.iced'
{Client,Record} = require './client.iced'
##=======================================================================
class Cache
constructor : () ->
@_c = {}
timeout : () -> config.timeouts.cache
clear : () -> @_c = {}
lookup : (k) ->
obj = @_c[k] = {} unless (obj = @_c[k])?
return obj
##=======================================================================
input_trim = (x) ->
if x and x?
x = x.replace /\s+/g, " "
rxx = /^(\s*)(.*?)(\s*)$/
m = x.match rxx
m[2]
else ""
input_clean = (x) ->
ret = input_trim(x).toLowerCase()
ret = null if ret.length is 0
ret
input_clean_preserve_case = (x) ->
ret = input_trim(x)
ret = null if ret.length is 0
ret
##=======================================================================
class VersionObj
constructor : (args)->
@make : (v, args) ->
switch v
when 1 then new Version1Obj args
when 2 then new Version3Obj args
when 3 then new Version3Obj args
else null
##-----------------------------------------------------------------------
class Version1Obj extends VersionObj
constructor : (@_args) ->
clean_passphrase : (pp) ->
# Replace any interior whitepsace with just a single
# plain space, but otherwise, interior whitespaces count
# as part of the passphrase
ret = input_trim(pp).replace /\s+/g, " "
ret = null unless ret.length
ret
key_fields : -> [ 'email', 'passphrase', 'host', 'generation', 'security_bits' ]
key_deriver : (i) -> new derive.V1 i
version : () -> 1
##-----------------------------------------------------------------------
class Version3Obj extends VersionObj
constructor : (@_args) ->
clean_passphrase : (pp) ->
# strip out all spaces!
ret = pp.replace /\s/g, ""
ret = null unless ret.length
ret
key_fields : -> [ 'email', 'passphrase', 'security_bits' ]
key_deriver : (i) -> new derive.V3 i
version : () -> 3
##=======================================================================
copy_dict = (input) ->
ret = {}
(ret[k] = v for k,v of input)
ret
##=======================================================================
class Input
constructor: ({ @engine, @keymode, @fixed, presets }) ->
@keymode = derive.keymodes.WEB_PW unless @keymode?
@fixed = {} unless @fixed?
# Three fields: (1) if required to be non-empty; (2) if used in server push
# and (3), a validator
SELECT = [ true, true, null ]
@_template =
host : [ true, false , (x) -> input_clean x ]
passphrase : [ true, false, (x) => @_clean_passphrase x ]
email : [ true, true, (x) -> input_clean x ]
notes : [ false, true, (x) -> input_clean_preserve_case x ]
algo_version : SELECT
length : SELECT
security_bits : SELECT
num_symbols : SELECT
generation : SELECT
no_timeout : [ false, false, null ]
@_defaults = config.input.defaults
@_values = if presets? then copy_dict presets else {}
#-----------------------------------------
fork : (keymode, fixed) ->
out = new Input { @engine, keymode, fixed, presets : @_values }
out
#-----------------------------------------
get_version_obj : () -> VersionObj.make @get 'algo_version'
timeout : () -> config.timeouts.input
clear : ->
#-----------------------------------------
# Serialize the input and assign it a unique ID
unique_id : (version_obj) ->
version_obj = @get_version_obj() unless version_obj
parts = [ version_obj.version(), @keymode ]
fields = (@get f for f in version_obj.key_fields())
all = parts.concat fields
all.join ";"
#-----------------------------------------
derive_key : (cb) ->
# the compute hook is called once per iteration in the inner loop
# of key derivation. It can be used to stop the derivation (by returning
# false) and also to report progress to the UI
vo = @get_version_obj()
uid = @unique_id vo
compute_hook = (i, tot) =>
if (ret = (uid is @unique_id(vo))) and i % 10 is 0
@engine.on_compute_step @keymode, i, tot
ret
co = @engine._cache.lookup uid
await (vo.key_deriver @).run co, compute_hook, defer res
@engine.on_compute_done @keymode, res if res
cb res
#-----------------------------------------
get : (k) ->
if (f = @fixed[k])? then f
else if not (v = @_values[k])? then @_defaults[k]
else if (cleaner = @_template[k]?[2])? then cleaner v
else v
#-----------------------------------------
set : (k, val) ->
@_values[k] = val
#-----------------------------------------
_clean_passphrase : (pp) -> @get_version_obj().clean_passphrase pp
#-----------------------------------------
is_ready : () ->
for k,row of @_template when row[0]
return false if not (v = @get k)?
true
#-----------------------------------------
to_record : () ->
d = {}
for k, row of @_template when row[1]
v = @get k
if row[0] and not v? then return null
d[k] = v
if (host = @get 'host') then new Record host, d
else null
##=======================================================================
class Timer
#-----------------------------------------
constructor : (@_obj) ->
@_id = null
#-----------------------------------------
force : () ->
@_obj.clear()
@clear()
#-----------------------------------------
set : () ->
now = util.unix_time()
hook = () =>
@_obj.clear()
@_id = null
# Only set the timer if we haven't set it recently....
@clear()
@_id = setTimeout hook, @_obj.timeout()*1000
#-----------------------------------------
clear : () ->
if @_id?
clearTimeout @_id
@_id = null
##=======================================================================
class Timers
constructor : (@_eng) ->
@_timers = (new Timer o for o in [ @_eng, @_eng._cache ] )
@_active = false
poke : () -> @start() if @_active
start : () ->
@_active = true
(t.set() for t in @_timers)
stop : () ->
@_active = false
(t.clear() for t in @_timers)
toggle : (b) ->
if b and not @_active then @start()
else if not b and @_active then @stop()
force : () ->
(t.force() for t in @_timers)
##=======================================================================
exports.Engine = class Engine
##-----------------------------------------
constructor : (opts) ->
{ presets } = opts
{ @on_compute_step, @on_compute_done, @on_timeout } = opts.hooks
@_cache = new Cache
@_inp = new Input { engine : @, presets }
@_client = new Client @, (opts.net or {})
@_timers = new Timers @
@_timers.start() unless @_inp.get 'no_timeout'
##-----------------------------------------
client : () -> @_client
clear : () ->
@client().clear()
@on_timeout()
timeout : () -> config.timeouts.document
##-----------------------------------------
poke : () -> @_timers.poke()
##-----------------------------------------
set : (k,v) ->
@_timers.toggle(not v) if k is 'no_timeout'
@_inp.set k, v
@maybe_run()
##-----------------------------------------
get : (k) -> @_inp.get k
##-----------------------------------------
run : () ->
await @_inp.derive_key defer dk
##-----------------------------------------
maybe_run : () -> @run() if @_inp.is_ready()
##-----------------------------------------
fork_input : (mode, fixed) -> @_inp.fork mode, fixed
get_input : () -> @_inp
##-----------------------------------------
is_logged_in : () -> @client().is_logged_in()
login : (cb) -> @client().login(cb)
signup : (cb) -> @client().signup(cb)
push : (cb) -> @client().push(cb)
remove : (cb) -> @client().remove(cb)
get_stored_records : () -> @client().get_stored_records()
get_record : (h) -> @client().get_record h
##-----------------------------------------
gen_backup_dump : (dumping_ground) ->
dat = if @is_logged_in() then JSON.stringify @get_stored_records()
else "Please log in first!"
dumping_ground.html dat
##-----------------------------------------
logout: (cb) ->
# Do this one first, since force will logout a second time, but
# ignore the return status code
await @client().logout defer res
@_timers.force()
cb res
##=======================================================================
| 124875 |
util = require './util.iced'
{config} = require './config.iced'
derive = require './derive.iced'
{Client,Record} = require './client.iced'
##=======================================================================
class Cache
constructor : () ->
@_c = {}
timeout : () -> config.timeouts.cache
clear : () -> @_c = {}
lookup : (k) ->
obj = @_c[k] = {} unless (obj = @_c[k])?
return obj
##=======================================================================
input_trim = (x) ->
if x and x?
x = x.replace /\s+/g, " "
rxx = /^(\s*)(.*?)(\s*)$/
m = x.match rxx
m[2]
else ""
input_clean = (x) ->
ret = input_trim(x).toLowerCase()
ret = null if ret.length is 0
ret
input_clean_preserve_case = (x) ->
ret = input_trim(x)
ret = null if ret.length is 0
ret
##=======================================================================
class VersionObj
constructor : (args)->
@make : (v, args) ->
switch v
when 1 then new Version1Obj args
when 2 then new Version3Obj args
when 3 then new Version3Obj args
else null
##-----------------------------------------------------------------------
class Version1Obj extends VersionObj
constructor : (@_args) ->
clean_passphrase : (pp) ->
# Replace any interior whitepsace with just a single
# plain space, but otherwise, interior whitespaces count
# as part of the passphrase
ret = input_trim(pp).replace /\s+/g, " "
ret = null unless ret.length
ret
key_fields : -> [ 'email', 'passphrase', 'host', 'generation', 'security_bits' ]
key_deriver : (i) -> new derive.V1 i
version : () -> 1
##-----------------------------------------------------------------------
class Version3Obj extends VersionObj
constructor : (@_args) ->
clean_passphrase : (pp) ->
# strip out all spaces!
ret = pp.replace /\s/g, ""
ret = null unless ret.length
ret
key_fields : -> [ 'email', 'passphrase', 'security_bits' ]
key_deriver : (i) -> new derive.V3 i
version : () -> 3
##=======================================================================
copy_dict = (input) ->
ret = {}
(ret[k] = v for k,v of input)
ret
##=======================================================================
class Input
constructor: ({ @engine, @keymode, @fixed, presets }) ->
@keymode = derive.keymodes.WEB_<KEY> unless @keymode?
@fixed = {} unless @fixed?
# Three fields: (1) if required to be non-empty; (2) if used in server push
# and (3), a validator
SELECT = [ true, true, null ]
@_template =
host : [ true, false , (x) -> input_clean x ]
passphrase : [ true, false, (x) => @_clean_passphrase x ]
email : [ true, true, (x) -> input_clean x ]
notes : [ false, true, (x) -> input_clean_preserve_case x ]
algo_version : SELECT
length : SELECT
security_bits : SELECT
num_symbols : SELECT
generation : SELECT
no_timeout : [ false, false, null ]
@_defaults = config.input.defaults
@_values = if presets? then copy_dict presets else {}
#-----------------------------------------
fork : (keymode, fixed) ->
out = new Input { @engine, keymode, fixed, presets : @_values }
out
#-----------------------------------------
get_version_obj : () -> VersionObj.make @get 'algo_version'
timeout : () -> config.timeouts.input
clear : ->
#-----------------------------------------
# Serialize the input and assign it a unique ID
unique_id : (version_obj) ->
version_obj = @get_version_obj() unless version_obj
parts = [ version_obj.version(), @keymode ]
fields = (@get f for f in version_obj.key_fields())
all = parts.concat fields
all.join ";"
#-----------------------------------------
derive_key : (cb) ->
# the compute hook is called once per iteration in the inner loop
# of key derivation. It can be used to stop the derivation (by returning
# false) and also to report progress to the UI
vo = @get_version_obj()
uid = @unique_id vo
compute_hook = (i, tot) =>
if (ret = (uid is @unique_id(vo))) and i % 10 is 0
@engine.on_compute_step @keymode, i, tot
ret
co = @engine._cache.lookup uid
await (vo.key_deriver @).run co, compute_hook, defer res
@engine.on_compute_done @keymode, res if res
cb res
#-----------------------------------------
get : (k) ->
if (f = @fixed[k])? then f
else if not (v = @_values[k])? then @_defaults[k]
else if (cleaner = @_template[k]?[2])? then cleaner v
else v
#-----------------------------------------
set : (k, val) ->
@_values[k] = val
#-----------------------------------------
_clean_passphrase : (pp) -> @get_version_obj().clean_passphrase pp
#-----------------------------------------
is_ready : () ->
for k,row of @_template when row[0]
return false if not (v = @get k)?
true
#-----------------------------------------
to_record : () ->
d = {}
for k, row of @_template when row[1]
v = @get k
if row[0] and not v? then return null
d[k] = v
if (host = @get 'host') then new Record host, d
else null
##=======================================================================
class Timer
#-----------------------------------------
constructor : (@_obj) ->
@_id = null
#-----------------------------------------
force : () ->
@_obj.clear()
@clear()
#-----------------------------------------
set : () ->
now = util.unix_time()
hook = () =>
@_obj.clear()
@_id = null
# Only set the timer if we haven't set it recently....
@clear()
@_id = setTimeout hook, @_obj.timeout()*1000
#-----------------------------------------
clear : () ->
if @_id?
clearTimeout @_id
@_id = null
##=======================================================================
class Timers
constructor : (@_eng) ->
@_timers = (new Timer o for o in [ @_eng, @_eng._cache ] )
@_active = false
poke : () -> @start() if @_active
start : () ->
@_active = true
(t.set() for t in @_timers)
stop : () ->
@_active = false
(t.clear() for t in @_timers)
toggle : (b) ->
if b and not @_active then @start()
else if not b and @_active then @stop()
force : () ->
(t.force() for t in @_timers)
##=======================================================================
exports.Engine = class Engine
##-----------------------------------------
constructor : (opts) ->
{ presets } = opts
{ @on_compute_step, @on_compute_done, @on_timeout } = opts.hooks
@_cache = new Cache
@_inp = new Input { engine : @, presets }
@_client = new Client @, (opts.net or {})
@_timers = new Timers @
@_timers.start() unless @_inp.get 'no_timeout'
##-----------------------------------------
client : () -> @_client
clear : () ->
@client().clear()
@on_timeout()
timeout : () -> config.timeouts.document
##-----------------------------------------
poke : () -> @_timers.poke()
##-----------------------------------------
set : (k,v) ->
@_timers.toggle(not v) if k is 'no_timeout'
@_inp.set k, v
@maybe_run()
##-----------------------------------------
get : (k) -> @_inp.get k
##-----------------------------------------
run : () ->
await @_inp.derive_key defer dk
##-----------------------------------------
maybe_run : () -> @run() if @_inp.is_ready()
##-----------------------------------------
fork_input : (mode, fixed) -> @_inp.fork mode, fixed
get_input : () -> @_inp
##-----------------------------------------
is_logged_in : () -> @client().is_logged_in()
login : (cb) -> @client().login(cb)
signup : (cb) -> @client().signup(cb)
push : (cb) -> @client().push(cb)
remove : (cb) -> @client().remove(cb)
get_stored_records : () -> @client().get_stored_records()
get_record : (h) -> @client().get_record h
##-----------------------------------------
gen_backup_dump : (dumping_ground) ->
dat = if @is_logged_in() then JSON.stringify @get_stored_records()
else "Please log in first!"
dumping_ground.html dat
##-----------------------------------------
logout: (cb) ->
# Do this one first, since force will logout a second time, but
# ignore the return status code
await @client().logout defer res
@_timers.force()
cb res
##=======================================================================
| true |
util = require './util.iced'
{config} = require './config.iced'
derive = require './derive.iced'
{Client,Record} = require './client.iced'
##=======================================================================
class Cache
constructor : () ->
@_c = {}
timeout : () -> config.timeouts.cache
clear : () -> @_c = {}
lookup : (k) ->
obj = @_c[k] = {} unless (obj = @_c[k])?
return obj
##=======================================================================
input_trim = (x) ->
if x and x?
x = x.replace /\s+/g, " "
rxx = /^(\s*)(.*?)(\s*)$/
m = x.match rxx
m[2]
else ""
input_clean = (x) ->
ret = input_trim(x).toLowerCase()
ret = null if ret.length is 0
ret
input_clean_preserve_case = (x) ->
ret = input_trim(x)
ret = null if ret.length is 0
ret
##=======================================================================
class VersionObj
constructor : (args)->
@make : (v, args) ->
switch v
when 1 then new Version1Obj args
when 2 then new Version3Obj args
when 3 then new Version3Obj args
else null
##-----------------------------------------------------------------------
class Version1Obj extends VersionObj
constructor : (@_args) ->
clean_passphrase : (pp) ->
# Replace any interior whitepsace with just a single
# plain space, but otherwise, interior whitespaces count
# as part of the passphrase
ret = input_trim(pp).replace /\s+/g, " "
ret = null unless ret.length
ret
key_fields : -> [ 'email', 'passphrase', 'host', 'generation', 'security_bits' ]
key_deriver : (i) -> new derive.V1 i
version : () -> 1
##-----------------------------------------------------------------------
class Version3Obj extends VersionObj
constructor : (@_args) ->
clean_passphrase : (pp) ->
# strip out all spaces!
ret = pp.replace /\s/g, ""
ret = null unless ret.length
ret
key_fields : -> [ 'email', 'passphrase', 'security_bits' ]
key_deriver : (i) -> new derive.V3 i
version : () -> 3
##=======================================================================
copy_dict = (input) ->
ret = {}
(ret[k] = v for k,v of input)
ret
##=======================================================================
class Input
constructor: ({ @engine, @keymode, @fixed, presets }) ->
@keymode = derive.keymodes.WEB_PI:KEY:<KEY>END_PI unless @keymode?
@fixed = {} unless @fixed?
# Three fields: (1) if required to be non-empty; (2) if used in server push
# and (3), a validator
SELECT = [ true, true, null ]
@_template =
host : [ true, false , (x) -> input_clean x ]
passphrase : [ true, false, (x) => @_clean_passphrase x ]
email : [ true, true, (x) -> input_clean x ]
notes : [ false, true, (x) -> input_clean_preserve_case x ]
algo_version : SELECT
length : SELECT
security_bits : SELECT
num_symbols : SELECT
generation : SELECT
no_timeout : [ false, false, null ]
@_defaults = config.input.defaults
@_values = if presets? then copy_dict presets else {}
#-----------------------------------------
fork : (keymode, fixed) ->
out = new Input { @engine, keymode, fixed, presets : @_values }
out
#-----------------------------------------
get_version_obj : () -> VersionObj.make @get 'algo_version'
timeout : () -> config.timeouts.input
clear : ->
#-----------------------------------------
# Serialize the input and assign it a unique ID
unique_id : (version_obj) ->
version_obj = @get_version_obj() unless version_obj
parts = [ version_obj.version(), @keymode ]
fields = (@get f for f in version_obj.key_fields())
all = parts.concat fields
all.join ";"
#-----------------------------------------
derive_key : (cb) ->
# the compute hook is called once per iteration in the inner loop
# of key derivation. It can be used to stop the derivation (by returning
# false) and also to report progress to the UI
vo = @get_version_obj()
uid = @unique_id vo
compute_hook = (i, tot) =>
if (ret = (uid is @unique_id(vo))) and i % 10 is 0
@engine.on_compute_step @keymode, i, tot
ret
co = @engine._cache.lookup uid
await (vo.key_deriver @).run co, compute_hook, defer res
@engine.on_compute_done @keymode, res if res
cb res
#-----------------------------------------
get : (k) ->
if (f = @fixed[k])? then f
else if not (v = @_values[k])? then @_defaults[k]
else if (cleaner = @_template[k]?[2])? then cleaner v
else v
#-----------------------------------------
set : (k, val) ->
@_values[k] = val
#-----------------------------------------
_clean_passphrase : (pp) -> @get_version_obj().clean_passphrase pp
#-----------------------------------------
is_ready : () ->
for k,row of @_template when row[0]
return false if not (v = @get k)?
true
#-----------------------------------------
to_record : () ->
d = {}
for k, row of @_template when row[1]
v = @get k
if row[0] and not v? then return null
d[k] = v
if (host = @get 'host') then new Record host, d
else null
##=======================================================================
class Timer
#-----------------------------------------
constructor : (@_obj) ->
@_id = null
#-----------------------------------------
force : () ->
@_obj.clear()
@clear()
#-----------------------------------------
set : () ->
now = util.unix_time()
hook = () =>
@_obj.clear()
@_id = null
# Only set the timer if we haven't set it recently....
@clear()
@_id = setTimeout hook, @_obj.timeout()*1000
#-----------------------------------------
clear : () ->
if @_id?
clearTimeout @_id
@_id = null
##=======================================================================
class Timers
constructor : (@_eng) ->
@_timers = (new Timer o for o in [ @_eng, @_eng._cache ] )
@_active = false
poke : () -> @start() if @_active
start : () ->
@_active = true
(t.set() for t in @_timers)
stop : () ->
@_active = false
(t.clear() for t in @_timers)
toggle : (b) ->
if b and not @_active then @start()
else if not b and @_active then @stop()
force : () ->
(t.force() for t in @_timers)
##=======================================================================
exports.Engine = class Engine
##-----------------------------------------
constructor : (opts) ->
{ presets } = opts
{ @on_compute_step, @on_compute_done, @on_timeout } = opts.hooks
@_cache = new Cache
@_inp = new Input { engine : @, presets }
@_client = new Client @, (opts.net or {})
@_timers = new Timers @
@_timers.start() unless @_inp.get 'no_timeout'
##-----------------------------------------
client : () -> @_client
clear : () ->
@client().clear()
@on_timeout()
timeout : () -> config.timeouts.document
##-----------------------------------------
poke : () -> @_timers.poke()
##-----------------------------------------
set : (k,v) ->
@_timers.toggle(not v) if k is 'no_timeout'
@_inp.set k, v
@maybe_run()
##-----------------------------------------
get : (k) -> @_inp.get k
##-----------------------------------------
run : () ->
await @_inp.derive_key defer dk
##-----------------------------------------
maybe_run : () -> @run() if @_inp.is_ready()
##-----------------------------------------
fork_input : (mode, fixed) -> @_inp.fork mode, fixed
get_input : () -> @_inp
##-----------------------------------------
is_logged_in : () -> @client().is_logged_in()
login : (cb) -> @client().login(cb)
signup : (cb) -> @client().signup(cb)
push : (cb) -> @client().push(cb)
remove : (cb) -> @client().remove(cb)
get_stored_records : () -> @client().get_stored_records()
get_record : (h) -> @client().get_record h
##-----------------------------------------
gen_backup_dump : (dumping_ground) ->
dat = if @is_logged_in() then JSON.stringify @get_stored_records()
else "Please log in first!"
dumping_ground.html dat
##-----------------------------------------
logout: (cb) ->
# Do this one first, since force will logout a second time, but
# ignore the return status code
await @client().logout defer res
@_timers.force()
cb res
##=======================================================================
|
[
{
"context": "# @fileoverview Validate JSX indentation\n# @author Yannick Croissant\n# This rule has been ported and modified from esl",
"end": 73,
"score": 0.9998957514762878,
"start": 56,
"tag": "NAME",
"value": "Yannick Croissant"
},
{
"context": "ted and modified from eslint and ... | src/rules/jsx-indent.coffee | danielbayley/eslint-plugin-coffee | 21 | ###*
# @fileoverview Validate JSX indentation
# @author Yannick Croissant
# This rule has been ported and modified from eslint and nodeca.
# @author Vitaly Puzrin
# @author Gyandeep Singh
# @copyright 2015 Vitaly Puzrin. All rights reserved.
# @copyright 2015 Gyandeep Singh. All rights reserved.
Copyright (C) 2014 by Vitaly Puzrin
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the 'Software'), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
'use strict'
astUtil = require '../util/react/ast'
docsUrl = require 'eslint-plugin-react/lib/util/docsUrl'
# ------------------------------------------------------------------------------
# Rule Definition
# ------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'Validate JSX indentation'
category: 'Stylistic Issues'
recommended: no
url: docsUrl 'jsx-indent'
fixable: 'whitespace'
schema: [oneOf: [{enum: ['tab']}, {type: 'integer'}]]
create: (context) ->
MESSAGE =
'Expected indentation of {{needed}} {{type}} {{characters}} but found {{gotten}}.'
extraColumnStart = 0
indentType = 'space'
indentSize = 4
sourceCode = context.getSourceCode()
if context.options.length
if context.options[0] is 'tab'
indentSize = 1
indentType = 'tab'
else if typeof context.options[0] is 'number'
indentSize = context.options[0]
indentType = 'space'
indentChar = if indentType is 'space' then ' ' else '\t'
###*
# Responsible for fixing the indentation issue fix
# @param {ASTNode} node Node violating the indent rule
# @param {Number} needed Expected indentation character count
# @returns {Function} function to be executed by the fixer
# @private
###
getFixerFunction = (node, needed) -> (fixer) ->
indent = Array(needed + 1).join indentChar
fixer.replaceTextRange(
[node.range[0] - node.loc.start.column, node.range[0]]
indent
)
###*
# Reports a given indent violation and properly pluralizes the message
# @param {ASTNode} node Node violating the indent rule
# @param {Number} needed Expected indentation character count
# @param {Number} gotten Indentation character count in the actual node/code
# @param {Object} loc Error line and column location
###
report = (node, needed, gotten, loc) ->
msgContext = {
needed
type: indentType
characters: if needed is 1 then 'character' else 'characters'
gotten
}
if loc
context.report {
node
loc
message: MESSAGE
data: msgContext
fix: getFixerFunction node, needed
}
else
context.report {
node
message: MESSAGE
data: msgContext
fix: getFixerFunction node, needed
}
###*
# Get node indent
# @param {ASTNode} node Node to examine
# @param {Boolean} byLastLine get indent of node's last line
# @param {Boolean} excludeCommas skip comma on start of line
# @return {Number} Indent
###
getNodeIndent = (node, byLastLine, excludeCommas) ->
byLastLine or= no
excludeCommas or= no
src = sourceCode.getText node, node.loc.start.column + extraColumnStart
lines = src.split '\n'
if byLastLine then src = lines[lines.length - 1] else src = lines[0]
skip = if excludeCommas then ',' else ''
if indentType is 'space'
regExp = new RegExp "^[ #{skip}]+"
else
regExp = new RegExp "^[\t#{skip}]+"
indent = regExp.exec src
if indent then indent[0].length else 0
# ###*
# # Check if the node is the right member of a logical expression
# # @param {ASTNode} node The node to check
# # @return {Boolean} true if its the case, false if not
# ###
# isRightInLogicalExp = (node) ->
# node.parent?.parent?.type is 'LogicalExpression' and
# node.parent.parent.right is node.parent
# ###*
# # Check if the node is the alternate member of a conditional expression
# # @param {ASTNode} node The node to check
# # @return {Boolean} true if its the case, false if not
# ###
# isAlternateInConditionalExp = (node) ->
# node.parent?.parent?.type is 'ConditionalExpression' and
# node.parent.parent.alternate is node.parent and
# sourceCode.getTokenBefore(node).value isnt '('
###*
# Check indent for nodes list
# @param {ASTNode} node The node to check
# @param {Number} indent needed indent
# @param {Boolean} excludeCommas skip comma on start of line
###
checkNodesIndent = (node, indent, excludeCommas) ->
nodeIndent = getNodeIndent node, no, excludeCommas
# isCorrectRightInLogicalExp =
# isRightInLogicalExp(node) and nodeIndent - indent is indentSize
# isCorrectAlternateInCondExp =
# isAlternateInConditionalExp(node) and nodeIndent - indent is 0
if (
nodeIndent isnt indent and astUtil.isNodeFirstInLine context, node # and # not isCorrectRightInLogicalExp and
)
# not isCorrectAlternateInCondExp
report node, indent, nodeIndent
handleOpeningElement = (node) ->
prevToken = sourceCode.getTokenBefore node
return unless prevToken
# Use the parent in a list or an array
if (
prevToken.type is 'JSXText' # or # (prevToken.type is 'Punctuator' and prevToken.value is ',')
)
prevToken = sourceCode.getNodeByRangeIndex prevToken.range[0]
prevToken =
if prevToken.type in ['Literal', 'JSXText']
prevToken.parent
else
prevToken
# Use the first non-punctuator token in a conditional expression
# else if prevToken.type is 'Punctuator' and prevToken.value is ':'
# prevToken = sourceCode.getTokenBefore prevToken
# while prevToken.type is 'Punctuator' and prevToken.value isnt '/'
# prevToken = sourceCode.getTokenBefore prevToken
# prevToken = sourceCode.getNodeByRangeIndex prevToken.range[0]
# while (
# prevToken.parent and
# prevToken.parent.type isnt 'ConditionalExpression'
# )
# prevToken = prevToken.parent
prevToken = prevToken.expression if (
prevToken.type is 'JSXExpressionContainer'
)
parentElementIndent = getNodeIndent prevToken
indent =
if (
node.parent.parent.type is 'ExpressionStatement' and
node.parent.parent.parent.type is 'BlockStatement' and
node.parent.parent.parent.body.length > 1 and
node.parent.parent isnt node.parent.parent.parent.body[0]
)
0
else if (
prevToken.loc.start.line is node.loc.start.line # or
)
# isRightInLogicalExp(node) or
# isAlternateInConditionalExp node
0
else
indentSize
checkNodesIndent node, parentElementIndent + indent
handleClosingElement = (node) ->
return unless node.parent
peerElementIndent = getNodeIndent(
node.parent.openingElement or node.parent.openingFragment
)
checkNodesIndent node, peerElementIndent
JSXOpeningElement: handleOpeningElement
JSXOpeningFragment: handleOpeningElement
JSXClosingElement: handleClosingElement
JSXClosingFragment: handleClosingElement
JSXExpressionContainer: (node) ->
return unless node.parent
parentNodeIndent = getNodeIndent node.parent
checkNodesIndent node, parentNodeIndent + indentSize
| 10509 | ###*
# @fileoverview Validate JSX indentation
# @author <NAME>
# This rule has been ported and modified from eslint and nodeca.
# @author <NAME>
# @author <NAME>
# @copyright 2015 <NAME>. All rights reserved.
# @copyright 2015 <NAME>. All rights reserved.
Copyright (C) 2014 by <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the 'Software'), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
'use strict'
astUtil = require '../util/react/ast'
docsUrl = require 'eslint-plugin-react/lib/util/docsUrl'
# ------------------------------------------------------------------------------
# Rule Definition
# ------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'Validate JSX indentation'
category: 'Stylistic Issues'
recommended: no
url: docsUrl 'jsx-indent'
fixable: 'whitespace'
schema: [oneOf: [{enum: ['tab']}, {type: 'integer'}]]
create: (context) ->
MESSAGE =
'Expected indentation of {{needed}} {{type}} {{characters}} but found {{gotten}}.'
extraColumnStart = 0
indentType = 'space'
indentSize = 4
sourceCode = context.getSourceCode()
if context.options.length
if context.options[0] is 'tab'
indentSize = 1
indentType = 'tab'
else if typeof context.options[0] is 'number'
indentSize = context.options[0]
indentType = 'space'
indentChar = if indentType is 'space' then ' ' else '\t'
###*
# Responsible for fixing the indentation issue fix
# @param {ASTNode} node Node violating the indent rule
# @param {Number} needed Expected indentation character count
# @returns {Function} function to be executed by the fixer
# @private
###
getFixerFunction = (node, needed) -> (fixer) ->
indent = Array(needed + 1).join indentChar
fixer.replaceTextRange(
[node.range[0] - node.loc.start.column, node.range[0]]
indent
)
###*
# Reports a given indent violation and properly pluralizes the message
# @param {ASTNode} node Node violating the indent rule
# @param {Number} needed Expected indentation character count
# @param {Number} gotten Indentation character count in the actual node/code
# @param {Object} loc Error line and column location
###
report = (node, needed, gotten, loc) ->
msgContext = {
needed
type: indentType
characters: if needed is 1 then 'character' else 'characters'
gotten
}
if loc
context.report {
node
loc
message: MESSAGE
data: msgContext
fix: getFixerFunction node, needed
}
else
context.report {
node
message: MESSAGE
data: msgContext
fix: getFixerFunction node, needed
}
###*
# Get node indent
# @param {ASTNode} node Node to examine
# @param {Boolean} byLastLine get indent of node's last line
# @param {Boolean} excludeCommas skip comma on start of line
# @return {Number} Indent
###
getNodeIndent = (node, byLastLine, excludeCommas) ->
byLastLine or= no
excludeCommas or= no
src = sourceCode.getText node, node.loc.start.column + extraColumnStart
lines = src.split '\n'
if byLastLine then src = lines[lines.length - 1] else src = lines[0]
skip = if excludeCommas then ',' else ''
if indentType is 'space'
regExp = new RegExp "^[ #{skip}]+"
else
regExp = new RegExp "^[\t#{skip}]+"
indent = regExp.exec src
if indent then indent[0].length else 0
# ###*
# # Check if the node is the right member of a logical expression
# # @param {ASTNode} node The node to check
# # @return {Boolean} true if its the case, false if not
# ###
# isRightInLogicalExp = (node) ->
# node.parent?.parent?.type is 'LogicalExpression' and
# node.parent.parent.right is node.parent
# ###*
# # Check if the node is the alternate member of a conditional expression
# # @param {ASTNode} node The node to check
# # @return {Boolean} true if its the case, false if not
# ###
# isAlternateInConditionalExp = (node) ->
# node.parent?.parent?.type is 'ConditionalExpression' and
# node.parent.parent.alternate is node.parent and
# sourceCode.getTokenBefore(node).value isnt '('
###*
# Check indent for nodes list
# @param {ASTNode} node The node to check
# @param {Number} indent needed indent
# @param {Boolean} excludeCommas skip comma on start of line
###
checkNodesIndent = (node, indent, excludeCommas) ->
nodeIndent = getNodeIndent node, no, excludeCommas
# isCorrectRightInLogicalExp =
# isRightInLogicalExp(node) and nodeIndent - indent is indentSize
# isCorrectAlternateInCondExp =
# isAlternateInConditionalExp(node) and nodeIndent - indent is 0
if (
nodeIndent isnt indent and astUtil.isNodeFirstInLine context, node # and # not isCorrectRightInLogicalExp and
)
# not isCorrectAlternateInCondExp
report node, indent, nodeIndent
handleOpeningElement = (node) ->
prevToken = sourceCode.getTokenBefore node
return unless prevToken
# Use the parent in a list or an array
if (
prevToken.type is 'JSXText' # or # (prevToken.type is 'Punctuator' and prevToken.value is ',')
)
prevToken = sourceCode.getNodeByRangeIndex prevToken.range[0]
prevToken =
if prevToken.type in ['Literal', 'JSXText']
prevToken.parent
else
prevToken
# Use the first non-punctuator token in a conditional expression
# else if prevToken.type is 'Punctuator' and prevToken.value is ':'
# prevToken = sourceCode.getTokenBefore prevToken
# while prevToken.type is 'Punctuator' and prevToken.value isnt '/'
# prevToken = sourceCode.getTokenBefore prevToken
# prevToken = sourceCode.getNodeByRangeIndex prevToken.range[0]
# while (
# prevToken.parent and
# prevToken.parent.type isnt 'ConditionalExpression'
# )
# prevToken = prevToken.parent
prevToken = prevToken.expression if (
prevToken.type is 'JSXExpressionContainer'
)
parentElementIndent = getNodeIndent prevToken
indent =
if (
node.parent.parent.type is 'ExpressionStatement' and
node.parent.parent.parent.type is 'BlockStatement' and
node.parent.parent.parent.body.length > 1 and
node.parent.parent isnt node.parent.parent.parent.body[0]
)
0
else if (
prevToken.loc.start.line is node.loc.start.line # or
)
# isRightInLogicalExp(node) or
# isAlternateInConditionalExp node
0
else
indentSize
checkNodesIndent node, parentElementIndent + indent
handleClosingElement = (node) ->
return unless node.parent
peerElementIndent = getNodeIndent(
node.parent.openingElement or node.parent.openingFragment
)
checkNodesIndent node, peerElementIndent
JSXOpeningElement: handleOpeningElement
JSXOpeningFragment: handleOpeningElement
JSXClosingElement: handleClosingElement
JSXClosingFragment: handleClosingElement
JSXExpressionContainer: (node) ->
return unless node.parent
parentNodeIndent = getNodeIndent node.parent
checkNodesIndent node, parentNodeIndent + indentSize
| true | ###*
# @fileoverview Validate JSX indentation
# @author PI:NAME:<NAME>END_PI
# This rule has been ported and modified from eslint and nodeca.
# @author PI:NAME:<NAME>END_PI
# @author PI:NAME:<NAME>END_PI
# @copyright 2015 PI:NAME:<NAME>END_PI. All rights reserved.
# @copyright 2015 PI:NAME:<NAME>END_PI. All rights reserved.
Copyright (C) 2014 by PI:NAME:<NAME>END_PI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the 'Software'), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
###
'use strict'
astUtil = require '../util/react/ast'
docsUrl = require 'eslint-plugin-react/lib/util/docsUrl'
# ------------------------------------------------------------------------------
# Rule Definition
# ------------------------------------------------------------------------------
module.exports =
meta:
docs:
description: 'Validate JSX indentation'
category: 'Stylistic Issues'
recommended: no
url: docsUrl 'jsx-indent'
fixable: 'whitespace'
schema: [oneOf: [{enum: ['tab']}, {type: 'integer'}]]
create: (context) ->
MESSAGE =
'Expected indentation of {{needed}} {{type}} {{characters}} but found {{gotten}}.'
extraColumnStart = 0
indentType = 'space'
indentSize = 4
sourceCode = context.getSourceCode()
if context.options.length
if context.options[0] is 'tab'
indentSize = 1
indentType = 'tab'
else if typeof context.options[0] is 'number'
indentSize = context.options[0]
indentType = 'space'
indentChar = if indentType is 'space' then ' ' else '\t'
###*
# Responsible for fixing the indentation issue fix
# @param {ASTNode} node Node violating the indent rule
# @param {Number} needed Expected indentation character count
# @returns {Function} function to be executed by the fixer
# @private
###
getFixerFunction = (node, needed) -> (fixer) ->
indent = Array(needed + 1).join indentChar
fixer.replaceTextRange(
[node.range[0] - node.loc.start.column, node.range[0]]
indent
)
###*
# Reports a given indent violation and properly pluralizes the message
# @param {ASTNode} node Node violating the indent rule
# @param {Number} needed Expected indentation character count
# @param {Number} gotten Indentation character count in the actual node/code
# @param {Object} loc Error line and column location
###
report = (node, needed, gotten, loc) ->
msgContext = {
needed
type: indentType
characters: if needed is 1 then 'character' else 'characters'
gotten
}
if loc
context.report {
node
loc
message: MESSAGE
data: msgContext
fix: getFixerFunction node, needed
}
else
context.report {
node
message: MESSAGE
data: msgContext
fix: getFixerFunction node, needed
}
###*
# Get node indent
# @param {ASTNode} node Node to examine
# @param {Boolean} byLastLine get indent of node's last line
# @param {Boolean} excludeCommas skip comma on start of line
# @return {Number} Indent
###
getNodeIndent = (node, byLastLine, excludeCommas) ->
byLastLine or= no
excludeCommas or= no
src = sourceCode.getText node, node.loc.start.column + extraColumnStart
lines = src.split '\n'
if byLastLine then src = lines[lines.length - 1] else src = lines[0]
skip = if excludeCommas then ',' else ''
if indentType is 'space'
regExp = new RegExp "^[ #{skip}]+"
else
regExp = new RegExp "^[\t#{skip}]+"
indent = regExp.exec src
if indent then indent[0].length else 0
# ###*
# # Check if the node is the right member of a logical expression
# # @param {ASTNode} node The node to check
# # @return {Boolean} true if its the case, false if not
# ###
# isRightInLogicalExp = (node) ->
# node.parent?.parent?.type is 'LogicalExpression' and
# node.parent.parent.right is node.parent
# ###*
# # Check if the node is the alternate member of a conditional expression
# # @param {ASTNode} node The node to check
# # @return {Boolean} true if its the case, false if not
# ###
# isAlternateInConditionalExp = (node) ->
# node.parent?.parent?.type is 'ConditionalExpression' and
# node.parent.parent.alternate is node.parent and
# sourceCode.getTokenBefore(node).value isnt '('
###*
# Check indent for nodes list
# @param {ASTNode} node The node to check
# @param {Number} indent needed indent
# @param {Boolean} excludeCommas skip comma on start of line
###
checkNodesIndent = (node, indent, excludeCommas) ->
nodeIndent = getNodeIndent node, no, excludeCommas
# isCorrectRightInLogicalExp =
# isRightInLogicalExp(node) and nodeIndent - indent is indentSize
# isCorrectAlternateInCondExp =
# isAlternateInConditionalExp(node) and nodeIndent - indent is 0
if (
nodeIndent isnt indent and astUtil.isNodeFirstInLine context, node # and # not isCorrectRightInLogicalExp and
)
# not isCorrectAlternateInCondExp
report node, indent, nodeIndent
handleOpeningElement = (node) ->
prevToken = sourceCode.getTokenBefore node
return unless prevToken
# Use the parent in a list or an array
if (
prevToken.type is 'JSXText' # or # (prevToken.type is 'Punctuator' and prevToken.value is ',')
)
prevToken = sourceCode.getNodeByRangeIndex prevToken.range[0]
prevToken =
if prevToken.type in ['Literal', 'JSXText']
prevToken.parent
else
prevToken
# Use the first non-punctuator token in a conditional expression
# else if prevToken.type is 'Punctuator' and prevToken.value is ':'
# prevToken = sourceCode.getTokenBefore prevToken
# while prevToken.type is 'Punctuator' and prevToken.value isnt '/'
# prevToken = sourceCode.getTokenBefore prevToken
# prevToken = sourceCode.getNodeByRangeIndex prevToken.range[0]
# while (
# prevToken.parent and
# prevToken.parent.type isnt 'ConditionalExpression'
# )
# prevToken = prevToken.parent
prevToken = prevToken.expression if (
prevToken.type is 'JSXExpressionContainer'
)
parentElementIndent = getNodeIndent prevToken
indent =
if (
node.parent.parent.type is 'ExpressionStatement' and
node.parent.parent.parent.type is 'BlockStatement' and
node.parent.parent.parent.body.length > 1 and
node.parent.parent isnt node.parent.parent.parent.body[0]
)
0
else if (
prevToken.loc.start.line is node.loc.start.line # or
)
# isRightInLogicalExp(node) or
# isAlternateInConditionalExp node
0
else
indentSize
checkNodesIndent node, parentElementIndent + indent
handleClosingElement = (node) ->
return unless node.parent
peerElementIndent = getNodeIndent(
node.parent.openingElement or node.parent.openingFragment
)
checkNodesIndent node, peerElementIndent
JSXOpeningElement: handleOpeningElement
JSXOpeningFragment: handleOpeningElement
JSXClosingElement: handleClosingElement
JSXClosingFragment: handleClosingElement
JSXExpressionContainer: (node) ->
return unless node.parent
parentNodeIndent = getNodeIndent node.parent
checkNodesIndent node, parentNodeIndent + indentSize
|
[
{
"context": "e createable', ->\n t = new Task({\n name: 'a name'\n time: new Date()\n opts:\n ",
"end": 244,
"score": 0.8813353180885315,
"start": 243,
"tag": "NAME",
"value": "a"
},
{
"context": " createable', ->\n t = new Task({\n name: 'a name'\n ... | test/task.coffee | ethanmick/future-client | 0 | 'use strict'
should = require('chai').should()
Task = require '../lib/task'
Client = require '../lib/client'
describe 'Task', ->
it 'should exist', ->
should.exist Task
it 'should be createable', ->
t = new Task({
name: 'a name'
time: new Date()
opts:
something: 'ok'
})
t.name.should.equal 'a name'
t.time.should.be.ok
t.opts.should.deep.equal something: 'ok'
it 'should schedule', ->
Client.connect()
t = new Task(name: 'name', time: new Date())
t.schedule()
it 'should go in ms', ->
Task.inMilliseconds(1000)
it 'should have the options', ->
t = Task.inMilliseconds(1000, name: 'derp')
t.name.should.equal 'derp'
it 'should work in seconds', ->
Task.inSeconds(1)
| 192156 | 'use strict'
should = require('chai').should()
Task = require '../lib/task'
Client = require '../lib/client'
describe 'Task', ->
it 'should exist', ->
should.exist Task
it 'should be createable', ->
t = new Task({
name: '<NAME> name'
time: new Date()
opts:
something: 'ok'
})
t.name.should.equal '<NAME>'
t.time.should.be.ok
t.opts.should.deep.equal something: 'ok'
it 'should schedule', ->
Client.connect()
t = new Task(name: 'name', time: new Date())
t.schedule()
it 'should go in ms', ->
Task.inMilliseconds(1000)
it 'should have the options', ->
t = Task.inMilliseconds(1000, name: '<NAME>')
t.name.should.equal '<NAME>'
it 'should work in seconds', ->
Task.inSeconds(1)
| true | 'use strict'
should = require('chai').should()
Task = require '../lib/task'
Client = require '../lib/client'
describe 'Task', ->
it 'should exist', ->
should.exist Task
it 'should be createable', ->
t = new Task({
name: 'PI:NAME:<NAME>END_PI name'
time: new Date()
opts:
something: 'ok'
})
t.name.should.equal 'PI:NAME:<NAME>END_PI'
t.time.should.be.ok
t.opts.should.deep.equal something: 'ok'
it 'should schedule', ->
Client.connect()
t = new Task(name: 'name', time: new Date())
t.schedule()
it 'should go in ms', ->
Task.inMilliseconds(1000)
it 'should have the options', ->
t = Task.inMilliseconds(1000, name: 'PI:NAME:<NAME>END_PI')
t.name.should.equal 'PI:NAME:<NAME>END_PI'
it 'should work in seconds', ->
Task.inSeconds(1)
|
[
{
"context": "##\n * Federated Wiki : Node Server\n *\n * Copyright Ward Cunningham and other contributors\n * Licensed under the MIT ",
"end": 67,
"score": 0.9998788833618164,
"start": 52,
"tag": "NAME",
"value": "Ward Cunningham"
},
{
"context": "nsed under the MIT license.\n * htt... | server/friends.coffee | Strongpool/fedarwiki-security-arweave | 0 | ###
* Federated Wiki : Node Server
*
* Copyright Ward Cunningham and other contributors
* Licensed under the MIT license.
* https://github.com/fedwiki/wiki-node-server/blob/master/LICENSE.txt
###
# **security.coffee**
# Module for Arweave site security.
#### Requires ####
console.log 'friends starting'
crypto = require 'crypto'
fs = require 'fs'
seedrandom = require 'seedrandom'
Arweave = require 'arweave'
# Export a function that generates security handler
# when called with options object.
module.exports = exports = (log, loga, argv) ->
security = {}
#### Private utility methods. ####
user = ''
owner = ''
admin = argv.admin
# save the location of the identity file
idFile = argv.id
arweave = Arweave.init({
host: 'arweave.net'
port: 1984,
protocol: 'https'})
#### Public stuff ####
# Retrieve owner infomation from identity file in status directory
# owner will contain { address: <address> }
security.retrieveOwner = (cb) ->
fs.exists idFile, (exists) ->
if exists
fs.readFile(idFile, (err, data) ->
if err then return cb err
owner = JSON.parse(data)
cb())
else
owner = ''
cb()
# Return the owners name
security.getOwner = getOwner = ->
if !owner.address?
ownerName = ''
else
ownerName = owner.address
ownerName
security.setOwner = setOwner = (id, cb) ->
owner = id
fs.exists idFile, (exists) ->
if !exists
fs.writeFile(idFile, JSON.stringify(id, null, " "), (err) ->
if err then return cb err
console.log "Claiming site for ", id:id
owner = id
cb())
else
cb()
security.getUser = (req) ->
if req.session.address
return req.session.address
else
return ''
security.isAuthorized = (req) ->
try
if req.session.address is owner.address
return true
else
return false
catch error
return false
# Wiki server admin
security.isAdmin = (req) ->
if req.session.address is admin
return true
else
return false
security.login = (updateOwner) ->
(req, res) ->
try
rawTx = req.body.tx
tx = arweave.transactions.fromRaw rawTx
verified = await arweave.transactions.verify tx
address = arweave.utils.bufferTob64Url(
crypto
.createHash('sha256')
.update(arweave.utils.b64UrlToBuffer(rawTx.owner))
.digest())
catch error
console.log 'Failed to verify transaction ', req.hostname, 'error ', error
res.sendStatus(500)
if owner is '' # site is not claimed
if verified
req.session.address = address
id = { address: address }
setOwner id, (err) ->
if err
console.log 'Failed to claim wiki ', req.hostname, 'error ', err
res.sendStatus(500)
updateOwner getOwner
res.json { ownerName: address }
res.end
else
res.sendStatus(401)
else
if verified and owner.address is address
req.session.address = owner.address
res.end()
else
res.sendStatus(401)
console.log 'Arweave returning login'
security.logout = () ->
(req, res) ->
req.session.reset()
res.send('OK')
security.defineRoutes = (app, cors, updateOwner) ->
app.post '/login', cors, security.login(updateOwner)
app.get '/logout', cors, security.logout()
security
| 100040 | ###
* Federated Wiki : Node Server
*
* Copyright <NAME> and other contributors
* Licensed under the MIT license.
* https://github.com/fedwiki/wiki-node-server/blob/master/LICENSE.txt
###
# **security.coffee**
# Module for Arweave site security.
#### Requires ####
console.log 'friends starting'
crypto = require 'crypto'
fs = require 'fs'
seedrandom = require 'seedrandom'
Arweave = require 'arweave'
# Export a function that generates security handler
# when called with options object.
module.exports = exports = (log, loga, argv) ->
security = {}
#### Private utility methods. ####
user = ''
owner = ''
admin = argv.admin
# save the location of the identity file
idFile = argv.id
arweave = Arweave.init({
host: 'arweave.net'
port: 1984,
protocol: 'https'})
#### Public stuff ####
# Retrieve owner infomation from identity file in status directory
# owner will contain { address: <address> }
security.retrieveOwner = (cb) ->
fs.exists idFile, (exists) ->
if exists
fs.readFile(idFile, (err, data) ->
if err then return cb err
owner = JSON.parse(data)
cb())
else
owner = ''
cb()
# Return the owners name
security.getOwner = getOwner = ->
if !owner.address?
ownerName = ''
else
ownerName = owner.address
ownerName
security.setOwner = setOwner = (id, cb) ->
owner = id
fs.exists idFile, (exists) ->
if !exists
fs.writeFile(idFile, JSON.stringify(id, null, " "), (err) ->
if err then return cb err
console.log "Claiming site for ", id:id
owner = id
cb())
else
cb()
security.getUser = (req) ->
if req.session.address
return req.session.address
else
return ''
security.isAuthorized = (req) ->
try
if req.session.address is owner.address
return true
else
return false
catch error
return false
# Wiki server admin
security.isAdmin = (req) ->
if req.session.address is admin
return true
else
return false
security.login = (updateOwner) ->
(req, res) ->
try
rawTx = req.body.tx
tx = arweave.transactions.fromRaw rawTx
verified = await arweave.transactions.verify tx
address = arweave.utils.bufferTob64Url(
crypto
.createHash('sha256')
.update(arweave.utils.b64UrlToBuffer(rawTx.owner))
.digest())
catch error
console.log 'Failed to verify transaction ', req.hostname, 'error ', error
res.sendStatus(500)
if owner is '' # site is not claimed
if verified
req.session.address = address
id = { address: address }
setOwner id, (err) ->
if err
console.log 'Failed to claim wiki ', req.hostname, 'error ', err
res.sendStatus(500)
updateOwner getOwner
res.json { ownerName: address }
res.end
else
res.sendStatus(401)
else
if verified and owner.address is address
req.session.address = owner.address
res.end()
else
res.sendStatus(401)
console.log 'Arweave returning login'
security.logout = () ->
(req, res) ->
req.session.reset()
res.send('OK')
security.defineRoutes = (app, cors, updateOwner) ->
app.post '/login', cors, security.login(updateOwner)
app.get '/logout', cors, security.logout()
security
| true | ###
* Federated Wiki : Node Server
*
* Copyright PI:NAME:<NAME>END_PI and other contributors
* Licensed under the MIT license.
* https://github.com/fedwiki/wiki-node-server/blob/master/LICENSE.txt
###
# **security.coffee**
# Module for Arweave site security.
#### Requires ####
console.log 'friends starting'
crypto = require 'crypto'
fs = require 'fs'
seedrandom = require 'seedrandom'
Arweave = require 'arweave'
# Export a function that generates security handler
# when called with options object.
module.exports = exports = (log, loga, argv) ->
security = {}
#### Private utility methods. ####
user = ''
owner = ''
admin = argv.admin
# save the location of the identity file
idFile = argv.id
arweave = Arweave.init({
host: 'arweave.net'
port: 1984,
protocol: 'https'})
#### Public stuff ####
# Retrieve owner infomation from identity file in status directory
# owner will contain { address: <address> }
security.retrieveOwner = (cb) ->
fs.exists idFile, (exists) ->
if exists
fs.readFile(idFile, (err, data) ->
if err then return cb err
owner = JSON.parse(data)
cb())
else
owner = ''
cb()
# Return the owners name
security.getOwner = getOwner = ->
if !owner.address?
ownerName = ''
else
ownerName = owner.address
ownerName
security.setOwner = setOwner = (id, cb) ->
owner = id
fs.exists idFile, (exists) ->
if !exists
fs.writeFile(idFile, JSON.stringify(id, null, " "), (err) ->
if err then return cb err
console.log "Claiming site for ", id:id
owner = id
cb())
else
cb()
security.getUser = (req) ->
if req.session.address
return req.session.address
else
return ''
security.isAuthorized = (req) ->
try
if req.session.address is owner.address
return true
else
return false
catch error
return false
# Wiki server admin
security.isAdmin = (req) ->
if req.session.address is admin
return true
else
return false
security.login = (updateOwner) ->
(req, res) ->
try
rawTx = req.body.tx
tx = arweave.transactions.fromRaw rawTx
verified = await arweave.transactions.verify tx
address = arweave.utils.bufferTob64Url(
crypto
.createHash('sha256')
.update(arweave.utils.b64UrlToBuffer(rawTx.owner))
.digest())
catch error
console.log 'Failed to verify transaction ', req.hostname, 'error ', error
res.sendStatus(500)
if owner is '' # site is not claimed
if verified
req.session.address = address
id = { address: address }
setOwner id, (err) ->
if err
console.log 'Failed to claim wiki ', req.hostname, 'error ', err
res.sendStatus(500)
updateOwner getOwner
res.json { ownerName: address }
res.end
else
res.sendStatus(401)
else
if verified and owner.address is address
req.session.address = owner.address
res.end()
else
res.sendStatus(401)
console.log 'Arweave returning login'
security.logout = () ->
(req, res) ->
req.session.reset()
res.send('OK')
security.defineRoutes = (app, cors, updateOwner) ->
app.post '/login', cors, security.login(updateOwner)
app.get '/logout', cors, security.logout()
security
|
[
{
"context": "ys.push opt.alias if opt.alias?\n combined_keys[(\"-#{prefix}#{key}\" for key in keys).join ', '] = opt\n key_col_width = min_widt",
"end": 2338,
"score": 0.9611576795578003,
"start": 2311,
"tag": "KEY",
"value": "#{prefix}#{key}\" for key in"
},
{
"context": " comb... | src/option_parser.coffee | angrave/doppio | 2 | #! /usr/bin/env coffee
_ = require '../vendor/underscore/underscore.js'
root = exports ? this.option_parser = {}
options = null
description = null
root.describe = (new_description) ->
options = {}
description = new_description
for k, category of description
category_copy = {}
for opt_name, opt_value of category
if _.isString opt_value
# kind of a hack, to allow for shorthand when we don't need to specify
# the other options
opt_value = category[opt_name] = { description: opt_value }
category_copy[opt_name] = opt_value
if opt_value.alias?
opt_value.aliased_by = opt_name
category_copy[opt_value.alias] = opt_value
options[k] = category_copy
return
root.parse = (argv) ->
args = argv[2..].reverse()
result =
standard: {}
non_standard: {}
properties: {}
_: []
parse_flag = (args, full_key, key, option_data, result_dict) ->
unless option_data[key]
console.error "Unrecognized option '#{full_key}'"
process.exit 1
result_dict[option_data[key].aliased_by ? key] =
if option_data[key].has_value
args.pop()
else
true
args
while args.length > 0
arg = args.pop()
if arg[0] isnt '-' or result.standard.jar?
result._ = args.reverse()
if result.standard.jar?
result._.unshift arg
else
result.className = arg
break
if arg.length <= 2 # for '-X', mostly
args = parse_flag args, arg, arg[1..], options.standard, result.standard
else
switch arg[1]
when 'X'
args = parse_flag args, arg, arg[2..], options.non_standard, result.non_standard
when 'D'
prop = arg[2..]
[key, value] = prop.split '='
result.properties[key] = value ? true
else
args = parse_flag args, arg, arg[1..], options.standard, result.standard
result
# formatted printing helpers
min_width = (values) -> Math.max.apply(Math, value.length for value in values)
print_col = (value, width) ->
rv = value
padding = width - value.length
rv += " " while padding-- > 0
rv
show_help = (description, prefix) ->
rv = ""
combined_keys = {}
for key, opt of description
keys = [key]
keys.push opt.alias if opt.alias?
combined_keys[("-#{prefix}#{key}" for key in keys).join ', '] = opt
key_col_width = min_width(key for key, opt of combined_keys)
for key, option of combined_keys
rv += "#{print_col key, key_col_width} #{option.description}\n"
rv
root.show_help = -> show_help description.standard, ''
root.show_non_standard_help = -> show_help description.non_standard, 'X'
| 17521 | #! /usr/bin/env coffee
_ = require '../vendor/underscore/underscore.js'
root = exports ? this.option_parser = {}
options = null
description = null
root.describe = (new_description) ->
options = {}
description = new_description
for k, category of description
category_copy = {}
for opt_name, opt_value of category
if _.isString opt_value
# kind of a hack, to allow for shorthand when we don't need to specify
# the other options
opt_value = category[opt_name] = { description: opt_value }
category_copy[opt_name] = opt_value
if opt_value.alias?
opt_value.aliased_by = opt_name
category_copy[opt_value.alias] = opt_value
options[k] = category_copy
return
root.parse = (argv) ->
args = argv[2..].reverse()
result =
standard: {}
non_standard: {}
properties: {}
_: []
parse_flag = (args, full_key, key, option_data, result_dict) ->
unless option_data[key]
console.error "Unrecognized option '#{full_key}'"
process.exit 1
result_dict[option_data[key].aliased_by ? key] =
if option_data[key].has_value
args.pop()
else
true
args
while args.length > 0
arg = args.pop()
if arg[0] isnt '-' or result.standard.jar?
result._ = args.reverse()
if result.standard.jar?
result._.unshift arg
else
result.className = arg
break
if arg.length <= 2 # for '-X', mostly
args = parse_flag args, arg, arg[1..], options.standard, result.standard
else
switch arg[1]
when 'X'
args = parse_flag args, arg, arg[2..], options.non_standard, result.non_standard
when 'D'
prop = arg[2..]
[key, value] = prop.split '='
result.properties[key] = value ? true
else
args = parse_flag args, arg, arg[1..], options.standard, result.standard
result
# formatted printing helpers
min_width = (values) -> Math.max.apply(Math, value.length for value in values)
print_col = (value, width) ->
rv = value
padding = width - value.length
rv += " " while padding-- > 0
rv
show_help = (description, prefix) ->
rv = ""
combined_keys = {}
for key, opt of description
keys = [key]
keys.push opt.alias if opt.alias?
combined_keys[("-<KEY> keys).<KEY> '] = opt
key_col_width = min_width(key for key, opt of combined_keys)
for key, option of combined_keys
rv += "#{print_col key, key_col_width} #{option.description}\n"
rv
root.show_help = -> show_help description.standard, ''
root.show_non_standard_help = -> show_help description.non_standard, 'X'
| true | #! /usr/bin/env coffee
_ = require '../vendor/underscore/underscore.js'
root = exports ? this.option_parser = {}
options = null
description = null
root.describe = (new_description) ->
options = {}
description = new_description
for k, category of description
category_copy = {}
for opt_name, opt_value of category
if _.isString opt_value
# kind of a hack, to allow for shorthand when we don't need to specify
# the other options
opt_value = category[opt_name] = { description: opt_value }
category_copy[opt_name] = opt_value
if opt_value.alias?
opt_value.aliased_by = opt_name
category_copy[opt_value.alias] = opt_value
options[k] = category_copy
return
root.parse = (argv) ->
args = argv[2..].reverse()
result =
standard: {}
non_standard: {}
properties: {}
_: []
parse_flag = (args, full_key, key, option_data, result_dict) ->
unless option_data[key]
console.error "Unrecognized option '#{full_key}'"
process.exit 1
result_dict[option_data[key].aliased_by ? key] =
if option_data[key].has_value
args.pop()
else
true
args
while args.length > 0
arg = args.pop()
if arg[0] isnt '-' or result.standard.jar?
result._ = args.reverse()
if result.standard.jar?
result._.unshift arg
else
result.className = arg
break
if arg.length <= 2 # for '-X', mostly
args = parse_flag args, arg, arg[1..], options.standard, result.standard
else
switch arg[1]
when 'X'
args = parse_flag args, arg, arg[2..], options.non_standard, result.non_standard
when 'D'
prop = arg[2..]
[key, value] = prop.split '='
result.properties[key] = value ? true
else
args = parse_flag args, arg, arg[1..], options.standard, result.standard
result
# formatted printing helpers
min_width = (values) -> Math.max.apply(Math, value.length for value in values)
print_col = (value, width) ->
rv = value
padding = width - value.length
rv += " " while padding-- > 0
rv
show_help = (description, prefix) ->
rv = ""
combined_keys = {}
for key, opt of description
keys = [key]
keys.push opt.alias if opt.alias?
combined_keys[("-PI:KEY:<KEY>END_PI keys).PI:KEY:<KEY>END_PI '] = opt
key_col_width = min_width(key for key, opt of combined_keys)
for key, option of combined_keys
rv += "#{print_col key, key_col_width} #{option.description}\n"
rv
root.show_help = -> show_help description.standard, ''
root.show_non_standard_help = -> show_help description.non_standard, 'X'
|
[
{
"context": ": (attributes) ->\n super(attributes)\n @key = @constructor.buildKey(attributes)\n @isDefault = up.framework.booting",
"end": 331,
"score": 0.5621457099914551,
"start": 310,
"tag": "KEY",
"value": "@constructor.buildKey"
}
] | lib/assets/javascripts/unpoly/classes/event_listener.coffee | pfw/unpoly | 0 | u = up.util
e = up.element
class up.EventListener extends up.Record
keys: ->
[
'element',
'eventType',
'selector',
'callback',
'jQuery',
'guard',
'baseLayer',
'passive',
'once'
]
constructor: (attributes) ->
super(attributes)
@key = @constructor.buildKey(attributes)
@isDefault = up.framework.booting
bind: ->
map = (@element.upEventListeners ||= {})
if map[@key]
up.fail('up.on(): The %o callback %o cannot be registered more than once', @eventType, @callback)
map[@key] = this
@element.addEventListener(@addListenerArgs()...)
addListenerArgs: ->
args = [@eventType, @nativeCallback]
if @passive && up.browser.canPassiveEventListener()
args.push({ passive: true })
return args
unbind: ->
if map = @element.upEventListeners
delete map[@key]
@element.removeEventListener(@addListenerArgs()...)
nativeCallback: (event) =>
# Once we drop IE11 support we can forward the { once } option
# to Element#addEventListener().
if @once
@unbind()
# 1. Since we're listing on `document`, event.currentTarget is now `document`.
# 2. event.target is the element that received an event, which might be a
# child of `selector`.
# 3. There is only a single event bubbling up the DOM, so we are only called once.
element = event.target
if @selector
element = e.closest(element, u.evalOption(@selector))
if @guard && !@guard(event)
return
if element
elementArg = if @jQuery then up.browser.jQuery(element) else element
args = [event, elementArg]
# Do not retrieve and parse [up-data] unless the listener function
# expects a third argument. Note that we must pass data for an argument
# count of 0, since then the function might take varargs.
expectedArgCount = @callback.length
unless expectedArgCount == 1 || expectedArgCount == 2
data = up.syntax.data(element)
args.push(data)
applyCallback = => @callback.apply(element, args)
if @baseLayer
# Unpoly will usually set up.layer.current when emitting an event.
# But Unpoly-unaware code will not set up.layer.current when emitting events.
# Hence layerInstance.on('click') will use this to set layer.current to layerInstance.
@baseLayer.asCurrent(applyCallback)
else
applyCallback()
@fromElement: (attributes) ->
if map = attributes.element.upEventListeners
key = @buildKey(attributes)
return map[key]
@buildKey: (attributes) ->
# Give the callback function a numeric identifier so it
# can become part of the upEventListeners key.
attributes.callback.upUid ||= u.uid()
return [
attributes.eventType,
attributes.selector,
attributes.callback.upUid
].join('|')
@unbindNonDefault: (element) ->
if map = element.upEventListeners
listeners = u.values(map)
for listener in listeners
unless listener.isDefault
# Calling unbind() also removes the listener from element.upEventListeners
listener.unbind()
| 110433 | u = up.util
e = up.element
class up.EventListener extends up.Record
keys: ->
[
'element',
'eventType',
'selector',
'callback',
'jQuery',
'guard',
'baseLayer',
'passive',
'once'
]
constructor: (attributes) ->
super(attributes)
@key = <KEY>(attributes)
@isDefault = up.framework.booting
bind: ->
map = (@element.upEventListeners ||= {})
if map[@key]
up.fail('up.on(): The %o callback %o cannot be registered more than once', @eventType, @callback)
map[@key] = this
@element.addEventListener(@addListenerArgs()...)
addListenerArgs: ->
args = [@eventType, @nativeCallback]
if @passive && up.browser.canPassiveEventListener()
args.push({ passive: true })
return args
unbind: ->
if map = @element.upEventListeners
delete map[@key]
@element.removeEventListener(@addListenerArgs()...)
nativeCallback: (event) =>
# Once we drop IE11 support we can forward the { once } option
# to Element#addEventListener().
if @once
@unbind()
# 1. Since we're listing on `document`, event.currentTarget is now `document`.
# 2. event.target is the element that received an event, which might be a
# child of `selector`.
# 3. There is only a single event bubbling up the DOM, so we are only called once.
element = event.target
if @selector
element = e.closest(element, u.evalOption(@selector))
if @guard && !@guard(event)
return
if element
elementArg = if @jQuery then up.browser.jQuery(element) else element
args = [event, elementArg]
# Do not retrieve and parse [up-data] unless the listener function
# expects a third argument. Note that we must pass data for an argument
# count of 0, since then the function might take varargs.
expectedArgCount = @callback.length
unless expectedArgCount == 1 || expectedArgCount == 2
data = up.syntax.data(element)
args.push(data)
applyCallback = => @callback.apply(element, args)
if @baseLayer
# Unpoly will usually set up.layer.current when emitting an event.
# But Unpoly-unaware code will not set up.layer.current when emitting events.
# Hence layerInstance.on('click') will use this to set layer.current to layerInstance.
@baseLayer.asCurrent(applyCallback)
else
applyCallback()
@fromElement: (attributes) ->
if map = attributes.element.upEventListeners
key = @buildKey(attributes)
return map[key]
@buildKey: (attributes) ->
# Give the callback function a numeric identifier so it
# can become part of the upEventListeners key.
attributes.callback.upUid ||= u.uid()
return [
attributes.eventType,
attributes.selector,
attributes.callback.upUid
].join('|')
@unbindNonDefault: (element) ->
if map = element.upEventListeners
listeners = u.values(map)
for listener in listeners
unless listener.isDefault
# Calling unbind() also removes the listener from element.upEventListeners
listener.unbind()
| true | u = up.util
e = up.element
class up.EventListener extends up.Record
keys: ->
[
'element',
'eventType',
'selector',
'callback',
'jQuery',
'guard',
'baseLayer',
'passive',
'once'
]
constructor: (attributes) ->
super(attributes)
@key = PI:KEY:<KEY>END_PI(attributes)
@isDefault = up.framework.booting
bind: ->
map = (@element.upEventListeners ||= {})
if map[@key]
up.fail('up.on(): The %o callback %o cannot be registered more than once', @eventType, @callback)
map[@key] = this
@element.addEventListener(@addListenerArgs()...)
addListenerArgs: ->
args = [@eventType, @nativeCallback]
if @passive && up.browser.canPassiveEventListener()
args.push({ passive: true })
return args
unbind: ->
if map = @element.upEventListeners
delete map[@key]
@element.removeEventListener(@addListenerArgs()...)
nativeCallback: (event) =>
# Once we drop IE11 support we can forward the { once } option
# to Element#addEventListener().
if @once
@unbind()
# 1. Since we're listing on `document`, event.currentTarget is now `document`.
# 2. event.target is the element that received an event, which might be a
# child of `selector`.
# 3. There is only a single event bubbling up the DOM, so we are only called once.
element = event.target
if @selector
element = e.closest(element, u.evalOption(@selector))
if @guard && !@guard(event)
return
if element
elementArg = if @jQuery then up.browser.jQuery(element) else element
args = [event, elementArg]
# Do not retrieve and parse [up-data] unless the listener function
# expects a third argument. Note that we must pass data for an argument
# count of 0, since then the function might take varargs.
expectedArgCount = @callback.length
unless expectedArgCount == 1 || expectedArgCount == 2
data = up.syntax.data(element)
args.push(data)
applyCallback = => @callback.apply(element, args)
if @baseLayer
# Unpoly will usually set up.layer.current when emitting an event.
# But Unpoly-unaware code will not set up.layer.current when emitting events.
# Hence layerInstance.on('click') will use this to set layer.current to layerInstance.
@baseLayer.asCurrent(applyCallback)
else
applyCallback()
@fromElement: (attributes) ->
if map = attributes.element.upEventListeners
key = @buildKey(attributes)
return map[key]
@buildKey: (attributes) ->
# Give the callback function a numeric identifier so it
# can become part of the upEventListeners key.
attributes.callback.upUid ||= u.uid()
return [
attributes.eventType,
attributes.selector,
attributes.callback.upUid
].join('|')
@unbindNonDefault: (element) ->
if map = element.upEventListeners
listeners = u.values(map)
for listener in listeners
unless listener.isDefault
# Calling unbind() also removes the listener from element.upEventListeners
listener.unbind()
|
[
{
"context": "# Fuzzy\n# https://github.com/myork/fuzzy\n#\n# Copyright (c) 2012 Matt York\n# Licensed",
"end": 34,
"score": 0.9993451833724976,
"start": 29,
"tag": "USERNAME",
"value": "myork"
},
{
"context": "ps://github.com/myork/fuzzy\n#\n# Copyright (c) 2012 Matt York\n# License... | atom/packages/git-plus/lib/models/fuzzy.coffee | ericeslinger/dotfiles | 0 | # Fuzzy
# https://github.com/myork/fuzzy
#
# Copyright (c) 2012 Matt York
# Licensed under the MIT license.
fuzzy = {}
module.exports = fuzzy
# Return all elements of `array` that have a fuzzy
# match against `pattern`.
fuzzy.simpleFilter = (pattern, array) ->
array.filter (string) ->
fuzzy.test pattern, string
# Does `pattern` fuzzy match `string`?
fuzzy.test = (pattern, string) ->
fuzzy.match(pattern, string) isnt null
# If `pattern` (input) matches `string` (test against), wrap each matching
# character in `opts.pre` and `opts.post`. If no match, return null
fuzzy.match = (pattern, string, opts={}) ->
patternIdx = 0
result = []
len = string.length
totalScore = 0
currScore = 0
# prefix
pre = opts.pre or ""
# suffix
post = opts.post or ""
# String to compare against. This might be a lowercase version of the
# raw string
compareString = opts.caseSensitive and string or string.toLowerCase()
ch = undefined
compareChar = undefined
pattern = opts.caseSensitive and pattern or pattern.toLowerCase()
# For each character in the string, either add it to the result
# or wrap in template if its the next string in the pattern
idx = 0
while idx < len
# Ignore Whitespaces
patternIdx++ if pattern[patternIdx] is ' '
ch = string[idx]
if compareString[idx] is pattern[patternIdx]
ch = pre + ch + post
patternIdx += 1
currScore += 1 + currScore
else
currScore = 0
totalScore += currScore
result[result.length] = ch
idx++
return {rendered: result.join(""), score: totalScore} if patternIdx is pattern.length
fuzzy.filter = (pattern, arr, opts={}) ->
highlighted = arr.reduce(
(prev, element, idx, arr) ->
str = element
str = opts.extract(element) if opts.extract
rendered = fuzzy.match(pattern, str, opts)
if rendered?
prev[prev.length] =
string: rendered.rendered
score: rendered.score
index: idx
original: element
prev
,[]
).sort (a, b) ->
compare = b.score - a.score
if compare is 0
return opts.extract(a.original).length - opts.extract(b.original).length if opts.extract
return a.original.length - b.original.length
return compare if compare
a.index - b.index
# No matches? Sort the original array using Damerau-Levenshtein.
if highlighted.length < 1
highlighted = arr.reduce(
(prev, element, idx, arr) ->
str = element
str = opts.extract(element) if opts.extract
prev[prev.length] =
string: str
score: levenshtein(pattern, str)
index: idx
original: element
prev
,[]
).sort (a, b) ->
compare = a.score - b.score
return compare if compare
b.index - a.index
highlighted
###
# Copyright (c) 2011 Andrei Mackenzie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
# Compute the edit distance between the two given strings
levenshtein = (a, b) ->
return b.length if a.length is 0
return a.length if b.length is 0
matrix = []
# increment along the first column of each row
i = undefined
i = 0
while i <= b.length
matrix[i] = [i]
i++
# increment each column in the first row
j = undefined
j = 0
while j <= a.length
matrix[0][j] = j
j++
# Fill in the rest of the matrix
i = 1
while i <= b.length
j = 1
while j <= a.length
if b.charAt(i - 1) is a.charAt(j - 1)
matrix[i][j] = matrix[i - 1][j - 1]
else
# substitution
# insertion
matrix[i][j] = Math.min(matrix[i - 1][j - 1] + 1, Math.min(matrix[i][j - 1] + 1, matrix[i - 1][j] + 1)) # deletion
j++
i++
matrix[b.length][a.length]
| 91327 | # Fuzzy
# https://github.com/myork/fuzzy
#
# Copyright (c) 2012 <NAME>
# Licensed under the MIT license.
fuzzy = {}
module.exports = fuzzy
# Return all elements of `array` that have a fuzzy
# match against `pattern`.
fuzzy.simpleFilter = (pattern, array) ->
array.filter (string) ->
fuzzy.test pattern, string
# Does `pattern` fuzzy match `string`?
fuzzy.test = (pattern, string) ->
fuzzy.match(pattern, string) isnt null
# If `pattern` (input) matches `string` (test against), wrap each matching
# character in `opts.pre` and `opts.post`. If no match, return null
fuzzy.match = (pattern, string, opts={}) ->
patternIdx = 0
result = []
len = string.length
totalScore = 0
currScore = 0
# prefix
pre = opts.pre or ""
# suffix
post = opts.post or ""
# String to compare against. This might be a lowercase version of the
# raw string
compareString = opts.caseSensitive and string or string.toLowerCase()
ch = undefined
compareChar = undefined
pattern = opts.caseSensitive and pattern or pattern.toLowerCase()
# For each character in the string, either add it to the result
# or wrap in template if its the next string in the pattern
idx = 0
while idx < len
# Ignore Whitespaces
patternIdx++ if pattern[patternIdx] is ' '
ch = string[idx]
if compareString[idx] is pattern[patternIdx]
ch = pre + ch + post
patternIdx += 1
currScore += 1 + currScore
else
currScore = 0
totalScore += currScore
result[result.length] = ch
idx++
return {rendered: result.join(""), score: totalScore} if patternIdx is pattern.length
fuzzy.filter = (pattern, arr, opts={}) ->
highlighted = arr.reduce(
(prev, element, idx, arr) ->
str = element
str = opts.extract(element) if opts.extract
rendered = fuzzy.match(pattern, str, opts)
if rendered?
prev[prev.length] =
string: rendered.rendered
score: rendered.score
index: idx
original: element
prev
,[]
).sort (a, b) ->
compare = b.score - a.score
if compare is 0
return opts.extract(a.original).length - opts.extract(b.original).length if opts.extract
return a.original.length - b.original.length
return compare if compare
a.index - b.index
# No matches? Sort the original array using Damerau-Levenshtein.
if highlighted.length < 1
highlighted = arr.reduce(
(prev, element, idx, arr) ->
str = element
str = opts.extract(element) if opts.extract
prev[prev.length] =
string: str
score: levenshtein(pattern, str)
index: idx
original: element
prev
,[]
).sort (a, b) ->
compare = a.score - b.score
return compare if compare
b.index - a.index
highlighted
###
# Copyright (c) 2011 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
# Compute the edit distance between the two given strings
levenshtein = (a, b) ->
return b.length if a.length is 0
return a.length if b.length is 0
matrix = []
# increment along the first column of each row
i = undefined
i = 0
while i <= b.length
matrix[i] = [i]
i++
# increment each column in the first row
j = undefined
j = 0
while j <= a.length
matrix[0][j] = j
j++
# Fill in the rest of the matrix
i = 1
while i <= b.length
j = 1
while j <= a.length
if b.charAt(i - 1) is a.charAt(j - 1)
matrix[i][j] = matrix[i - 1][j - 1]
else
# substitution
# insertion
matrix[i][j] = Math.min(matrix[i - 1][j - 1] + 1, Math.min(matrix[i][j - 1] + 1, matrix[i - 1][j] + 1)) # deletion
j++
i++
matrix[b.length][a.length]
| true | # Fuzzy
# https://github.com/myork/fuzzy
#
# Copyright (c) 2012 PI:NAME:<NAME>END_PI
# Licensed under the MIT license.
fuzzy = {}
module.exports = fuzzy
# Return all elements of `array` that have a fuzzy
# match against `pattern`.
fuzzy.simpleFilter = (pattern, array) ->
array.filter (string) ->
fuzzy.test pattern, string
# Does `pattern` fuzzy match `string`?
fuzzy.test = (pattern, string) ->
fuzzy.match(pattern, string) isnt null
# If `pattern` (input) matches `string` (test against), wrap each matching
# character in `opts.pre` and `opts.post`. If no match, return null
fuzzy.match = (pattern, string, opts={}) ->
patternIdx = 0
result = []
len = string.length
totalScore = 0
currScore = 0
# prefix
pre = opts.pre or ""
# suffix
post = opts.post or ""
# String to compare against. This might be a lowercase version of the
# raw string
compareString = opts.caseSensitive and string or string.toLowerCase()
ch = undefined
compareChar = undefined
pattern = opts.caseSensitive and pattern or pattern.toLowerCase()
# For each character in the string, either add it to the result
# or wrap in template if its the next string in the pattern
idx = 0
while idx < len
# Ignore Whitespaces
patternIdx++ if pattern[patternIdx] is ' '
ch = string[idx]
if compareString[idx] is pattern[patternIdx]
ch = pre + ch + post
patternIdx += 1
currScore += 1 + currScore
else
currScore = 0
totalScore += currScore
result[result.length] = ch
idx++
return {rendered: result.join(""), score: totalScore} if patternIdx is pattern.length
fuzzy.filter = (pattern, arr, opts={}) ->
highlighted = arr.reduce(
(prev, element, idx, arr) ->
str = element
str = opts.extract(element) if opts.extract
rendered = fuzzy.match(pattern, str, opts)
if rendered?
prev[prev.length] =
string: rendered.rendered
score: rendered.score
index: idx
original: element
prev
,[]
).sort (a, b) ->
compare = b.score - a.score
if compare is 0
return opts.extract(a.original).length - opts.extract(b.original).length if opts.extract
return a.original.length - b.original.length
return compare if compare
a.index - b.index
# No matches? Sort the original array using Damerau-Levenshtein.
if highlighted.length < 1
highlighted = arr.reduce(
(prev, element, idx, arr) ->
str = element
str = opts.extract(element) if opts.extract
prev[prev.length] =
string: str
score: levenshtein(pattern, str)
index: idx
original: element
prev
,[]
).sort (a, b) ->
compare = a.score - b.score
return compare if compare
b.index - a.index
highlighted
###
# Copyright (c) 2011 PI:NAME:<NAME>END_PI
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
###
# Compute the edit distance between the two given strings
levenshtein = (a, b) ->
return b.length if a.length is 0
return a.length if b.length is 0
matrix = []
# increment along the first column of each row
i = undefined
i = 0
while i <= b.length
matrix[i] = [i]
i++
# increment each column in the first row
j = undefined
j = 0
while j <= a.length
matrix[0][j] = j
j++
# Fill in the rest of the matrix
i = 1
while i <= b.length
j = 1
while j <= a.length
if b.charAt(i - 1) is a.charAt(j - 1)
matrix[i][j] = matrix[i - 1][j - 1]
else
# substitution
# insertion
matrix[i][j] = Math.min(matrix[i - 1][j - 1] + 1, Math.min(matrix[i][j - 1] + 1, matrix[i - 1][j] + 1)) # deletion
j++
i++
matrix[b.length][a.length]
|
[
{
"context": " # Creating valid Post.\n post = new Post text: 'Norad II crashed!'\n assert post.valid(), true\n\n # Creati",
"end": 2077,
"score": 0.994596004486084,
"start": 2069,
"tag": "NAME",
"value": "Norad II"
}
] | examples/validations.coffee | wanbok/mongo-model | 1 | # Validations in [Model](model.html).
#
# You can use `errors` to store error messages, `valid` to check validity of Model and `validate`,
# `before validate` and 'after validate` callbacks for defining validation rules.
#
# In this example we'll create simple Blog Application and
# use validations to ensure correctness of Post and Comments.
_ = require 'underscore'
Model = require 'mongo-model'
# Enabling optional [synchronous](synchronous.html) mode.
require 'mongo-model/lib/sync'
sync = ->
# Connecting to default database and clearing it before starting.
db = Model.db()
db.clear()
# ### Basics
#
# Defining Post and requiring presence of text attribute.
class global.Post extends Model
@collection 'posts'
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Creating post, it can't be saved because its text is empty and it's invalid.
post = new Post()
assert post.valid(), false
assert post.errors(), {text: ["can't be empty"]}
assert post.save(), false
# Let's add text to it so it will be valid and we can save it.
post.text = 'Norad II crashed!'
assert post.valid(), true
assert post.save(), true
# Usually when model is invalid it can't be saved, but if You need You can skip
# validation and save invalid model.
post = new Post()
assert post.valid(), false
assert post.save(validate: false), true
# ### Embedded Models
#
# MongoDB encourage to use Embedded Models a lot, so it's important
# to provide validations for it.
#
# Defining Post with embedded Comments.
class global.Post extends Model
@collection 'posts'
@embedded 'comments'
constructor: (args...) ->
@comments = []
super args...
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Defining Comment.
class global.Comment extends Model
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Creating valid Post.
post = new Post text: 'Norad II crashed!'
assert post.valid(), true
# Creating invalid Comment with empty text.
comment = new Comment()
assert comment.valid(), false
# Validation of Main Model also runs Validations on all its Embedded Models,
# so adding invalid Comment to valid Post will make Post also invalid.
post.comments.push comment
assert post.valid(), false
assert post.save(), false
# In order to save Post we need to make the Comment valid.
comment.text = "Where?"
assert comment.valid(), true
assert post.valid(), true
assert post.save(), true
# Closing connection.
db.close()
# This stuff needed for [synchronous](synchronous.html) mode.
Fiber(sync).run()
global.assert = (args...) -> require('assert').deepEqual args...
# ### Predefined validations
#
# In example before we covered custom validations, but usually in 90% of cases You need only
# a small set of simple validations, and it would be really handy if there will be helpers like this:
#
# validatesFormatOf
# validatesLengthOf
# validatesNumericalityOf
# validatesAcceptanceOf
# validatesConfirmationOf
# validatesPresenceOf
# validatesTrueFor
# validatesExclusionOf
# validatesInclusionOf
#
# But sadly, there's no such Validation library in JavaScript right now. And I feel that it would be wrong
# to reinvent a bycicle and implement this stuff directly in Model.
# I believe it should be implemented as a completely standalone library.
#
# So, right now predefined validations aren't available, but it's important feature and I'm looking for a ways to
# implemente it. Probably it will be available in the next versions. | 70722 | # Validations in [Model](model.html).
#
# You can use `errors` to store error messages, `valid` to check validity of Model and `validate`,
# `before validate` and 'after validate` callbacks for defining validation rules.
#
# In this example we'll create simple Blog Application and
# use validations to ensure correctness of Post and Comments.
_ = require 'underscore'
Model = require 'mongo-model'
# Enabling optional [synchronous](synchronous.html) mode.
require 'mongo-model/lib/sync'
sync = ->
# Connecting to default database and clearing it before starting.
db = Model.db()
db.clear()
# ### Basics
#
# Defining Post and requiring presence of text attribute.
class global.Post extends Model
@collection 'posts'
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Creating post, it can't be saved because its text is empty and it's invalid.
post = new Post()
assert post.valid(), false
assert post.errors(), {text: ["can't be empty"]}
assert post.save(), false
# Let's add text to it so it will be valid and we can save it.
post.text = 'Norad II crashed!'
assert post.valid(), true
assert post.save(), true
# Usually when model is invalid it can't be saved, but if You need You can skip
# validation and save invalid model.
post = new Post()
assert post.valid(), false
assert post.save(validate: false), true
# ### Embedded Models
#
# MongoDB encourage to use Embedded Models a lot, so it's important
# to provide validations for it.
#
# Defining Post with embedded Comments.
class global.Post extends Model
@collection 'posts'
@embedded 'comments'
constructor: (args...) ->
@comments = []
super args...
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Defining Comment.
class global.Comment extends Model
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Creating valid Post.
post = new Post text: '<NAME> crashed!'
assert post.valid(), true
# Creating invalid Comment with empty text.
comment = new Comment()
assert comment.valid(), false
# Validation of Main Model also runs Validations on all its Embedded Models,
# so adding invalid Comment to valid Post will make Post also invalid.
post.comments.push comment
assert post.valid(), false
assert post.save(), false
# In order to save Post we need to make the Comment valid.
comment.text = "Where?"
assert comment.valid(), true
assert post.valid(), true
assert post.save(), true
# Closing connection.
db.close()
# This stuff needed for [synchronous](synchronous.html) mode.
Fiber(sync).run()
global.assert = (args...) -> require('assert').deepEqual args...
# ### Predefined validations
#
# In example before we covered custom validations, but usually in 90% of cases You need only
# a small set of simple validations, and it would be really handy if there will be helpers like this:
#
# validatesFormatOf
# validatesLengthOf
# validatesNumericalityOf
# validatesAcceptanceOf
# validatesConfirmationOf
# validatesPresenceOf
# validatesTrueFor
# validatesExclusionOf
# validatesInclusionOf
#
# But sadly, there's no such Validation library in JavaScript right now. And I feel that it would be wrong
# to reinvent a bycicle and implement this stuff directly in Model.
# I believe it should be implemented as a completely standalone library.
#
# So, right now predefined validations aren't available, but it's important feature and I'm looking for a ways to
# implemente it. Probably it will be available in the next versions. | true | # Validations in [Model](model.html).
#
# You can use `errors` to store error messages, `valid` to check validity of Model and `validate`,
# `before validate` and 'after validate` callbacks for defining validation rules.
#
# In this example we'll create simple Blog Application and
# use validations to ensure correctness of Post and Comments.
_ = require 'underscore'
Model = require 'mongo-model'
# Enabling optional [synchronous](synchronous.html) mode.
require 'mongo-model/lib/sync'
sync = ->
# Connecting to default database and clearing it before starting.
db = Model.db()
db.clear()
# ### Basics
#
# Defining Post and requiring presence of text attribute.
class global.Post extends Model
@collection 'posts'
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Creating post, it can't be saved because its text is empty and it's invalid.
post = new Post()
assert post.valid(), false
assert post.errors(), {text: ["can't be empty"]}
assert post.save(), false
# Let's add text to it so it will be valid and we can save it.
post.text = 'Norad II crashed!'
assert post.valid(), true
assert post.save(), true
# Usually when model is invalid it can't be saved, but if You need You can skip
# validation and save invalid model.
post = new Post()
assert post.valid(), false
assert post.save(validate: false), true
# ### Embedded Models
#
# MongoDB encourage to use Embedded Models a lot, so it's important
# to provide validations for it.
#
# Defining Post with embedded Comments.
class global.Post extends Model
@collection 'posts'
@embedded 'comments'
constructor: (args...) ->
@comments = []
super args...
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Defining Comment.
class global.Comment extends Model
@validate (callback) ->
@errors().add text: "can't be empty" unless @text
callback()
# Creating valid Post.
post = new Post text: 'PI:NAME:<NAME>END_PI crashed!'
assert post.valid(), true
# Creating invalid Comment with empty text.
comment = new Comment()
assert comment.valid(), false
# Validation of Main Model also runs Validations on all its Embedded Models,
# so adding invalid Comment to valid Post will make Post also invalid.
post.comments.push comment
assert post.valid(), false
assert post.save(), false
# In order to save Post we need to make the Comment valid.
comment.text = "Where?"
assert comment.valid(), true
assert post.valid(), true
assert post.save(), true
# Closing connection.
db.close()
# This stuff needed for [synchronous](synchronous.html) mode.
Fiber(sync).run()
global.assert = (args...) -> require('assert').deepEqual args...
# ### Predefined validations
#
# In example before we covered custom validations, but usually in 90% of cases You need only
# a small set of simple validations, and it would be really handy if there will be helpers like this:
#
# validatesFormatOf
# validatesLengthOf
# validatesNumericalityOf
# validatesAcceptanceOf
# validatesConfirmationOf
# validatesPresenceOf
# validatesTrueFor
# validatesExclusionOf
# validatesInclusionOf
#
# But sadly, there's no such Validation library in JavaScript right now. And I feel that it would be wrong
# to reinvent a bycicle and implement this stuff directly in Model.
# I believe it should be implemented as a completely standalone library.
#
# So, right now predefined validations aren't available, but it's important feature and I'm looking for a ways to
# implemente it. Probably it will be available in the next versions. |
[
{
"context": "\n###\nNode CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)\nTesting the rea",
"end": 37,
"score": 0.9998475313186646,
"start": 26,
"tag": "NAME",
"value": "David Worms"
},
{
"context": "\n###\nNode CSV - Copyright David Worms <open@adaltas.com> (BSD Licen... | Tools/phantalyzer-master/node_modules/csv/test/trim.coffee | pianomanx/watchdog | 377 |
###
Node CSV - Copyright David Worms <open@adaltas.com> (BSD Licensed)
Testing the read options `trim`, `ltrim` and `rtrim`.
###
require 'coffee-script'
fs = require 'fs'
should = require 'should'
csv = if process.env.CSV_COV then require '../lib-cov' else require '../src'
describe 'ltrim', ->
it 'should ignore the whitespaces immediately following the delimiter', (next) ->
csv()
.from.string("""
FIELD_1, FIELD_2, FIELD_3, FIELD_4, FIELD_5, FIELD_6
20322051544," 1979",8.8017226E7, ABC,45,2000-01-01
28392898392, " 1974", 8.8392926E7,DEF, 23, 2050-11-27
""", ltrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544, 1979,8.8017226E7,ABC,45,2000-01-01
28392898392, 1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should work on last field', (next) ->
csv()
.from.string("""
FIELD_1, FIELD_2
20322051544, a
28392898392, " "
""", ltrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2
20322051544,a
28392898392,
"""
next()
describe 'rtrim', ->
it 'should ignore the whitespaces immediately preceding the delimiter', (next) ->
csv()
.from.string("""
FIELD_1 ,FIELD_2 ,FIELD_3 ,FIELD_4 ,FIELD_5 ,FIELD_6
20322051544 ,1979,8.8017226E7 ,ABC,45 ,2000-01-01
28392898392,1974 ,8.8392926E7,DEF,23 ,2050-11-27
""", rtrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544,1979,8.8017226E7,ABC,45,2000-01-01
28392898392,1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
describe 'trim', ->
it 'should ignore the whitespaces immediately preceding and following the delimiter', (next) ->
csv()
.from.string("""
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
""", trim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544,1979,8.8017226E7,ABC,45,2000-01-01
28392898392,1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should preserve surrounding whitespaces', (next) ->
csv()
.from.string("""
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
""")
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
"""
next()
| 103771 |
###
Node CSV - Copyright <NAME> <<EMAIL>> (BSD Licensed)
Testing the read options `trim`, `ltrim` and `rtrim`.
###
require 'coffee-script'
fs = require 'fs'
should = require 'should'
csv = if process.env.CSV_COV then require '../lib-cov' else require '../src'
describe 'ltrim', ->
it 'should ignore the whitespaces immediately following the delimiter', (next) ->
csv()
.from.string("""
FIELD_1, FIELD_2, FIELD_3, FIELD_4, FIELD_5, FIELD_6
20322051544," 1979",8.8017226E7, ABC,45,2000-01-01
28392898392, " 1974", 8.8392926E7,DEF, 23, 2050-11-27
""", ltrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544, 1979,8.8017226E7,ABC,45,2000-01-01
28392898392, 1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should work on last field', (next) ->
csv()
.from.string("""
FIELD_1, FIELD_2
20322051544, a
28392898392, " "
""", ltrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2
20322051544,a
28392898392,
"""
next()
describe 'rtrim', ->
it 'should ignore the whitespaces immediately preceding the delimiter', (next) ->
csv()
.from.string("""
FIELD_1 ,FIELD_2 ,FIELD_3 ,FIELD_4 ,FIELD_5 ,FIELD_6
20322051544 ,1979,8.8017226E7 ,ABC,45 ,2000-01-01
28392898392,1974 ,8.8392926E7,DEF,23 ,2050-11-27
""", rtrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544,1979,8.8017226E7,ABC,45,2000-01-01
28392898392,1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
describe 'trim', ->
it 'should ignore the whitespaces immediately preceding and following the delimiter', (next) ->
csv()
.from.string("""
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
""", trim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544,1979,8.8017226E7,ABC,45,2000-01-01
28392898392,1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should preserve surrounding whitespaces', (next) ->
csv()
.from.string("""
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
""")
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
"""
next()
| true |
###
Node CSV - Copyright PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> (BSD Licensed)
Testing the read options `trim`, `ltrim` and `rtrim`.
###
require 'coffee-script'
fs = require 'fs'
should = require 'should'
csv = if process.env.CSV_COV then require '../lib-cov' else require '../src'
describe 'ltrim', ->
it 'should ignore the whitespaces immediately following the delimiter', (next) ->
csv()
.from.string("""
FIELD_1, FIELD_2, FIELD_3, FIELD_4, FIELD_5, FIELD_6
20322051544," 1979",8.8017226E7, ABC,45,2000-01-01
28392898392, " 1974", 8.8392926E7,DEF, 23, 2050-11-27
""", ltrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544, 1979,8.8017226E7,ABC,45,2000-01-01
28392898392, 1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should work on last field', (next) ->
csv()
.from.string("""
FIELD_1, FIELD_2
20322051544, a
28392898392, " "
""", ltrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2
20322051544,a
28392898392,
"""
next()
describe 'rtrim', ->
it 'should ignore the whitespaces immediately preceding the delimiter', (next) ->
csv()
.from.string("""
FIELD_1 ,FIELD_2 ,FIELD_3 ,FIELD_4 ,FIELD_5 ,FIELD_6
20322051544 ,1979,8.8017226E7 ,ABC,45 ,2000-01-01
28392898392,1974 ,8.8392926E7,DEF,23 ,2050-11-27
""", rtrim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544,1979,8.8017226E7,ABC,45,2000-01-01
28392898392,1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
describe 'trim', ->
it 'should ignore the whitespaces immediately preceding and following the delimiter', (next) ->
csv()
.from.string("""
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
""", trim: true )
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1,FIELD_2,FIELD_3,FIELD_4,FIELD_5,FIELD_6
20322051544,1979,8.8017226E7,ABC,45,2000-01-01
28392898392,1974,8.8392926E7,DEF,23,2050-11-27
"""
next()
it 'should preserve surrounding whitespaces', (next) ->
csv()
.from.string("""
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
""")
.transform( (record, index) -> record )
.on 'close', (count) ->
count.should.eql 3
.to.string (data) ->
data.should.eql """
FIELD_1 , FIELD_2 , FIELD_3,FIELD_4 , FIELD_5,FIELD_6
20322051544,1979 ,8.8017226E7,ABC , 45 , 2000-01-01
28392898392, 1974,8.8392926E7,DEF , 23 , 2050-11-27
"""
next()
|
[
{
"context": "###\n Copyright (c) 2015 Abi Hafshin\n see README.md\n###\n\nasset = require 'connect-ass",
"end": 36,
"score": 0.9998801350593567,
"start": 25,
"tag": "NAME",
"value": "Abi Hafshin"
}
] | app/asset.coffee | abihf/express-rad | 0 | ###
Copyright (c) 2015 Abi Hafshin
see README.md
###
asset = require 'connect-assets'
Mincer = require('mincer')
require('mincer-cssurl')(Mincer)
Mincer.Template.libs.coffee = require('iced-coffee-script')
module.exports = () ->
options =
paths: [
'assets/css',
'assets/js',
'assets/img'
]
gzip: process.env.NODE_ENV == 'production'
return asset options, (instance) ->
env = instance.environment
env.enable('cssurl')
env.enable('autoprefixer')
| 159243 | ###
Copyright (c) 2015 <NAME>
see README.md
###
asset = require 'connect-assets'
Mincer = require('mincer')
require('mincer-cssurl')(Mincer)
Mincer.Template.libs.coffee = require('iced-coffee-script')
module.exports = () ->
options =
paths: [
'assets/css',
'assets/js',
'assets/img'
]
gzip: process.env.NODE_ENV == 'production'
return asset options, (instance) ->
env = instance.environment
env.enable('cssurl')
env.enable('autoprefixer')
| true | ###
Copyright (c) 2015 PI:NAME:<NAME>END_PI
see README.md
###
asset = require 'connect-assets'
Mincer = require('mincer')
require('mincer-cssurl')(Mincer)
Mincer.Template.libs.coffee = require('iced-coffee-script')
module.exports = () ->
options =
paths: [
'assets/css',
'assets/js',
'assets/img'
]
gzip: process.env.NODE_ENV == 'production'
return asset options, (instance) ->
env = instance.environment
env.enable('cssurl')
env.enable('autoprefixer')
|
[
{
"context": "ntext) ->\n watchers = @_subscriptions\n key = uniqueKey()\n watchers[key] = [fn, context]\n unuse = @",
"end": 1276,
"score": 0.8881018161773682,
"start": 1267,
"tag": "KEY",
"value": "uniqueKey"
}
] | src/signal.coffee | trello/hearsay | 5 | uniqueKey = require './utils/uniqueKey'
once = require './utils/once'
schedulerRef = require './scheduler-ref'
eligibleSignals = []
isDisposalScheduled = false
dispose = ->
i = 0
while i < eligibleSignals.length
signal = eligibleSignals[i]
signal._scheduled = false
if signal._users == 0
signal._dispose()
i++
eligibleSignals = []
isDisposalScheduled = false
return
module.exports = class Signal
_users: 0
_disposed: false
_scheduled: false
constructor: (source) ->
@_disposers = [source @_send.bind(@)]
@_subscriptions = {}
@_schedule()
_send: (val) ->
if @_disposed
throw new Error("Signal cannot send events after disposal. (Did you forget to return a disposer?)")
for key, [fn, context] of @_subscriptions
fn.call context, val
return
_schedule: ->
if @_scheduled
return
@_scheduled = true
eligibleSignals.push(@)
if isDisposalScheduled
return
schedulerRef.schedule(dispose)
isDisposalScheduled = true
return
_dispose: ->
@_disposed = true
for disposer in @_disposers when typeof disposer != 'undefined'
disposer()
delete @_disposers
return
subscribe: (fn, context) ->
watchers = @_subscriptions
key = uniqueKey()
watchers[key] = [fn, context]
unuse = @use()
return once 'Cannot "unsubscribe" more than once!', ->
delete watchers[key]
unuse()
return
use: ->
if @_disposed
throw new Error("Cannot use a signal after it has been disposed.")
@_users++
return once 'Cannot "unuse" more than once!', =>
@_users--
if @_users == 0
@_schedule()
return
derive: (source) ->
unuse = @use()
new @derivedType (send) ->
disposeInner = source(send)
return ->
disposeInner()
unuse()
addDisposer: (disposer) ->
if @_disposed
throw new Error("Cannot add a disposer to a disposed signal.")
@_disposers.push(disposer)
return @
derivedType: Signal
| 38109 | uniqueKey = require './utils/uniqueKey'
once = require './utils/once'
schedulerRef = require './scheduler-ref'
eligibleSignals = []
isDisposalScheduled = false
dispose = ->
i = 0
while i < eligibleSignals.length
signal = eligibleSignals[i]
signal._scheduled = false
if signal._users == 0
signal._dispose()
i++
eligibleSignals = []
isDisposalScheduled = false
return
module.exports = class Signal
_users: 0
_disposed: false
_scheduled: false
constructor: (source) ->
@_disposers = [source @_send.bind(@)]
@_subscriptions = {}
@_schedule()
_send: (val) ->
if @_disposed
throw new Error("Signal cannot send events after disposal. (Did you forget to return a disposer?)")
for key, [fn, context] of @_subscriptions
fn.call context, val
return
_schedule: ->
if @_scheduled
return
@_scheduled = true
eligibleSignals.push(@)
if isDisposalScheduled
return
schedulerRef.schedule(dispose)
isDisposalScheduled = true
return
_dispose: ->
@_disposed = true
for disposer in @_disposers when typeof disposer != 'undefined'
disposer()
delete @_disposers
return
subscribe: (fn, context) ->
watchers = @_subscriptions
key = <KEY>()
watchers[key] = [fn, context]
unuse = @use()
return once 'Cannot "unsubscribe" more than once!', ->
delete watchers[key]
unuse()
return
use: ->
if @_disposed
throw new Error("Cannot use a signal after it has been disposed.")
@_users++
return once 'Cannot "unuse" more than once!', =>
@_users--
if @_users == 0
@_schedule()
return
derive: (source) ->
unuse = @use()
new @derivedType (send) ->
disposeInner = source(send)
return ->
disposeInner()
unuse()
addDisposer: (disposer) ->
if @_disposed
throw new Error("Cannot add a disposer to a disposed signal.")
@_disposers.push(disposer)
return @
derivedType: Signal
| true | uniqueKey = require './utils/uniqueKey'
once = require './utils/once'
schedulerRef = require './scheduler-ref'
eligibleSignals = []
isDisposalScheduled = false
dispose = ->
i = 0
while i < eligibleSignals.length
signal = eligibleSignals[i]
signal._scheduled = false
if signal._users == 0
signal._dispose()
i++
eligibleSignals = []
isDisposalScheduled = false
return
module.exports = class Signal
_users: 0
_disposed: false
_scheduled: false
constructor: (source) ->
@_disposers = [source @_send.bind(@)]
@_subscriptions = {}
@_schedule()
_send: (val) ->
if @_disposed
throw new Error("Signal cannot send events after disposal. (Did you forget to return a disposer?)")
for key, [fn, context] of @_subscriptions
fn.call context, val
return
_schedule: ->
if @_scheduled
return
@_scheduled = true
eligibleSignals.push(@)
if isDisposalScheduled
return
schedulerRef.schedule(dispose)
isDisposalScheduled = true
return
_dispose: ->
@_disposed = true
for disposer in @_disposers when typeof disposer != 'undefined'
disposer()
delete @_disposers
return
subscribe: (fn, context) ->
watchers = @_subscriptions
key = PI:KEY:<KEY>END_PI()
watchers[key] = [fn, context]
unuse = @use()
return once 'Cannot "unsubscribe" more than once!', ->
delete watchers[key]
unuse()
return
use: ->
if @_disposed
throw new Error("Cannot use a signal after it has been disposed.")
@_users++
return once 'Cannot "unuse" more than once!', =>
@_users--
if @_users == 0
@_schedule()
return
derive: (source) ->
unuse = @use()
new @derivedType (send) ->
disposeInner = source(send)
return ->
disposeInner()
unuse()
addDisposer: (disposer) ->
if @_disposed
throw new Error("Cannot add a disposer to a disposed signal.")
@_disposers.push(disposer)
return @
derivedType: Signal
|
[
{
"context": " describe 'OrderBy()', ->\n it 'should return [\"Amanda\", \"Todd\"]', (done) ->\n l = new LINQ [{name: ",
"end": 5250,
"score": 0.9500133395195007,
"start": 5244,
"tag": "NAME",
"value": "Amanda"
},
{
"context": "'OrderBy()', ->\n it 'should return [\"Amanda... | node_modules/node-linq/test/LINQ.coffee | wleorule/CloudBox | 1 | {LINQ} = require '../'
should = require 'should'
require 'mocha'
describe 'indexes', ->
describe 'Count()', ->
it 'should return 5', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Count().should.equal 5
done()
it 'should return 0', (done) ->
l = new LINQ []
l.Count().should.equal 0
done()
describe 'ElementAt()', ->
it 'should return 3', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAt(2).should.equal 3
done()
describe 'ElementAtOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAtOrDefault(2).should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAtOrDefault(9001, 7).should.equal 7
done()
describe 'Single()', ->
it 'should return 3', (done) ->
l = new LINQ [3]
l.Single().should.equal 3
done()
describe 'SingleOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3]
l.SingleOrDefault().should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ []
l.SingleOrDefault(7).should.equal 7
done()
describe 'First()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.First().should.equal 3
done()
describe 'FirstOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.FirstOrDefault().should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ []
l.FirstOrDefault(7).should.equal 7
done()
describe 'Last()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.Last().should.equal 5
done()
describe 'LastOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.LastOrDefault().should.equal 5
done()
it 'should return 7', (done) ->
l = new LINQ []
l.LastOrDefault(7).should.equal 7
done()
describe 'DefaultIfEmpty()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.DefaultIfEmpty().should.equal l
done()
it 'should return 7', (done) ->
l = new LINQ []
l.DefaultIfEmpty(7).should.equal 7
done()
describe 'selects', ->
describe 'Where()', ->
it 'should return [3, 4, 5]', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
expected = new LINQ [3, 4, 5]
l.Where((num) -> num > 2).should.eql expected
done()
describe 'Distinct()', ->
it 'should return [1, 2, 3, 4, 5]', (done) ->
l = new LINQ [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]
expected = new LINQ [1, 2, 3, 4, 5]
l.Distinct().should.eql expected
done()
describe 'Except()', ->
it 'should return [6]', (done) ->
except = [1, 2, 3, 4, 5]
l = new LINQ [1, 2, 3, 4, 5, 6]
expected = new LINQ [6]
l.Except(except).should.eql expected
done()
it 'should return [6] with selector', (done) ->
except = [1, 2, 3, 4, 5]
l = new LINQ [1, 2, 3, 4, 5, 6]
expected = new LINQ [6]
sel = (i) -> i
l.Except(except, sel).should.eql expected
done()
describe 'OfType()', ->
it 'should return [6]', (done) ->
l = new LINQ ['str', {}, null, 6]
expected = new LINQ [6]
l.OfType('number').should.eql expected
done()
it 'should return ["str"]', (done) ->
l = new LINQ ['str', {}, null, 6]
expected = new LINQ ['str']
l.OfType('string').should.eql expected
done()
describe 'Cast()', ->
it 'should return [6, 6]', (done) ->
l = new LINQ ['6', 6]
expected = new LINQ [6, 6]
l.Cast(parseInt).should.eql expected
done()
it 'should return ["6", "6"]', (done) ->
l = new LINQ ['6', 6]
expected = new LINQ ['6', '6']
l.Cast(String).should.eql expected
done()
describe 'Select()', ->
it 'should return ["Max", "Todd"]', (done) ->
l = new LINQ [{first: "Max", last: "Su"}, {first: "Todd", last: "Su"}]
expected = new LINQ ["Max", "Todd"]
l.Select((item) -> item.first).should.eql expected
done()
describe 'SelectMany()', ->
it 'should return ["Max", "Todd"]', (done) ->
l = new LINQ [{first: "Max", last: "Su"}, {first: "Todd", last: "Su"}]
expected = new LINQ ["Max", "Su", "Todd", "Su"]
l.SelectMany((item) -> [item.first, item.last]).should.eql expected
done()
describe 'computation', ->
describe 'Max()', ->
it 'should return 5', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Max().should.equal 5
done()
describe 'Min()', ->
it 'should return 1', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Min().should.equal 1
done()
describe 'Sum()', ->
it 'should return 6', (done) ->
l = new LINQ [1, 2, 3]
l.Sum().should.equal 6
done()
describe 'Average()', ->
it 'should return 2', (done) ->
l = new LINQ [1, 2, 3]
l.Average().should.equal 2
done()
describe 'ordering', ->
describe 'Reverse()', ->
it 'should return [3, 2, 1]', (done) ->
l = new LINQ [1, 2, 3]
expected = new LINQ [3, 2, 1]
l.Reverse().should.eql expected
done()
describe 'OrderBy()', ->
it 'should return ["Amanda", "Todd"]', (done) ->
l = new LINQ [{name: "Todd"}, {name: "Amanda"}]
expected = new LINQ [{name: "Amanda"}, {name: "Todd"}]
l.OrderBy((item) -> item.name).should.eql expected
done()
describe 'OrderByDescending()', ->
it 'should return ["Todd", "Amanda"]', (done) ->
l = new LINQ [{name: "Amanda"}, {name: "Todd"}]
expected = new LINQ [{name: "Todd"}, {name: "Amanda"}]
l.OrderByDescending((item) -> item.name).should.eql expected
done()
describe 'GroupBy()', ->
it 'should return ["Amanda", "Todd"]', (done) ->
l = new LINQ [{name: "Todd"}, {name: "Amanda"}, {name: "Alan"}]
expected =
T: [ name: 'Todd' ]
A: [
{name: 'Amanda'}
{name: 'Alan'}
]
l.GroupBy((item) -> item.name[0]).should.eql expected
done()
describe 'conditions', ->
describe 'Contains()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.Contains(1).should.equal true
done()
describe 'Contains()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.Contains(4).should.equal false
done()
describe 'ContainsAll()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.ContainsAll([1, 2, 3]).should.equal true
done()
describe 'ContainsAll()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.ContainsAll([1, 2, 3, 4]).should.equal false
done()
describe 'Any()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.Any((item) -> item is 1).should.equal true
done()
describe 'Any()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.Any((item) -> item is 4).should.equal false
done()
describe 'All()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.All((item) -> item > 0).should.equal true
done()
describe 'All()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.All((item) -> item > 10).should.equal false
done()
describe 'modifications', ->
describe 'Concat()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l2 = new LINQ [1, 2, 3]
expected = new LINQ [1, 2, 3, 1, 2, 3]
l.Concat(l2).should.eql expected
done()
describe 'Intersect()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3, 4]
l2 = new LINQ [1, 2, 3, 5]
expected = new LINQ [1, 2, 3]
l.Intersect(l2).should.eql expected
done()
describe 'chaining', ->
describe 'dogs and puppies', ->
it 'should return correct', (done) ->
dogs = [
{name: 'Toby', age: 2, type: 'Yorkie'},
{name: 'Max', age: 3, type: 'Labrador'},
{name: 'Lil Billy', age: 4, type: 'Labrador'},
{name: 'Choni', age: 5, type: 'Poodle'}
]
puppies = [
{name: 'T-Bone', age: 1, type: 'Yorkie'},
{name: 'Lil Chili', age: 1, type: 'Labrador'}
]
arr = new LINQ(dogs)
.Concat(puppies)
.Where((dog) -> dog.type is 'Labrador')
.OrderBy((dog) -> dog.age)
.Select((dog) -> dog.name)
.ToArray()
arr.should.eql [ 'Lil Chili', 'Max', 'Lil Billy' ]
done()
| 53974 | {LINQ} = require '../'
should = require 'should'
require 'mocha'
describe 'indexes', ->
describe 'Count()', ->
it 'should return 5', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Count().should.equal 5
done()
it 'should return 0', (done) ->
l = new LINQ []
l.Count().should.equal 0
done()
describe 'ElementAt()', ->
it 'should return 3', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAt(2).should.equal 3
done()
describe 'ElementAtOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAtOrDefault(2).should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAtOrDefault(9001, 7).should.equal 7
done()
describe 'Single()', ->
it 'should return 3', (done) ->
l = new LINQ [3]
l.Single().should.equal 3
done()
describe 'SingleOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3]
l.SingleOrDefault().should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ []
l.SingleOrDefault(7).should.equal 7
done()
describe 'First()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.First().should.equal 3
done()
describe 'FirstOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.FirstOrDefault().should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ []
l.FirstOrDefault(7).should.equal 7
done()
describe 'Last()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.Last().should.equal 5
done()
describe 'LastOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.LastOrDefault().should.equal 5
done()
it 'should return 7', (done) ->
l = new LINQ []
l.LastOrDefault(7).should.equal 7
done()
describe 'DefaultIfEmpty()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.DefaultIfEmpty().should.equal l
done()
it 'should return 7', (done) ->
l = new LINQ []
l.DefaultIfEmpty(7).should.equal 7
done()
describe 'selects', ->
describe 'Where()', ->
it 'should return [3, 4, 5]', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
expected = new LINQ [3, 4, 5]
l.Where((num) -> num > 2).should.eql expected
done()
describe 'Distinct()', ->
it 'should return [1, 2, 3, 4, 5]', (done) ->
l = new LINQ [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]
expected = new LINQ [1, 2, 3, 4, 5]
l.Distinct().should.eql expected
done()
describe 'Except()', ->
it 'should return [6]', (done) ->
except = [1, 2, 3, 4, 5]
l = new LINQ [1, 2, 3, 4, 5, 6]
expected = new LINQ [6]
l.Except(except).should.eql expected
done()
it 'should return [6] with selector', (done) ->
except = [1, 2, 3, 4, 5]
l = new LINQ [1, 2, 3, 4, 5, 6]
expected = new LINQ [6]
sel = (i) -> i
l.Except(except, sel).should.eql expected
done()
describe 'OfType()', ->
it 'should return [6]', (done) ->
l = new LINQ ['str', {}, null, 6]
expected = new LINQ [6]
l.OfType('number').should.eql expected
done()
it 'should return ["str"]', (done) ->
l = new LINQ ['str', {}, null, 6]
expected = new LINQ ['str']
l.OfType('string').should.eql expected
done()
describe 'Cast()', ->
it 'should return [6, 6]', (done) ->
l = new LINQ ['6', 6]
expected = new LINQ [6, 6]
l.Cast(parseInt).should.eql expected
done()
it 'should return ["6", "6"]', (done) ->
l = new LINQ ['6', 6]
expected = new LINQ ['6', '6']
l.Cast(String).should.eql expected
done()
describe 'Select()', ->
it 'should return ["Max", "Todd"]', (done) ->
l = new LINQ [{first: "Max", last: "Su"}, {first: "Todd", last: "Su"}]
expected = new LINQ ["Max", "Todd"]
l.Select((item) -> item.first).should.eql expected
done()
describe 'SelectMany()', ->
it 'should return ["Max", "Todd"]', (done) ->
l = new LINQ [{first: "Max", last: "Su"}, {first: "Todd", last: "Su"}]
expected = new LINQ ["Max", "Su", "Todd", "Su"]
l.SelectMany((item) -> [item.first, item.last]).should.eql expected
done()
describe 'computation', ->
describe 'Max()', ->
it 'should return 5', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Max().should.equal 5
done()
describe 'Min()', ->
it 'should return 1', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Min().should.equal 1
done()
describe 'Sum()', ->
it 'should return 6', (done) ->
l = new LINQ [1, 2, 3]
l.Sum().should.equal 6
done()
describe 'Average()', ->
it 'should return 2', (done) ->
l = new LINQ [1, 2, 3]
l.Average().should.equal 2
done()
describe 'ordering', ->
describe 'Reverse()', ->
it 'should return [3, 2, 1]', (done) ->
l = new LINQ [1, 2, 3]
expected = new LINQ [3, 2, 1]
l.Reverse().should.eql expected
done()
describe 'OrderBy()', ->
it 'should return ["<NAME>", "<NAME>"]', (done) ->
l = new LINQ [{name: "<NAME>"}, {name: "<NAME>"}]
expected = new LINQ [{name: "<NAME>"}, {name: "<NAME>"}]
l.OrderBy((item) -> item.name).should.eql expected
done()
describe 'OrderByDescending()', ->
it 'should return ["<NAME>", "<NAME>"]', (done) ->
l = new LINQ [{name: "<NAME>"}, {name: "<NAME>"}]
expected = new LINQ [{name: "<NAME>"}, {name: "<NAME>"}]
l.OrderByDescending((item) -> item.name).should.eql expected
done()
describe 'GroupBy()', ->
it 'should return ["<NAME>", "<NAME>"]', (done) ->
l = new LINQ [{name: "<NAME>"}, {name: "<NAME>"}, {name: "<NAME>"}]
expected =
T: [ name: '<NAME>' ]
A: [
{name: '<NAME>'}
{name: '<NAME>'}
]
l.GroupBy((item) -> item.name[0]).should.eql expected
done()
describe 'conditions', ->
describe 'Contains()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.Contains(1).should.equal true
done()
describe 'Contains()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.Contains(4).should.equal false
done()
describe 'ContainsAll()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.ContainsAll([1, 2, 3]).should.equal true
done()
describe 'ContainsAll()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.ContainsAll([1, 2, 3, 4]).should.equal false
done()
describe 'Any()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.Any((item) -> item is 1).should.equal true
done()
describe 'Any()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.Any((item) -> item is 4).should.equal false
done()
describe 'All()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.All((item) -> item > 0).should.equal true
done()
describe 'All()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.All((item) -> item > 10).should.equal false
done()
describe 'modifications', ->
describe 'Concat()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l2 = new LINQ [1, 2, 3]
expected = new LINQ [1, 2, 3, 1, 2, 3]
l.Concat(l2).should.eql expected
done()
describe 'Intersect()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3, 4]
l2 = new LINQ [1, 2, 3, 5]
expected = new LINQ [1, 2, 3]
l.Intersect(l2).should.eql expected
done()
describe 'chaining', ->
describe 'dogs and puppies', ->
it 'should return correct', (done) ->
dogs = [
{name: '<NAME>', age: 2, type: 'Yorkie'},
{name: '<NAME>', age: 3, type: 'Labrador'},
{name: '<NAME>', age: 4, type: 'Labrador'},
{name: '<NAME>', age: 5, type: 'Poodle'}
]
puppies = [
{name: '<NAME>', age: 1, type: 'Yorkie'},
{name: '<NAME>', age: 1, type: 'Labrador'}
]
arr = new LINQ(dogs)
.Concat(puppies)
.Where((dog) -> dog.type is 'Labrador')
.OrderBy((dog) -> dog.age)
.Select((dog) -> dog.name)
.ToArray()
arr.should.eql [ '<NAME>', '<NAME>', '<NAME>' ]
done()
| true | {LINQ} = require '../'
should = require 'should'
require 'mocha'
describe 'indexes', ->
describe 'Count()', ->
it 'should return 5', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Count().should.equal 5
done()
it 'should return 0', (done) ->
l = new LINQ []
l.Count().should.equal 0
done()
describe 'ElementAt()', ->
it 'should return 3', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAt(2).should.equal 3
done()
describe 'ElementAtOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAtOrDefault(2).should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ [1, 2, 3, 4]
l.ElementAtOrDefault(9001, 7).should.equal 7
done()
describe 'Single()', ->
it 'should return 3', (done) ->
l = new LINQ [3]
l.Single().should.equal 3
done()
describe 'SingleOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3]
l.SingleOrDefault().should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ []
l.SingleOrDefault(7).should.equal 7
done()
describe 'First()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.First().should.equal 3
done()
describe 'FirstOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.FirstOrDefault().should.equal 3
done()
it 'should return 7', (done) ->
l = new LINQ []
l.FirstOrDefault(7).should.equal 7
done()
describe 'Last()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.Last().should.equal 5
done()
describe 'LastOrDefault()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.LastOrDefault().should.equal 5
done()
it 'should return 7', (done) ->
l = new LINQ []
l.LastOrDefault(7).should.equal 7
done()
describe 'DefaultIfEmpty()', ->
it 'should return 3', (done) ->
l = new LINQ [3, 4, 5]
l.DefaultIfEmpty().should.equal l
done()
it 'should return 7', (done) ->
l = new LINQ []
l.DefaultIfEmpty(7).should.equal 7
done()
describe 'selects', ->
describe 'Where()', ->
it 'should return [3, 4, 5]', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
expected = new LINQ [3, 4, 5]
l.Where((num) -> num > 2).should.eql expected
done()
describe 'Distinct()', ->
it 'should return [1, 2, 3, 4, 5]', (done) ->
l = new LINQ [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]
expected = new LINQ [1, 2, 3, 4, 5]
l.Distinct().should.eql expected
done()
describe 'Except()', ->
it 'should return [6]', (done) ->
except = [1, 2, 3, 4, 5]
l = new LINQ [1, 2, 3, 4, 5, 6]
expected = new LINQ [6]
l.Except(except).should.eql expected
done()
it 'should return [6] with selector', (done) ->
except = [1, 2, 3, 4, 5]
l = new LINQ [1, 2, 3, 4, 5, 6]
expected = new LINQ [6]
sel = (i) -> i
l.Except(except, sel).should.eql expected
done()
describe 'OfType()', ->
it 'should return [6]', (done) ->
l = new LINQ ['str', {}, null, 6]
expected = new LINQ [6]
l.OfType('number').should.eql expected
done()
it 'should return ["str"]', (done) ->
l = new LINQ ['str', {}, null, 6]
expected = new LINQ ['str']
l.OfType('string').should.eql expected
done()
describe 'Cast()', ->
it 'should return [6, 6]', (done) ->
l = new LINQ ['6', 6]
expected = new LINQ [6, 6]
l.Cast(parseInt).should.eql expected
done()
it 'should return ["6", "6"]', (done) ->
l = new LINQ ['6', 6]
expected = new LINQ ['6', '6']
l.Cast(String).should.eql expected
done()
describe 'Select()', ->
it 'should return ["Max", "Todd"]', (done) ->
l = new LINQ [{first: "Max", last: "Su"}, {first: "Todd", last: "Su"}]
expected = new LINQ ["Max", "Todd"]
l.Select((item) -> item.first).should.eql expected
done()
describe 'SelectMany()', ->
it 'should return ["Max", "Todd"]', (done) ->
l = new LINQ [{first: "Max", last: "Su"}, {first: "Todd", last: "Su"}]
expected = new LINQ ["Max", "Su", "Todd", "Su"]
l.SelectMany((item) -> [item.first, item.last]).should.eql expected
done()
describe 'computation', ->
describe 'Max()', ->
it 'should return 5', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Max().should.equal 5
done()
describe 'Min()', ->
it 'should return 1', (done) ->
l = new LINQ [1, 2, 3, 4, 5]
l.Min().should.equal 1
done()
describe 'Sum()', ->
it 'should return 6', (done) ->
l = new LINQ [1, 2, 3]
l.Sum().should.equal 6
done()
describe 'Average()', ->
it 'should return 2', (done) ->
l = new LINQ [1, 2, 3]
l.Average().should.equal 2
done()
describe 'ordering', ->
describe 'Reverse()', ->
it 'should return [3, 2, 1]', (done) ->
l = new LINQ [1, 2, 3]
expected = new LINQ [3, 2, 1]
l.Reverse().should.eql expected
done()
describe 'OrderBy()', ->
it 'should return ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"]', (done) ->
l = new LINQ [{name: "PI:NAME:<NAME>END_PI"}, {name: "PI:NAME:<NAME>END_PI"}]
expected = new LINQ [{name: "PI:NAME:<NAME>END_PI"}, {name: "PI:NAME:<NAME>END_PI"}]
l.OrderBy((item) -> item.name).should.eql expected
done()
describe 'OrderByDescending()', ->
it 'should return ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"]', (done) ->
l = new LINQ [{name: "PI:NAME:<NAME>END_PI"}, {name: "PI:NAME:<NAME>END_PI"}]
expected = new LINQ [{name: "PI:NAME:<NAME>END_PI"}, {name: "PI:NAME:<NAME>END_PI"}]
l.OrderByDescending((item) -> item.name).should.eql expected
done()
describe 'GroupBy()', ->
it 'should return ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"]', (done) ->
l = new LINQ [{name: "PI:NAME:<NAME>END_PI"}, {name: "PI:NAME:<NAME>END_PI"}, {name: "PI:NAME:<NAME>END_PI"}]
expected =
T: [ name: 'PI:NAME:<NAME>END_PI' ]
A: [
{name: 'PI:NAME:<NAME>END_PI'}
{name: 'PI:NAME:<NAME>END_PI'}
]
l.GroupBy((item) -> item.name[0]).should.eql expected
done()
describe 'conditions', ->
describe 'Contains()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.Contains(1).should.equal true
done()
describe 'Contains()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.Contains(4).should.equal false
done()
describe 'ContainsAll()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.ContainsAll([1, 2, 3]).should.equal true
done()
describe 'ContainsAll()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.ContainsAll([1, 2, 3, 4]).should.equal false
done()
describe 'Any()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.Any((item) -> item is 1).should.equal true
done()
describe 'Any()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.Any((item) -> item is 4).should.equal false
done()
describe 'All()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l.All((item) -> item > 0).should.equal true
done()
describe 'All()', ->
it 'should return false', (done) ->
l = new LINQ [1, 2, 3]
l.All((item) -> item > 10).should.equal false
done()
describe 'modifications', ->
describe 'Concat()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3]
l2 = new LINQ [1, 2, 3]
expected = new LINQ [1, 2, 3, 1, 2, 3]
l.Concat(l2).should.eql expected
done()
describe 'Intersect()', ->
it 'should return true', (done) ->
l = new LINQ [1, 2, 3, 4]
l2 = new LINQ [1, 2, 3, 5]
expected = new LINQ [1, 2, 3]
l.Intersect(l2).should.eql expected
done()
describe 'chaining', ->
describe 'dogs and puppies', ->
it 'should return correct', (done) ->
dogs = [
{name: 'PI:NAME:<NAME>END_PI', age: 2, type: 'Yorkie'},
{name: 'PI:NAME:<NAME>END_PI', age: 3, type: 'Labrador'},
{name: 'PI:NAME:<NAME>END_PI', age: 4, type: 'Labrador'},
{name: 'PI:NAME:<NAME>END_PI', age: 5, type: 'Poodle'}
]
puppies = [
{name: 'PI:NAME:<NAME>END_PI', age: 1, type: 'Yorkie'},
{name: 'PI:NAME:<NAME>END_PI', age: 1, type: 'Labrador'}
]
arr = new LINQ(dogs)
.Concat(puppies)
.Where((dog) -> dog.type is 'Labrador')
.OrderBy((dog) -> dog.age)
.Select((dog) -> dog.name)
.ToArray()
arr.should.eql [ 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI', 'PI:NAME:<NAME>END_PI' ]
done()
|
[
{
"context": "essageWithType \"auth\", \"hashed_login\", { username: @pending_username, bcrypted: hashed }\n return\n if data[",
"end": 1264,
"score": 0.9741301536560059,
"start": 1247,
"tag": "USERNAME",
"value": "@pending_username"
},
{
"context": "\"player_action\", ac... | dcjs/dcjs_websocket.coffee | noahgibbs/demiurge-createjs | 0 | class DCJS.WebsocketTransport extends DCJS.Transport
constructor: (dcjs, ws, cookie = document.cookie) ->
super(dcjs)
@dcjs = dcjs # Workaround to not reference "this" before calling super
@ws = ws
@cookie = cookie
@opened = false
@ready = false
@login_handler = false
@failed_login_handler = false
@pending_password = false
@pending_username = false
@on_open = false
@queue = []
setup: () ->
transport = this
@ws.onmessage = (evt) =>
data = JSON.parse evt.data
if data[0] == "game_msg"
return transport.api_handler data[1], data.slice(2)
if data[0] == "failed_login" || data[0] == "failed_registration"
if @failed_login_handler?
@failed_login_handler(data[1])
else
console.log "No failed login handler set!"
@pending_password = false
@pending_hash = false
@pending_save_login = false
return
if data[0] == "login_salt"
bcrypt = dcodeIO.bcrypt
salt = data[1]
hashed = bcrypt.hashSync(@pending_password, salt)
@pending_password = false # Tried it? Clear it.
@pending_hash = hashed
@sendMessageWithType "auth", "hashed_login", { username: @pending_username, bcrypted: hashed }
return
if data[0] == "login"
console.log "Logged in as", data[1]["username"]
if @pending_save_login && @pending_hash
@cookie = "dcjs_username=#{data[1]["username"]};secure"
@cookie = "dcjs_hash=#{@pending_hash};secure"
@pending_hash = false
@logged_in_as = data[1]["username"]
if @login_handler?
@login_handler(data[1]["username"])
else
console.log "No login handler set!"
return
if data[0] == "registration"
consider_auto_login()
return console.log "Unexpected message type: #{data[0]}"
@ws.onclose = () ->
console.log "socket closed"
@ws.onopen = () ->
@opened = true
ready_check = () =>
if transport.ws.readyState == 1
transport.ready = true
transport.clearQueue()
else
setTimeout ready_check, 0.25
console.log "connected..."
ready_check()
if @on_open?
@on_open()
sendMessage: (msgName, args...) ->
sendMessageWithType("game_msg", msgName, args...)
sendMessageWithType: (msgType, msgName, args...) ->
if @ready
# Serialize as JSON, send
@ws.send JSON.stringify([ msgType, msgName, args ])
else
@queue.push([msgType, msgName, args])
setTimeout (() => @clearQueue()), 0.5
clearQueue: () ->
if @ready && @queue.length > 0
for msg in @queue
@ws.send JSON.stringify(msg)
playerAction: (action_name, args...) ->
@sendMessageWithType("player_action", action_name, args...)
onLogin: (@login_handler) ->
onFailedLogin: (@failed_login_handler) ->
onOpen: (@on_open) ->
if @opened?
@on_open() # Already open? Call it.
api_handler: (msg_type, args) ->
@handler msg_type, args
registerAccount: (username, password) ->
bcrypt = dcodeIO.bcrypt;
salt = bcrypt.genSaltSync(10);
hashed = bcrypt.hashSync(password, salt);
@sendMessageWithType("auth", "register_account", { username: username, salt: salt, bcrypted: hashed })
login: (username, password, save_login_info = true) ->
@pending_password = password
@pending_username = username
@pending_save_login = save_login_info
@sendMessageWithType("auth", "get_salt", { username: username })
logout: () ->
@cookie = 'dcjs_username=; expires=Thu, 01 Jan 1970 00:00:01 GMT;';
@cookie = 'dcjs_hash=; expires=Thu, 01 Jan 1970 00:00:01 GMT;';
considerAutoLogin: () ->
cookie_username = @cookie.replace(/(?:(?:^|.*;\s*)dcjs_username\s*\=\s*([^;]*).*$)|^.*$/, "$1")
cookie_hash = @cookie.replace(/(?:(?:^|.*;\s*)dcjs_hash\s*\=\s*([^;]*).*$)|^.*$/, "$1")
if cookie_username? && cookie_hash?
@sendMessageWithType "auth", "hashed_login", { username: cookie_username, bcrypted: cookie_hash }
| 117108 | class DCJS.WebsocketTransport extends DCJS.Transport
constructor: (dcjs, ws, cookie = document.cookie) ->
super(dcjs)
@dcjs = dcjs # Workaround to not reference "this" before calling super
@ws = ws
@cookie = cookie
@opened = false
@ready = false
@login_handler = false
@failed_login_handler = false
@pending_password = false
@pending_username = false
@on_open = false
@queue = []
setup: () ->
transport = this
@ws.onmessage = (evt) =>
data = JSON.parse evt.data
if data[0] == "game_msg"
return transport.api_handler data[1], data.slice(2)
if data[0] == "failed_login" || data[0] == "failed_registration"
if @failed_login_handler?
@failed_login_handler(data[1])
else
console.log "No failed login handler set!"
@pending_password = false
@pending_hash = false
@pending_save_login = false
return
if data[0] == "login_salt"
bcrypt = dcodeIO.bcrypt
salt = data[1]
hashed = bcrypt.hashSync(@pending_password, salt)
@pending_password = false # Tried it? Clear it.
@pending_hash = hashed
@sendMessageWithType "auth", "hashed_login", { username: @pending_username, bcrypted: hashed }
return
if data[0] == "login"
console.log "Logged in as", data[1]["username"]
if @pending_save_login && @pending_hash
@cookie = "dcjs_username=#{data[1]["username"]};secure"
@cookie = "dcjs_hash=#{@pending_hash};secure"
@pending_hash = false
@logged_in_as = data[1]["username"]
if @login_handler?
@login_handler(data[1]["username"])
else
console.log "No login handler set!"
return
if data[0] == "registration"
consider_auto_login()
return console.log "Unexpected message type: #{data[0]}"
@ws.onclose = () ->
console.log "socket closed"
@ws.onopen = () ->
@opened = true
ready_check = () =>
if transport.ws.readyState == 1
transport.ready = true
transport.clearQueue()
else
setTimeout ready_check, 0.25
console.log "connected..."
ready_check()
if @on_open?
@on_open()
sendMessage: (msgName, args...) ->
sendMessageWithType("game_msg", msgName, args...)
sendMessageWithType: (msgType, msgName, args...) ->
if @ready
# Serialize as JSON, send
@ws.send JSON.stringify([ msgType, msgName, args ])
else
@queue.push([msgType, msgName, args])
setTimeout (() => @clearQueue()), 0.5
clearQueue: () ->
if @ready && @queue.length > 0
for msg in @queue
@ws.send JSON.stringify(msg)
playerAction: (action_name, args...) ->
@sendMessageWithType("player_action", action_name, args...)
onLogin: (@login_handler) ->
onFailedLogin: (@failed_login_handler) ->
onOpen: (@on_open) ->
if @opened?
@on_open() # Already open? Call it.
api_handler: (msg_type, args) ->
@handler msg_type, args
registerAccount: (username, password) ->
bcrypt = dcodeIO.bcrypt;
salt = bcrypt.genSaltSync(10);
hashed = bcrypt.hashSync(password, salt);
@sendMessageWithType("auth", "register_account", { username: username, salt: salt, bcrypted: hashed })
login: (username, password, save_login_info = true) ->
@pending_password = <PASSWORD>
@pending_username = username
@pending_save_login = save_login_info
@sendMessageWithType("auth", "get_salt", { username: username })
logout: () ->
@cookie = 'dcjs_username=; expires=Thu, 01 Jan 1970 00:00:01 GMT;';
@cookie = 'dcjs_hash=; expires=Thu, 01 Jan 1970 00:00:01 GMT;';
considerAutoLogin: () ->
cookie_username = @cookie.replace(/(?:(?:^|.*;\s*)dcjs_username\s*\=\s*([^;]*).*$)|^.*$/, "$1")
cookie_hash = @cookie.replace(/(?:(?:^|.*;\s*)dcjs_hash\s*\=\s*([^;]*).*$)|^.*$/, "$1")
if cookie_username? && cookie_hash?
@sendMessageWithType "auth", "hashed_login", { username: cookie_username, bcrypted: cookie_hash }
| true | class DCJS.WebsocketTransport extends DCJS.Transport
constructor: (dcjs, ws, cookie = document.cookie) ->
super(dcjs)
@dcjs = dcjs # Workaround to not reference "this" before calling super
@ws = ws
@cookie = cookie
@opened = false
@ready = false
@login_handler = false
@failed_login_handler = false
@pending_password = false
@pending_username = false
@on_open = false
@queue = []
setup: () ->
transport = this
@ws.onmessage = (evt) =>
data = JSON.parse evt.data
if data[0] == "game_msg"
return transport.api_handler data[1], data.slice(2)
if data[0] == "failed_login" || data[0] == "failed_registration"
if @failed_login_handler?
@failed_login_handler(data[1])
else
console.log "No failed login handler set!"
@pending_password = false
@pending_hash = false
@pending_save_login = false
return
if data[0] == "login_salt"
bcrypt = dcodeIO.bcrypt
salt = data[1]
hashed = bcrypt.hashSync(@pending_password, salt)
@pending_password = false # Tried it? Clear it.
@pending_hash = hashed
@sendMessageWithType "auth", "hashed_login", { username: @pending_username, bcrypted: hashed }
return
if data[0] == "login"
console.log "Logged in as", data[1]["username"]
if @pending_save_login && @pending_hash
@cookie = "dcjs_username=#{data[1]["username"]};secure"
@cookie = "dcjs_hash=#{@pending_hash};secure"
@pending_hash = false
@logged_in_as = data[1]["username"]
if @login_handler?
@login_handler(data[1]["username"])
else
console.log "No login handler set!"
return
if data[0] == "registration"
consider_auto_login()
return console.log "Unexpected message type: #{data[0]}"
@ws.onclose = () ->
console.log "socket closed"
@ws.onopen = () ->
@opened = true
ready_check = () =>
if transport.ws.readyState == 1
transport.ready = true
transport.clearQueue()
else
setTimeout ready_check, 0.25
console.log "connected..."
ready_check()
if @on_open?
@on_open()
sendMessage: (msgName, args...) ->
sendMessageWithType("game_msg", msgName, args...)
sendMessageWithType: (msgType, msgName, args...) ->
if @ready
# Serialize as JSON, send
@ws.send JSON.stringify([ msgType, msgName, args ])
else
@queue.push([msgType, msgName, args])
setTimeout (() => @clearQueue()), 0.5
clearQueue: () ->
if @ready && @queue.length > 0
for msg in @queue
@ws.send JSON.stringify(msg)
playerAction: (action_name, args...) ->
@sendMessageWithType("player_action", action_name, args...)
onLogin: (@login_handler) ->
onFailedLogin: (@failed_login_handler) ->
onOpen: (@on_open) ->
if @opened?
@on_open() # Already open? Call it.
api_handler: (msg_type, args) ->
@handler msg_type, args
registerAccount: (username, password) ->
bcrypt = dcodeIO.bcrypt;
salt = bcrypt.genSaltSync(10);
hashed = bcrypt.hashSync(password, salt);
@sendMessageWithType("auth", "register_account", { username: username, salt: salt, bcrypted: hashed })
login: (username, password, save_login_info = true) ->
@pending_password = PI:PASSWORD:<PASSWORD>END_PI
@pending_username = username
@pending_save_login = save_login_info
@sendMessageWithType("auth", "get_salt", { username: username })
logout: () ->
@cookie = 'dcjs_username=; expires=Thu, 01 Jan 1970 00:00:01 GMT;';
@cookie = 'dcjs_hash=; expires=Thu, 01 Jan 1970 00:00:01 GMT;';
considerAutoLogin: () ->
cookie_username = @cookie.replace(/(?:(?:^|.*;\s*)dcjs_username\s*\=\s*([^;]*).*$)|^.*$/, "$1")
cookie_hash = @cookie.replace(/(?:(?:^|.*;\s*)dcjs_hash\s*\=\s*([^;]*).*$)|^.*$/, "$1")
if cookie_username? && cookie_hash?
@sendMessageWithType "auth", "hashed_login", { username: cookie_username, bcrypted: cookie_hash }
|
[
{
"context": "\n fakeProperty =\n id: propertyId\n name: \"Bad Apartment\"\n description: \"A pretty bad apartment\"\n ad",
"end": 222,
"score": 0.7306563854217529,
"start": 209,
"tag": "NAME",
"value": "Bad Apartment"
}
] | spec/javascripts/controllers/PropertyController_spec.coffee | Apmats/property_management_system_ngRoR_sample | 0 | describe "PropertyController", ->
scope = null
ctrl = null
routeParams = null
httpBackend = null
propertyId = 10
flash = null
fakeProperty =
id: propertyId
name: "Bad Apartment"
description: "A pretty bad apartment"
address: "Nonexistant Str. 45"
property_type: "City"
floor: 3
setupController =(propertyExists=true)->
inject(($location, $routeParams, $rootScope, $httpBackend, $controller, _flash_)->
scope = $rootScope.$new()
location = $location
httpBackend = $httpBackend
routeParams = $routeParams
routeParams.propertyId = propertyId
flash = _flash_
request = new RegExp("\/properties/#{propertyId}")
results = if propertyExists
[200,fakeProperty]
else
[404]
httpBackend.expectGET(request).respond(results[0],results[1])
ctrl = $controller('PropertyController',
$scope: scope)
)
beforeEach(module("pms"))
afterEach ->
httpBackend.verifyNoOutstandingExpectation()
httpBackend.verifyNoOutstandingRequest()
describe 'controller initialization', ->
describe 'property is found', ->
beforeEach(setupController())
it 'loads the given property', ->
httpBackend.flush()
expect(scope.property).toEqualData(fakeProperty)
describe 'property is not found', ->
beforeEach(setupController(false))
it 'loads the given property', ->
httpBackend.flush()
expect(scope.property).toBe(null)
expect(flash.error).toBe("There is no property with ID #{propertyId}") | 63550 | describe "PropertyController", ->
scope = null
ctrl = null
routeParams = null
httpBackend = null
propertyId = 10
flash = null
fakeProperty =
id: propertyId
name: "<NAME>"
description: "A pretty bad apartment"
address: "Nonexistant Str. 45"
property_type: "City"
floor: 3
setupController =(propertyExists=true)->
inject(($location, $routeParams, $rootScope, $httpBackend, $controller, _flash_)->
scope = $rootScope.$new()
location = $location
httpBackend = $httpBackend
routeParams = $routeParams
routeParams.propertyId = propertyId
flash = _flash_
request = new RegExp("\/properties/#{propertyId}")
results = if propertyExists
[200,fakeProperty]
else
[404]
httpBackend.expectGET(request).respond(results[0],results[1])
ctrl = $controller('PropertyController',
$scope: scope)
)
beforeEach(module("pms"))
afterEach ->
httpBackend.verifyNoOutstandingExpectation()
httpBackend.verifyNoOutstandingRequest()
describe 'controller initialization', ->
describe 'property is found', ->
beforeEach(setupController())
it 'loads the given property', ->
httpBackend.flush()
expect(scope.property).toEqualData(fakeProperty)
describe 'property is not found', ->
beforeEach(setupController(false))
it 'loads the given property', ->
httpBackend.flush()
expect(scope.property).toBe(null)
expect(flash.error).toBe("There is no property with ID #{propertyId}") | true | describe "PropertyController", ->
scope = null
ctrl = null
routeParams = null
httpBackend = null
propertyId = 10
flash = null
fakeProperty =
id: propertyId
name: "PI:NAME:<NAME>END_PI"
description: "A pretty bad apartment"
address: "Nonexistant Str. 45"
property_type: "City"
floor: 3
setupController =(propertyExists=true)->
inject(($location, $routeParams, $rootScope, $httpBackend, $controller, _flash_)->
scope = $rootScope.$new()
location = $location
httpBackend = $httpBackend
routeParams = $routeParams
routeParams.propertyId = propertyId
flash = _flash_
request = new RegExp("\/properties/#{propertyId}")
results = if propertyExists
[200,fakeProperty]
else
[404]
httpBackend.expectGET(request).respond(results[0],results[1])
ctrl = $controller('PropertyController',
$scope: scope)
)
beforeEach(module("pms"))
afterEach ->
httpBackend.verifyNoOutstandingExpectation()
httpBackend.verifyNoOutstandingRequest()
describe 'controller initialization', ->
describe 'property is found', ->
beforeEach(setupController())
it 'loads the given property', ->
httpBackend.flush()
expect(scope.property).toEqualData(fakeProperty)
describe 'property is not found', ->
beforeEach(setupController(false))
it 'loads the given property', ->
httpBackend.flush()
expect(scope.property).toBe(null)
expect(flash.error).toBe("There is no property with ID #{propertyId}") |
[
{
"context": "e cookieParser() \n\tapp.use express.session\n\t\tkey:\"shk-framework\"\n\t\tsecret:\"your-secret-key\"\n\n\tapp.use express.sta",
"end": 595,
"score": 0.9812285304069519,
"start": 582,
"tag": "KEY",
"value": "shk-framework"
}
] | app.coffee | shkarimpour/framework-core | 0 | module.exports = ->
express = require 'express'
path = require 'path'
favicon = require 'static-favicon'
logger = require 'morgan'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
config = require '../../config'
app = express()
app.locals.routes = {}
# view engine setup
app.set 'views', path.join __dirname+"/../", config.autoload.viewsPath
app.set 'view engine', 'jade'
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use cookieParser()
app.use express.session
key:"shk-framework"
secret:"your-secret-key"
app.use express.static path.join __dirname, 'public'
app.use app.router
app | 73215 | module.exports = ->
express = require 'express'
path = require 'path'
favicon = require 'static-favicon'
logger = require 'morgan'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
config = require '../../config'
app = express()
app.locals.routes = {}
# view engine setup
app.set 'views', path.join __dirname+"/../", config.autoload.viewsPath
app.set 'view engine', 'jade'
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use cookieParser()
app.use express.session
key:"<KEY>"
secret:"your-secret-key"
app.use express.static path.join __dirname, 'public'
app.use app.router
app | true | module.exports = ->
express = require 'express'
path = require 'path'
favicon = require 'static-favicon'
logger = require 'morgan'
cookieParser = require 'cookie-parser'
bodyParser = require 'body-parser'
config = require '../../config'
app = express()
app.locals.routes = {}
# view engine setup
app.set 'views', path.join __dirname+"/../", config.autoload.viewsPath
app.set 'view engine', 'jade'
app.use favicon()
app.use logger('dev')
app.use bodyParser.json()
app.use bodyParser.urlencoded()
app.use cookieParser()
app.use express.session
key:"PI:KEY:<KEY>END_PI"
secret:"your-secret-key"
app.use express.static path.join __dirname, 'public'
app.use app.router
app |
[
{
"context": " member.teamId = team.id\n member.firstName = 'Test'\n teamsnap.saveMember member, (err, result) ->",
"end": 456,
"score": 0.9985404014587402,
"start": 452,
"tag": "NAME",
"value": "Test"
}
] | test/forumPosts.coffee | teamsnap/teamsnap-javascript-sdk | 9 | describe 'Forum Posts', ->
topic = null
post = null
member = null
before (done) ->
topic = teamsnap.createForumTopic()
topic.teamId = team.id
topic.title = "What a topic"
teamsnap.saveForumTopic topic, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'forumTopic')
done()
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = 'Test'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to create a forum post', (done) ->
post = teamsnap.createForumPost()
post.forumTopicId = topic.id
post.memberId = member.id
post.message = "What a post"
teamsnap.saveForumPost post, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'forumPost')
done()
it 'should be able to load all forum posts for team', (done) ->
teamsnap.loadForumPosts {teamId: team.id}, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to delete a forum post', (done) ->
teamsnap.deleteForumPost post, (err, result) ->
expect(err).to.be.null
done()
| 56471 | describe 'Forum Posts', ->
topic = null
post = null
member = null
before (done) ->
topic = teamsnap.createForumTopic()
topic.teamId = team.id
topic.title = "What a topic"
teamsnap.saveForumTopic topic, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'forumTopic')
done()
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = '<NAME>'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to create a forum post', (done) ->
post = teamsnap.createForumPost()
post.forumTopicId = topic.id
post.memberId = member.id
post.message = "What a post"
teamsnap.saveForumPost post, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'forumPost')
done()
it 'should be able to load all forum posts for team', (done) ->
teamsnap.loadForumPosts {teamId: team.id}, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to delete a forum post', (done) ->
teamsnap.deleteForumPost post, (err, result) ->
expect(err).to.be.null
done()
| true | describe 'Forum Posts', ->
topic = null
post = null
member = null
before (done) ->
topic = teamsnap.createForumTopic()
topic.teamId = team.id
topic.title = "What a topic"
teamsnap.saveForumTopic topic, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'forumTopic')
done()
before (done) ->
member = teamsnap.createMember()
member.teamId = team.id
member.firstName = 'PI:NAME:<NAME>END_PI'
teamsnap.saveMember member, (err, result) ->
expect(err).to.be.null
done()
after (done) ->
teamsnap.deleteMember member, (err, result) ->
expect(err).to.be.null
done()
it 'should be able to create a forum post', (done) ->
post = teamsnap.createForumPost()
post.forumTopicId = topic.id
post.memberId = member.id
post.message = "What a post"
teamsnap.saveForumPost post, (err, result) ->
expect(err).to.be.null
result.should.have.property('type', 'forumPost')
done()
it 'should be able to load all forum posts for team', (done) ->
teamsnap.loadForumPosts {teamId: team.id}, (err, result) ->
expect(err).to.be.null
result.should.be.an('array')
done()
it 'should be able to delete a forum post', (done) ->
teamsnap.deleteForumPost post, (err, result) ->
expect(err).to.be.null
done()
|
[
{
"context": "'''\nwatchOS : Docks\n\n@auther Jungho song (threeword.com)\n@since 2016.11.23\n'''\nclass expor",
"end": 40,
"score": 0.9998639822006226,
"start": 29,
"tag": "NAME",
"value": "Jungho song"
}
] | modules/watchos-kit-docks.coffee | framer-modules/watchos | 2 | '''
watchOS : Docks
@auther Jungho song (threeword.com)
@since 2016.11.23
'''
class exports.Docks extends Layer
# Basic apps
appsInfo = [
{ name: "메시지", icon: "images/ic_messages.png", image: "images/messages.png" }
{ name: "캘린더", icon: "images/ic_calendar.png", image: "images/calendar.png" }
{ name: "타이머", icon: "images/ic_stopwatch.png", image: "images/stopwatch.png" }
{ name: "지도", icon: "images/ic_maps.png", image: "images/maps.png" }
{ name: "운동", icon: "images/ic_workout.png", image: "images/workout.png" }
{ name: "날씨", icon: "images/ic_weather.png", image: "images/weather.png" }
{ name: "음악", icon: "images/ic_music.png", image: "images/music.png" }
]
# Constructor
constructor: (options = {}) ->
options.name = "Docks"
options.backgroundColor = "rgba(255,255,255,.2)"
options.opacity = 0
super options
#
Util.blur @
@onClick -> console.log "block"
@sendToBack()
# Page
@page = new PageComponent
name: "page"
width: @width, height: @height
scrollVertical: false
clip: false
parent: @
# Page : Indicator
@page.indicator = new Indicator page: @page
# Page : Dock
for appInfo in appsInfo
dock = new Dock width: @width, height: @height, info: appInfo
@page.addPage dock
dock.onClick => @selected()
# Page : Label
@page.label = new Label page: @page
# 기본 페이지 : 첫번째
@page.snapToPage @page.content.children[0], false
# Show
show: ->
@animateStop()
@bringToFront()
@opacity = 1
@page.animate y: 5, scale: 281 / @height
page.show() for page, i in @page.content.children
# Dismiss
dismiss: (forceClose = false) ->
return if @isAnimating
# Force close
if forceClose then @close()
else
# Exist select dock
if @selectDock
# Select mode
if @page.scale is 1 then @show()
# Selected
else @selected()
else
# Exsit recent dock
if @recentDock
# Current page is recent dock
if @page.currentPage is @recentDock then @selected()
else
@page.snapToPage @recentDock, true, time: .15
@page.content.once Events.AnimationEnd, => @selected()
else @close()
# Close
close: ->
@animate opacity: 0
@page.animate y: 0, scale: 1
page.default() for page, i in @page.content.children
@once Events.AnimationEnd, ->
@sendToBack() if @opacity is 0
@selectDock = undefined
# Selected
selected: ->
return if @page.isAnimating
@page.animate y: 0, scale: 1
@selectDock = @page.currentPage
@selectDock.selected()
# Exsit recent dock
if @recentDock and @selectDock is @recentDock
@page.once Events.AnimationEnd, =>
@sendToBack()
@removeDock @recentDock
@recentDock = undefined
# Add recent dock
addRecentDock: (appInfo) ->
if @recentDock then @recentDock.addContent appInfo.app
else
@recentDock = new Dock width: @width, height: @height, info: appInfo
@page.addPage @recentDock
@recentDock.onClick (event) => @selected()
@page.snapToPage @recentDock, false
@show()
# Remove dock
removeDock: (layer) ->
@page.content.removeChild layer
@page.updateContent()
# Snap last dock
@page.snapToPage @page.content.children[_.size(@page.content.children) - 1], false
# Update label
@page.label.updateContent()
# Update indicator
@page.indicator.updateContent()
# Label
class Label extends Layer
# Consturctor
constructor: (options = {}) ->
options.name ?= "label"
options.html ?= "메시지"
options.style =
fontSize: "41px", fontWeight: "400"
lineHeight: "1"
paddingLeft: "72px"
options.backgroundColor ?= ""
options.parent = options.page
super options
@page = options.page
Util.text.autoSize @
@props = x: Align.center, maxY: -9.7
# Icon
@icon = new Layer
name: ".icon"
y: Align.center
size: 58.3
borderRadius: 30
parent: @
# Events
@page.on "change:currentPage", => @updateContent()
#
@updateContent()
# Update
updateContent: ->
currentPage = @page.currentPage
@html = currentPage.name
Util.text.autoSize @
@centerX()
@icon.image = currentPage.icon
# Indicator
class Indicator extends Layer
# Constructor
constructor: (options = {}) ->
options.name = "Indicator"
options.backgroundColor = ""
options.parent = options.page
options.y ?= options.page.maxY + 22
super options
@page = options.page
@page.on "change:currentPage", => @changeDotState()
@page.content.on "change:children", => @updateContent()
# Update
updateContent: ->
child.destroy() for child in @children
for child, i in @page.content.children
dot = createDot()
dot.x += @contentFrame().width + 8 unless i is 0
@addChild dot
@size = @contentFrame()
@props = x: Align.center()
@changeDotState false
# Change dot state
changeDotState: (animate=true) ->
currentPage = @page.currentPage
pageIndex = @page.horizontalPageIndex currentPage
if animate
for dot, i in @children
dot.animate if i is pageIndex then "selected" else "normal"
else
for dot, i in @children
dot.stateSwitch if i is pageIndex then "selected" else "normal"
# Create dot
createDot = (options={}) ->
dot = new Layer
name: ".indicator.dot"
size: 13.879
backgroundColor: "white"
opacity: .35
borderRadius: 10
dot.states =
selected: scale: 1.2, opacity: 1, options: { time: .15 }
normal: scale: 1, opacity: .35, options: { time: .2 }
return dot
# Dock apps
class Dock extends Layer
# Constructor
constructor: (options = {}) ->
options.backgroundColor = "black"
super options
#
@info = options.info
@name = @info.name
@icon = @info.icon
# Contents
@content = new Layer
name: ".content"
width: @width, height: @height
backgroundColor: ""
clip: true
parent: @
if @info.image
@content.image = @info.image
@content.time = new Layer
name: ".content.time"
y: 3
width: @width, height: 38
html: Util.date.timeFormatter Util.date.getTime()
style:
fontSize: "32px", fontWeight: "600"
lineHeight: "38px"
textAlign: "right"
opacity: 0
backgroundColor: ""
parent: @content
if @info.app
@addContent @info.app
# Add content
addContent: (layer) ->
@content.addChild layer
child.ignoreEvents = true for child in layer.descendants
removeContent: (layer) ->
@content.removeChild layer
child.ignoreEvents = false for child in layer.descendants
# Show
show: ->
@animate scale: 265 / 281, borderRadius: 15, options: { time: .15 }
@content.animate scale: 237 / 265, options: { time: .15 }
if @info.image
@content.time.animate opacity: 0, options: { time: .20, delay: .3 }
@info.app.toDock() if @info.app
# Default
default: ->
@animate scale: 1, borderRadius: 0, options: { time: .25 }
@content.animate scale: 1, options: { time: .25 }
# Selected
selected: ->
@default()
if @info.image
@content.time.html = Util.date.timeFormatter Util.date.getTime()
@content.time.animate opacity: 1, options: { time: .15, delay: .2 }
if @info.app
@content.once Events.AnimationEnd, =>
@info.app.fromDock()
@removeContent @info.app
@destroy() | 184119 | '''
watchOS : Docks
@auther <NAME> (threeword.com)
@since 2016.11.23
'''
class exports.Docks extends Layer
# Basic apps
appsInfo = [
{ name: "메시지", icon: "images/ic_messages.png", image: "images/messages.png" }
{ name: "캘린더", icon: "images/ic_calendar.png", image: "images/calendar.png" }
{ name: "타이머", icon: "images/ic_stopwatch.png", image: "images/stopwatch.png" }
{ name: "지도", icon: "images/ic_maps.png", image: "images/maps.png" }
{ name: "운동", icon: "images/ic_workout.png", image: "images/workout.png" }
{ name: "날씨", icon: "images/ic_weather.png", image: "images/weather.png" }
{ name: "음악", icon: "images/ic_music.png", image: "images/music.png" }
]
# Constructor
constructor: (options = {}) ->
options.name = "Docks"
options.backgroundColor = "rgba(255,255,255,.2)"
options.opacity = 0
super options
#
Util.blur @
@onClick -> console.log "block"
@sendToBack()
# Page
@page = new PageComponent
name: "page"
width: @width, height: @height
scrollVertical: false
clip: false
parent: @
# Page : Indicator
@page.indicator = new Indicator page: @page
# Page : Dock
for appInfo in appsInfo
dock = new Dock width: @width, height: @height, info: appInfo
@page.addPage dock
dock.onClick => @selected()
# Page : Label
@page.label = new Label page: @page
# 기본 페이지 : 첫번째
@page.snapToPage @page.content.children[0], false
# Show
show: ->
@animateStop()
@bringToFront()
@opacity = 1
@page.animate y: 5, scale: 281 / @height
page.show() for page, i in @page.content.children
# Dismiss
dismiss: (forceClose = false) ->
return if @isAnimating
# Force close
if forceClose then @close()
else
# Exist select dock
if @selectDock
# Select mode
if @page.scale is 1 then @show()
# Selected
else @selected()
else
# Exsit recent dock
if @recentDock
# Current page is recent dock
if @page.currentPage is @recentDock then @selected()
else
@page.snapToPage @recentDock, true, time: .15
@page.content.once Events.AnimationEnd, => @selected()
else @close()
# Close
close: ->
@animate opacity: 0
@page.animate y: 0, scale: 1
page.default() for page, i in @page.content.children
@once Events.AnimationEnd, ->
@sendToBack() if @opacity is 0
@selectDock = undefined
# Selected
selected: ->
return if @page.isAnimating
@page.animate y: 0, scale: 1
@selectDock = @page.currentPage
@selectDock.selected()
# Exsit recent dock
if @recentDock and @selectDock is @recentDock
@page.once Events.AnimationEnd, =>
@sendToBack()
@removeDock @recentDock
@recentDock = undefined
# Add recent dock
addRecentDock: (appInfo) ->
if @recentDock then @recentDock.addContent appInfo.app
else
@recentDock = new Dock width: @width, height: @height, info: appInfo
@page.addPage @recentDock
@recentDock.onClick (event) => @selected()
@page.snapToPage @recentDock, false
@show()
# Remove dock
removeDock: (layer) ->
@page.content.removeChild layer
@page.updateContent()
# Snap last dock
@page.snapToPage @page.content.children[_.size(@page.content.children) - 1], false
# Update label
@page.label.updateContent()
# Update indicator
@page.indicator.updateContent()
# Label
class Label extends Layer
# Consturctor
constructor: (options = {}) ->
options.name ?= "label"
options.html ?= "메시지"
options.style =
fontSize: "41px", fontWeight: "400"
lineHeight: "1"
paddingLeft: "72px"
options.backgroundColor ?= ""
options.parent = options.page
super options
@page = options.page
Util.text.autoSize @
@props = x: Align.center, maxY: -9.7
# Icon
@icon = new Layer
name: ".icon"
y: Align.center
size: 58.3
borderRadius: 30
parent: @
# Events
@page.on "change:currentPage", => @updateContent()
#
@updateContent()
# Update
updateContent: ->
currentPage = @page.currentPage
@html = currentPage.name
Util.text.autoSize @
@centerX()
@icon.image = currentPage.icon
# Indicator
class Indicator extends Layer
# Constructor
constructor: (options = {}) ->
options.name = "Indicator"
options.backgroundColor = ""
options.parent = options.page
options.y ?= options.page.maxY + 22
super options
@page = options.page
@page.on "change:currentPage", => @changeDotState()
@page.content.on "change:children", => @updateContent()
# Update
updateContent: ->
child.destroy() for child in @children
for child, i in @page.content.children
dot = createDot()
dot.x += @contentFrame().width + 8 unless i is 0
@addChild dot
@size = @contentFrame()
@props = x: Align.center()
@changeDotState false
# Change dot state
changeDotState: (animate=true) ->
currentPage = @page.currentPage
pageIndex = @page.horizontalPageIndex currentPage
if animate
for dot, i in @children
dot.animate if i is pageIndex then "selected" else "normal"
else
for dot, i in @children
dot.stateSwitch if i is pageIndex then "selected" else "normal"
# Create dot
createDot = (options={}) ->
dot = new Layer
name: ".indicator.dot"
size: 13.879
backgroundColor: "white"
opacity: .35
borderRadius: 10
dot.states =
selected: scale: 1.2, opacity: 1, options: { time: .15 }
normal: scale: 1, opacity: .35, options: { time: .2 }
return dot
# Dock apps
class Dock extends Layer
# Constructor
constructor: (options = {}) ->
options.backgroundColor = "black"
super options
#
@info = options.info
@name = @info.name
@icon = @info.icon
# Contents
@content = new Layer
name: ".content"
width: @width, height: @height
backgroundColor: ""
clip: true
parent: @
if @info.image
@content.image = @info.image
@content.time = new Layer
name: ".content.time"
y: 3
width: @width, height: 38
html: Util.date.timeFormatter Util.date.getTime()
style:
fontSize: "32px", fontWeight: "600"
lineHeight: "38px"
textAlign: "right"
opacity: 0
backgroundColor: ""
parent: @content
if @info.app
@addContent @info.app
# Add content
addContent: (layer) ->
@content.addChild layer
child.ignoreEvents = true for child in layer.descendants
removeContent: (layer) ->
@content.removeChild layer
child.ignoreEvents = false for child in layer.descendants
# Show
show: ->
@animate scale: 265 / 281, borderRadius: 15, options: { time: .15 }
@content.animate scale: 237 / 265, options: { time: .15 }
if @info.image
@content.time.animate opacity: 0, options: { time: .20, delay: .3 }
@info.app.toDock() if @info.app
# Default
default: ->
@animate scale: 1, borderRadius: 0, options: { time: .25 }
@content.animate scale: 1, options: { time: .25 }
# Selected
selected: ->
@default()
if @info.image
@content.time.html = Util.date.timeFormatter Util.date.getTime()
@content.time.animate opacity: 1, options: { time: .15, delay: .2 }
if @info.app
@content.once Events.AnimationEnd, =>
@info.app.fromDock()
@removeContent @info.app
@destroy() | true | '''
watchOS : Docks
@auther PI:NAME:<NAME>END_PI (threeword.com)
@since 2016.11.23
'''
class exports.Docks extends Layer
# Basic apps
appsInfo = [
{ name: "메시지", icon: "images/ic_messages.png", image: "images/messages.png" }
{ name: "캘린더", icon: "images/ic_calendar.png", image: "images/calendar.png" }
{ name: "타이머", icon: "images/ic_stopwatch.png", image: "images/stopwatch.png" }
{ name: "지도", icon: "images/ic_maps.png", image: "images/maps.png" }
{ name: "운동", icon: "images/ic_workout.png", image: "images/workout.png" }
{ name: "날씨", icon: "images/ic_weather.png", image: "images/weather.png" }
{ name: "음악", icon: "images/ic_music.png", image: "images/music.png" }
]
# Constructor
constructor: (options = {}) ->
options.name = "Docks"
options.backgroundColor = "rgba(255,255,255,.2)"
options.opacity = 0
super options
#
Util.blur @
@onClick -> console.log "block"
@sendToBack()
# Page
@page = new PageComponent
name: "page"
width: @width, height: @height
scrollVertical: false
clip: false
parent: @
# Page : Indicator
@page.indicator = new Indicator page: @page
# Page : Dock
for appInfo in appsInfo
dock = new Dock width: @width, height: @height, info: appInfo
@page.addPage dock
dock.onClick => @selected()
# Page : Label
@page.label = new Label page: @page
# 기본 페이지 : 첫번째
@page.snapToPage @page.content.children[0], false
# Show
show: ->
@animateStop()
@bringToFront()
@opacity = 1
@page.animate y: 5, scale: 281 / @height
page.show() for page, i in @page.content.children
# Dismiss
dismiss: (forceClose = false) ->
return if @isAnimating
# Force close
if forceClose then @close()
else
# Exist select dock
if @selectDock
# Select mode
if @page.scale is 1 then @show()
# Selected
else @selected()
else
# Exsit recent dock
if @recentDock
# Current page is recent dock
if @page.currentPage is @recentDock then @selected()
else
@page.snapToPage @recentDock, true, time: .15
@page.content.once Events.AnimationEnd, => @selected()
else @close()
# Close
close: ->
@animate opacity: 0
@page.animate y: 0, scale: 1
page.default() for page, i in @page.content.children
@once Events.AnimationEnd, ->
@sendToBack() if @opacity is 0
@selectDock = undefined
# Selected
selected: ->
return if @page.isAnimating
@page.animate y: 0, scale: 1
@selectDock = @page.currentPage
@selectDock.selected()
# Exsit recent dock
if @recentDock and @selectDock is @recentDock
@page.once Events.AnimationEnd, =>
@sendToBack()
@removeDock @recentDock
@recentDock = undefined
# Add recent dock
addRecentDock: (appInfo) ->
if @recentDock then @recentDock.addContent appInfo.app
else
@recentDock = new Dock width: @width, height: @height, info: appInfo
@page.addPage @recentDock
@recentDock.onClick (event) => @selected()
@page.snapToPage @recentDock, false
@show()
# Remove dock
removeDock: (layer) ->
@page.content.removeChild layer
@page.updateContent()
# Snap last dock
@page.snapToPage @page.content.children[_.size(@page.content.children) - 1], false
# Update label
@page.label.updateContent()
# Update indicator
@page.indicator.updateContent()
# Label
class Label extends Layer
# Consturctor
constructor: (options = {}) ->
options.name ?= "label"
options.html ?= "메시지"
options.style =
fontSize: "41px", fontWeight: "400"
lineHeight: "1"
paddingLeft: "72px"
options.backgroundColor ?= ""
options.parent = options.page
super options
@page = options.page
Util.text.autoSize @
@props = x: Align.center, maxY: -9.7
# Icon
@icon = new Layer
name: ".icon"
y: Align.center
size: 58.3
borderRadius: 30
parent: @
# Events
@page.on "change:currentPage", => @updateContent()
#
@updateContent()
# Update
updateContent: ->
currentPage = @page.currentPage
@html = currentPage.name
Util.text.autoSize @
@centerX()
@icon.image = currentPage.icon
# Indicator
class Indicator extends Layer
# Constructor
constructor: (options = {}) ->
options.name = "Indicator"
options.backgroundColor = ""
options.parent = options.page
options.y ?= options.page.maxY + 22
super options
@page = options.page
@page.on "change:currentPage", => @changeDotState()
@page.content.on "change:children", => @updateContent()
# Update
updateContent: ->
child.destroy() for child in @children
for child, i in @page.content.children
dot = createDot()
dot.x += @contentFrame().width + 8 unless i is 0
@addChild dot
@size = @contentFrame()
@props = x: Align.center()
@changeDotState false
# Change dot state
changeDotState: (animate=true) ->
currentPage = @page.currentPage
pageIndex = @page.horizontalPageIndex currentPage
if animate
for dot, i in @children
dot.animate if i is pageIndex then "selected" else "normal"
else
for dot, i in @children
dot.stateSwitch if i is pageIndex then "selected" else "normal"
# Create dot
createDot = (options={}) ->
dot = new Layer
name: ".indicator.dot"
size: 13.879
backgroundColor: "white"
opacity: .35
borderRadius: 10
dot.states =
selected: scale: 1.2, opacity: 1, options: { time: .15 }
normal: scale: 1, opacity: .35, options: { time: .2 }
return dot
# Dock apps
class Dock extends Layer
# Constructor
constructor: (options = {}) ->
options.backgroundColor = "black"
super options
#
@info = options.info
@name = @info.name
@icon = @info.icon
# Contents
@content = new Layer
name: ".content"
width: @width, height: @height
backgroundColor: ""
clip: true
parent: @
if @info.image
@content.image = @info.image
@content.time = new Layer
name: ".content.time"
y: 3
width: @width, height: 38
html: Util.date.timeFormatter Util.date.getTime()
style:
fontSize: "32px", fontWeight: "600"
lineHeight: "38px"
textAlign: "right"
opacity: 0
backgroundColor: ""
parent: @content
if @info.app
@addContent @info.app
# Add content
addContent: (layer) ->
@content.addChild layer
child.ignoreEvents = true for child in layer.descendants
removeContent: (layer) ->
@content.removeChild layer
child.ignoreEvents = false for child in layer.descendants
# Show
show: ->
@animate scale: 265 / 281, borderRadius: 15, options: { time: .15 }
@content.animate scale: 237 / 265, options: { time: .15 }
if @info.image
@content.time.animate opacity: 0, options: { time: .20, delay: .3 }
@info.app.toDock() if @info.app
# Default
default: ->
@animate scale: 1, borderRadius: 0, options: { time: .25 }
@content.animate scale: 1, options: { time: .25 }
# Selected
selected: ->
@default()
if @info.image
@content.time.html = Util.date.timeFormatter Util.date.getTime()
@content.time.animate opacity: 1, options: { time: .15, delay: .2 }
if @info.app
@content.once Events.AnimationEnd, =>
@info.app.fromDock()
@removeContent @info.app
@destroy() |
[
{
"context": "#\n# The i18n test\n#\n# Copyright (C) 2011-2013 Nikolay Nemshilov\n#\n{Test, assert} = require('lovely')\n\n\ndescribe \"",
"end": 63,
"score": 0.9998790621757507,
"start": 46,
"tag": "NAME",
"value": "Nikolay Nemshilov"
}
] | stl/date/test/i18n_test.coffee | lovely-io/lovely.io-stl | 2 | #
# The i18n test
#
# Copyright (C) 2011-2013 Nikolay Nemshilov
#
{Test, assert} = require('lovely')
describe "Dates i18n", ->
Date = null
date = null
original = null
russian =
days: 'Воскресенье Понедельник Вторник Среда Четверг Пятница Суббота'
daysShort: 'Вск Пнд Втр Срд Чтв Птн Суб'
months: 'Январь Февраль Март Апрель Май Июнь Июль Август Сентябрь Октябрь Ноябрь Декабрь'
monthsShort: 'Янв Фев Мар Апр Май Инь Иль Авг Сен Окт Ноя Дек'
before Test.load (build)->
Date = build
original = Date.i18n
date = new Date()
date.setFullYear(2011)
date.setMonth(7)
date.setDate(18)
describe "dates formatting", ->
it "should internationalize %a", ->
Date.i18n = russian
assert.equal date.format("%a"), "Чтв"
Date.i18n = original
it "should internationalize %A", ->
Date.i18n = russian
assert.equal date.format("%A"), "Четверг"
Date.i18n = original
it "should internationalize %b", ->
Date.i18n = russian
assert.equal date.format("%b"), "Авг"
Date.i18n = original
it "should internationalize %B", ->
Date.i18n = russian
assert.equal date.format("%B"), "Август"
Date.i18n = original
describe "dates parsing", ->
it "should parse %b", ->
Date.i18n = russian
assert.equal Date.parse("Авг", "%b").getMonth(), 7
Date.i18n = original
it "should parse %B", ->
Date.i18n = russian
assert.equal Date.parse("Сентябрь", "%B").getMonth(), 8
Date.i18n = original
| 32470 | #
# The i18n test
#
# Copyright (C) 2011-2013 <NAME>
#
{Test, assert} = require('lovely')
describe "Dates i18n", ->
Date = null
date = null
original = null
russian =
days: 'Воскресенье Понедельник Вторник Среда Четверг Пятница Суббота'
daysShort: 'Вск Пнд Втр Срд Чтв Птн Суб'
months: 'Январь Февраль Март Апрель Май Июнь Июль Август Сентябрь Октябрь Ноябрь Декабрь'
monthsShort: 'Янв Фев Мар Апр Май Инь Иль Авг Сен Окт Ноя Дек'
before Test.load (build)->
Date = build
original = Date.i18n
date = new Date()
date.setFullYear(2011)
date.setMonth(7)
date.setDate(18)
describe "dates formatting", ->
it "should internationalize %a", ->
Date.i18n = russian
assert.equal date.format("%a"), "Чтв"
Date.i18n = original
it "should internationalize %A", ->
Date.i18n = russian
assert.equal date.format("%A"), "Четверг"
Date.i18n = original
it "should internationalize %b", ->
Date.i18n = russian
assert.equal date.format("%b"), "Авг"
Date.i18n = original
it "should internationalize %B", ->
Date.i18n = russian
assert.equal date.format("%B"), "Август"
Date.i18n = original
describe "dates parsing", ->
it "should parse %b", ->
Date.i18n = russian
assert.equal Date.parse("Авг", "%b").getMonth(), 7
Date.i18n = original
it "should parse %B", ->
Date.i18n = russian
assert.equal Date.parse("Сентябрь", "%B").getMonth(), 8
Date.i18n = original
| true | #
# The i18n test
#
# Copyright (C) 2011-2013 PI:NAME:<NAME>END_PI
#
{Test, assert} = require('lovely')
describe "Dates i18n", ->
Date = null
date = null
original = null
russian =
days: 'Воскресенье Понедельник Вторник Среда Четверг Пятница Суббота'
daysShort: 'Вск Пнд Втр Срд Чтв Птн Суб'
months: 'Январь Февраль Март Апрель Май Июнь Июль Август Сентябрь Октябрь Ноябрь Декабрь'
monthsShort: 'Янв Фев Мар Апр Май Инь Иль Авг Сен Окт Ноя Дек'
before Test.load (build)->
Date = build
original = Date.i18n
date = new Date()
date.setFullYear(2011)
date.setMonth(7)
date.setDate(18)
describe "dates formatting", ->
it "should internationalize %a", ->
Date.i18n = russian
assert.equal date.format("%a"), "Чтв"
Date.i18n = original
it "should internationalize %A", ->
Date.i18n = russian
assert.equal date.format("%A"), "Четверг"
Date.i18n = original
it "should internationalize %b", ->
Date.i18n = russian
assert.equal date.format("%b"), "Авг"
Date.i18n = original
it "should internationalize %B", ->
Date.i18n = russian
assert.equal date.format("%B"), "Август"
Date.i18n = original
describe "dates parsing", ->
it "should parse %b", ->
Date.i18n = russian
assert.equal Date.parse("Авг", "%b").getMonth(), 7
Date.i18n = original
it "should parse %B", ->
Date.i18n = russian
assert.equal Date.parse("Сентябрь", "%B").getMonth(), 8
Date.i18n = original
|
[
{
"context": ": \"A boilerplate for static site\"\n meta_author: 'Peter DeMartini'\n",
"end": 126,
"score": 0.9998590350151062,
"start": 111,
"tag": "NAME",
"value": "Peter DeMartini"
}
] | metadata.coffee | peterdemartini/nectarine | 0 | module.exports =
meta_title: 'Nectarine'
meta_description: "A boilerplate for static site"
meta_author: 'Peter DeMartini'
| 5563 | module.exports =
meta_title: 'Nectarine'
meta_description: "A boilerplate for static site"
meta_author: '<NAME>'
| true | module.exports =
meta_title: 'Nectarine'
meta_description: "A boilerplate for static site"
meta_author: 'PI:NAME:<NAME>END_PI'
|
[
{
"context": " 'name', placeholder: 'name')\n (input type: 'password', name: 'password', ref: 'password', placeholder:",
"end": 1075,
"score": 0.9983497858047485,
"start": 1067,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " name: 'password', ref: 'password', placehold... | app/Login.coffee | fiatjaf/scrapboard | 2 | React = require 'lib/react'
superagent = require 'lib/superagent'
{div, time, a, form, input, textarea, button} = React.DOM
getSessionURL = ->
if typeof getBaseURL == 'function'
getBaseURL() + '/_session'
else if typeof basePath isnt 'undefined'
basePath + '/_session'
else
'/_session'
module.exports = React.createClass
getInitialState: ->
url: @props.url or getSessionURL()
loggedAs: null
dismissedAd: false
componentDidMount: ->
@checkLoginStatus()
checkLoginStatus: ->
superagent.get(getSessionURL())
.set('accept', 'application/json')
.withCredentials()
.end (err, res) =>
if not err and res.body.ok
@setState loggedAs: res.body.userCtx.name
if res.body.userCtx.name
@props.onLogin(res.body.userCtx.name) if @props.onLogin
render: ->
adWrapper = null
loginForm = (form
method: 'post'
action: @state.url
onSubmit: @doLogin
,
(input name: 'name', ref: 'name', placeholder: 'name')
(input type: 'password', name: 'password', ref: 'password', placeholder: 'password')
(button
type: 'submit'
, 'Login')
)
if not @state.loggedAs and not @props.children and not @state.dismissedAd
adWrapper = (div {},
(a
href: 'https://www.smileupps.com/store/apps/scrapbook'
target: '_blank'
, 'get a scrapbook')
' or '
(button
onClick: @dismissAd
, 'login')
)
(div className: @props.className,
(div {},
@props.children
adWrapper or loginForm
) if not @state.loggedAs
(div {},
"logged as #{@state.loggedAs} ("
(a {href: "#", onClick: @doLogout}, 'logout')
")"
) if @state.loggedAs
)
dismissAd: (e) ->
e.preventDefault()
@setState dismissedAd: true
doLogin: (e) ->
e.preventDefault()
name = @refs.name.getDOMNode().value
password = @refs.password.getDOMNode().value
superagent.post(@state.url)
.set('content-type', 'application/x-www-form-urlencoded')
.set('accept', 'application/json')
.send(name: name, password: password)
.withCredentials()
.end (err, res) =>
if err or not res.body.ok
return
@checkLoginStatus()
doLogout: (e) ->
e.preventDefault()
superagent.del(@state.url)
.withCredentials()
.end (err, res) =>
if err
return
@setState
loggedAs: null
@props.onLogout() if @props.onLogout
| 176094 | React = require 'lib/react'
superagent = require 'lib/superagent'
{div, time, a, form, input, textarea, button} = React.DOM
getSessionURL = ->
if typeof getBaseURL == 'function'
getBaseURL() + '/_session'
else if typeof basePath isnt 'undefined'
basePath + '/_session'
else
'/_session'
module.exports = React.createClass
getInitialState: ->
url: @props.url or getSessionURL()
loggedAs: null
dismissedAd: false
componentDidMount: ->
@checkLoginStatus()
checkLoginStatus: ->
superagent.get(getSessionURL())
.set('accept', 'application/json')
.withCredentials()
.end (err, res) =>
if not err and res.body.ok
@setState loggedAs: res.body.userCtx.name
if res.body.userCtx.name
@props.onLogin(res.body.userCtx.name) if @props.onLogin
render: ->
adWrapper = null
loginForm = (form
method: 'post'
action: @state.url
onSubmit: @doLogin
,
(input name: 'name', ref: 'name', placeholder: 'name')
(input type: '<PASSWORD>', name: 'password', ref: 'password', placeholder: '<PASSWORD>')
(button
type: 'submit'
, 'Login')
)
if not @state.loggedAs and not @props.children and not @state.dismissedAd
adWrapper = (div {},
(a
href: 'https://www.smileupps.com/store/apps/scrapbook'
target: '_blank'
, 'get a scrapbook')
' or '
(button
onClick: @dismissAd
, 'login')
)
(div className: @props.className,
(div {},
@props.children
adWrapper or loginForm
) if not @state.loggedAs
(div {},
"logged as #{@state.loggedAs} ("
(a {href: "#", onClick: @doLogout}, 'logout')
")"
) if @state.loggedAs
)
dismissAd: (e) ->
e.preventDefault()
@setState dismissedAd: true
doLogin: (e) ->
e.preventDefault()
name = @refs.name.getDOMNode().value
password = @refs.password.getDOMNode().value
superagent.post(@state.url)
.set('content-type', 'application/x-www-form-urlencoded')
.set('accept', 'application/json')
.send(name: name, password: <PASSWORD>)
.withCredentials()
.end (err, res) =>
if err or not res.body.ok
return
@checkLoginStatus()
doLogout: (e) ->
e.preventDefault()
superagent.del(@state.url)
.withCredentials()
.end (err, res) =>
if err
return
@setState
loggedAs: null
@props.onLogout() if @props.onLogout
| true | React = require 'lib/react'
superagent = require 'lib/superagent'
{div, time, a, form, input, textarea, button} = React.DOM
getSessionURL = ->
if typeof getBaseURL == 'function'
getBaseURL() + '/_session'
else if typeof basePath isnt 'undefined'
basePath + '/_session'
else
'/_session'
module.exports = React.createClass
getInitialState: ->
url: @props.url or getSessionURL()
loggedAs: null
dismissedAd: false
componentDidMount: ->
@checkLoginStatus()
checkLoginStatus: ->
superagent.get(getSessionURL())
.set('accept', 'application/json')
.withCredentials()
.end (err, res) =>
if not err and res.body.ok
@setState loggedAs: res.body.userCtx.name
if res.body.userCtx.name
@props.onLogin(res.body.userCtx.name) if @props.onLogin
render: ->
adWrapper = null
loginForm = (form
method: 'post'
action: @state.url
onSubmit: @doLogin
,
(input name: 'name', ref: 'name', placeholder: 'name')
(input type: 'PI:PASSWORD:<PASSWORD>END_PI', name: 'password', ref: 'password', placeholder: 'PI:PASSWORD:<PASSWORD>END_PI')
(button
type: 'submit'
, 'Login')
)
if not @state.loggedAs and not @props.children and not @state.dismissedAd
adWrapper = (div {},
(a
href: 'https://www.smileupps.com/store/apps/scrapbook'
target: '_blank'
, 'get a scrapbook')
' or '
(button
onClick: @dismissAd
, 'login')
)
(div className: @props.className,
(div {},
@props.children
adWrapper or loginForm
) if not @state.loggedAs
(div {},
"logged as #{@state.loggedAs} ("
(a {href: "#", onClick: @doLogout}, 'logout')
")"
) if @state.loggedAs
)
dismissAd: (e) ->
e.preventDefault()
@setState dismissedAd: true
doLogin: (e) ->
e.preventDefault()
name = @refs.name.getDOMNode().value
password = @refs.password.getDOMNode().value
superagent.post(@state.url)
.set('content-type', 'application/x-www-form-urlencoded')
.set('accept', 'application/json')
.send(name: name, password: PI:PASSWORD:<PASSWORD>END_PI)
.withCredentials()
.end (err, res) =>
if err or not res.body.ok
return
@checkLoginStatus()
doLogout: (e) ->
e.preventDefault()
superagent.del(@state.url)
.withCredentials()
.end (err, res) =>
if err
return
@setState
loggedAs: null
@props.onLogout() if @props.onLogout
|
[
{
"context": "(str) -> 'hex' + str\n\n#START:main\npasswordHash = 'e2fc714c4727ee9395f324cd2e7f331f'\n# do other stuff...\nsendPasswordHashToServer = (",
"end": 126,
"score": 0.9995155930519104,
"start": 94,
"tag": "PASSWORD",
"value": "e2fc714c4727ee9395f324cd2e7f331f"
},
{
"context... | code/LanguageBasics/passwordHash.coffee | mingchaoyan/CoffeeScript-Accelerated-JavaScript-Development | 0 | $ = ajax: (obj) -> return obj.data
hexMD5 = (str) -> 'hex' + str
#START:main
passwordHash = 'e2fc714c4727ee9395f324cd2e7f331f'
# do other stuff...
sendPasswordHashToServer = (password) ->
passwordHash = hexMD5(password)
$.ajax data: passwordHash
#END:main
assert = require 'assert'
assert.equal passwordHash, 'e2fc714c4727ee9395f324cd2e7f331f'
sendPasswordHashToServer 'password'
assert.equal passwordHash, 'hexpassword' | 184581 | $ = ajax: (obj) -> return obj.data
hexMD5 = (str) -> 'hex' + str
#START:main
passwordHash = '<PASSWORD>'
# do other stuff...
sendPasswordHashToServer = (password) ->
passwordHash = hexMD5(password)
$.ajax data: passwordHash
#END:main
assert = require 'assert'
assert.equal passwordHash, '<PASSWORD>'
sendPasswordHashToServer 'password'
assert.equal passwordHash, '<PASSWORD>' | true | $ = ajax: (obj) -> return obj.data
hexMD5 = (str) -> 'hex' + str
#START:main
passwordHash = 'PI:PASSWORD:<PASSWORD>END_PI'
# do other stuff...
sendPasswordHashToServer = (password) ->
passwordHash = hexMD5(password)
$.ajax data: passwordHash
#END:main
assert = require 'assert'
assert.equal passwordHash, 'PI:PASSWORD:<PASSWORD>END_PI'
sendPasswordHashToServer 'password'
assert.equal passwordHash, 'PI:PASSWORD:<PASSWORD>END_PI' |
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999122619628906,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/_classes/forum-topic-watch-ajax.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @ForumTopicWatchAjax
constructor: ->
$(document).on 'ajax:before', '.js-forum-topic-watch-ajax', @shouldContinue
$(document).on 'ajax:send', '.js-forum-topic-watch-ajax', @loading
$(document).on 'ajax:error', '.js-forum-topic-watch-ajax', @fail
$(document).on 'ajax:success', '.js-forum-topic-watch-ajax', @done
@xhr = []
@unknownState = false
abortAll: =>
xhr.abort() while (xhr = @xhr.pop())?
done: =>
@unknownState = false
fail: (e, _xhr, status) =>
target = e.currentTarget
target.dataset.skipAjaxErrorPopup = '1' if status == 'abort'
target.classList.remove 'js-forum-topic-watch-ajax--loading'
target.disabled = false
return
loading: (e, xhr) =>
@unknownState = true
@abortAll()
@xhr.push xhr
LoadingOverlay.hide()
target = e.currentTarget
target.dataset.skipAjaxErrorPopup = '0'
target.classList.add 'js-forum-topic-watch-ajax--loading'
target.disabled = true
return
shouldContinue: (e) =>
@unknownState || e.currentTarget.dataset.forumTopicWatchAjaxIsActive != '1'
| 20702 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @ForumTopicWatchAjax
constructor: ->
$(document).on 'ajax:before', '.js-forum-topic-watch-ajax', @shouldContinue
$(document).on 'ajax:send', '.js-forum-topic-watch-ajax', @loading
$(document).on 'ajax:error', '.js-forum-topic-watch-ajax', @fail
$(document).on 'ajax:success', '.js-forum-topic-watch-ajax', @done
@xhr = []
@unknownState = false
abortAll: =>
xhr.abort() while (xhr = @xhr.pop())?
done: =>
@unknownState = false
fail: (e, _xhr, status) =>
target = e.currentTarget
target.dataset.skipAjaxErrorPopup = '1' if status == 'abort'
target.classList.remove 'js-forum-topic-watch-ajax--loading'
target.disabled = false
return
loading: (e, xhr) =>
@unknownState = true
@abortAll()
@xhr.push xhr
LoadingOverlay.hide()
target = e.currentTarget
target.dataset.skipAjaxErrorPopup = '0'
target.classList.add 'js-forum-topic-watch-ajax--loading'
target.disabled = true
return
shouldContinue: (e) =>
@unknownState || e.currentTarget.dataset.forumTopicWatchAjaxIsActive != '1'
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
class @ForumTopicWatchAjax
constructor: ->
$(document).on 'ajax:before', '.js-forum-topic-watch-ajax', @shouldContinue
$(document).on 'ajax:send', '.js-forum-topic-watch-ajax', @loading
$(document).on 'ajax:error', '.js-forum-topic-watch-ajax', @fail
$(document).on 'ajax:success', '.js-forum-topic-watch-ajax', @done
@xhr = []
@unknownState = false
abortAll: =>
xhr.abort() while (xhr = @xhr.pop())?
done: =>
@unknownState = false
fail: (e, _xhr, status) =>
target = e.currentTarget
target.dataset.skipAjaxErrorPopup = '1' if status == 'abort'
target.classList.remove 'js-forum-topic-watch-ajax--loading'
target.disabled = false
return
loading: (e, xhr) =>
@unknownState = true
@abortAll()
@xhr.push xhr
LoadingOverlay.hide()
target = e.currentTarget
target.dataset.skipAjaxErrorPopup = '0'
target.classList.add 'js-forum-topic-watch-ajax--loading'
target.disabled = true
return
shouldContinue: (e) =>
@unknownState || e.currentTarget.dataset.forumTopicWatchAjaxIsActive != '1'
|
[
{
"context": "raxis.com/IQScloudWeb/IQScloudManager/api?token=#{iqstoken}&tag=#{tag}\")\n .header('Accept', 'applicat",
"end": 365,
"score": 0.5451219081878662,
"start": 357,
"tag": "KEY",
"value": "iqstoken"
}
] | scripts/iqscloud.coffee | infraxis/ixbot | 0 | iqstoken = process.env.IQS_TOKEN
module.exports = (robot) ->
robot.respond /run iqs test (.*)/i, (rep) ->
tag = rep.match[1]
rep.reply ":rocket: OK, running the testset tagged with " + tag + " in iqs - I'll report back the results when done."
robot.http("https://iqscloud.infraxis.com/IQScloudWeb/IQScloudManager/api?token=#{iqstoken}&tag=#{tag}")
.header('Accept', 'application/json')
.get() (err, res, body) ->
# error checking code here
if res.statusCode isnt 200
data = JSON.parse body
rep.reply ":broken_heart: Hmm, running #{tag} in iqs didn't work out. #{data.message}"
return
data = JSON.parse body
rep.reply ":thumbsup: iqs has finished testset #{tag}\n
:smile: passed : #{data.passed}\n
:unamused: failed : #{data.failed}\n
:anguished: aborted: #{data.aborted}\n
:no_mouth: no result: #{data.noresult}\n
:nerd_face: passed percent: #{data.passedpercent}%"
| 55251 | iqstoken = process.env.IQS_TOKEN
module.exports = (robot) ->
robot.respond /run iqs test (.*)/i, (rep) ->
tag = rep.match[1]
rep.reply ":rocket: OK, running the testset tagged with " + tag + " in iqs - I'll report back the results when done."
robot.http("https://iqscloud.infraxis.com/IQScloudWeb/IQScloudManager/api?token=#{<KEY>}&tag=#{tag}")
.header('Accept', 'application/json')
.get() (err, res, body) ->
# error checking code here
if res.statusCode isnt 200
data = JSON.parse body
rep.reply ":broken_heart: Hmm, running #{tag} in iqs didn't work out. #{data.message}"
return
data = JSON.parse body
rep.reply ":thumbsup: iqs has finished testset #{tag}\n
:smile: passed : #{data.passed}\n
:unamused: failed : #{data.failed}\n
:anguished: aborted: #{data.aborted}\n
:no_mouth: no result: #{data.noresult}\n
:nerd_face: passed percent: #{data.passedpercent}%"
| true | iqstoken = process.env.IQS_TOKEN
module.exports = (robot) ->
robot.respond /run iqs test (.*)/i, (rep) ->
tag = rep.match[1]
rep.reply ":rocket: OK, running the testset tagged with " + tag + " in iqs - I'll report back the results when done."
robot.http("https://iqscloud.infraxis.com/IQScloudWeb/IQScloudManager/api?token=#{PI:KEY:<KEY>END_PI}&tag=#{tag}")
.header('Accept', 'application/json')
.get() (err, res, body) ->
# error checking code here
if res.statusCode isnt 200
data = JSON.parse body
rep.reply ":broken_heart: Hmm, running #{tag} in iqs didn't work out. #{data.message}"
return
data = JSON.parse body
rep.reply ":thumbsup: iqs has finished testset #{tag}\n
:smile: passed : #{data.passed}\n
:unamused: failed : #{data.failed}\n
:anguished: aborted: #{data.aborted}\n
:no_mouth: no result: #{data.noresult}\n
:nerd_face: passed percent: #{data.passedpercent}%"
|
[
{
"context": "[0]}\", advice[1]]\n else\n key = _.keys(advice)[0]\n advice = [\"#{namespace",
"end": 5001,
"score": 0.6022877097129822,
"start": 4999,
"tag": "KEY",
"value": "_."
}
] | lib/YouAreDaChef.coffee | raganwald/YouAreDaChef | 22 | _ = require 'underscore'
class Combinator
constructor: (args...) ->
@clazzes []
@methods []
@for(args...)
this
namespace: (name = null) ->
if name?
@_namespace = name
this
else @_namespace
clazzes: (args...) ->
if args.length > 0
@_clazzes = args
this
else @_clazzes
methods: (args...) ->
if args.length > 0
@_methods = args
this
else @_methods
for: (args...) ->
if args.length > 0 and _.all(args, _.isFunction)
@clazzes(args...)
else if args.length is 1
if _.isString(args[0])
@namespace( args[0] )
else
@namespace( _.keys(args[0])[0] )
clazz_arg = args[0][@namespace()]
if _.isArray(clazz_arg)
@clazzes(clazz_arg...)
else if _.isFunction(clazz_arg)
@clazzes(clazz_arg)
else throw "What do I do with { #{@namespace()}: #{clazz_arg} }?"
this
advise: (verb, advice, namespace, clazzes, pointcut_exprs) ->
if verb is 'unless'
verb = 'guard'
_advice = advice
advice = (args...) ->
!_advice.apply(this, args)
throw "Need to define one or more classes" unless clazzes.length
_.each clazzes, (clazz) ->
daemonize = (name, inject = []) ->
daemonology = (clazz.__YouAreDaChef ?= {})[name] ?= {}
_.defaults daemonology,
before: []
after: []
around: []
guard: []
default: []
unless clazz.prototype.hasOwnProperty("before_#{name}_daemon")
clazz.prototype["before_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
# execute specific daemons for side-effects
for daemon in daemonology.before.reverse()
daemon[1].apply(this, daemon_args)
# try a super-daemon if available
clazz.__super__?["before_#{name}_daemon"]?.apply(this, args)
clazz.prototype["after_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
clazz.__super__?["after_#{name}_daemon"]?.apply(this, args)
# execute specific daemons for side-effects
for daemon in daemonology.after
daemon[1].apply(this, daemon_args)
clazz.prototype["around_#{name}_daemon"] = (default_fn, args...) ->
daemon_args = inject.concat args
fn_list = []
# try a super-daemon if available
if clazz.__super__?["around_#{name}_daemon"]?
fn_list.unshift clazz.__super__?["around_#{name}_daemon"]
# specific daemons
for daemon in daemonology.around
fn_list.unshift daemon[1]
fn = _.reduce fn_list, (acc, advice) ->
(args...) -> advice.call(this, acc, daemon_args...)
, (args...) =>
default_fn.apply(this, args)
fn.apply(this, args)
clazz.prototype["guard_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
if clazz.__super__?["guard_#{name}_daemon"]?
return false unless clazz.__super__?["guard_#{name}_daemon"].apply(this, args)
# specific daemons
for daemon in daemonology.guard
return false unless daemon[1].apply(this, daemon_args)
true
# this patches the original method to call advices and pass match data
unless clazz.prototype.hasOwnProperty(name) and daemonology.default.length > 0
if _.include(_.keys(clazz.prototype), name)
daemonology.default.push ['Combinator: 1', clazz.prototype[name]]
else if clazz.__super__?
daemonology.default.push ['Combinator: 1', (args...) ->
clazz.__super__[name].apply(this, args)
]
else if clazz::__proto__?
daemonology.default.push ['Combinator: 1', (args...) ->
clazz::__proto__[name].apply(this, args)
]
else
daemonology.default.push ['Combinator: 1', (args...) ->
throw 'No method or superclass defined for ' + name
]
clazz.prototype[name] = (args...) ->
if clazz.prototype["guard_#{name}_daemon"].apply(this, args)
clazz.prototype["before_#{name}_daemon"].apply(this, args)
_.tap clazz.prototype["around_#{name}_daemon"].call(this, _.last(daemonology.default)[1], args...), (retv) =>
clazz.prototype["after_#{name}_daemon"].apply(this, args)
# Add the advice to the appropriate list
if namespace?
if _.isFunction(advice)
advice = ["#{namespace}: #{daemonology[verb].length + 1}", advice]
else if _.isArray(advice)
advice = ["#{namespace}: #{advice[0]}", advice[1]]
else
key = _.keys(advice)[0]
advice = ["#{namespace}: #{key}", advice[key]]
else
if _.isFunction(advice)
advice = ["#{daemonology[verb].length + 1}", advice]
else if _.isArray(advice)
# fine!
else
key = _.keys(advice)[0]
advice = [key, advice[key]]
daemonology[verb].push advice
if pointcut_exprs.length is 1 and (expr = pointcut_exprs[0]) instanceof RegExp
_.each _.functions(clazz.prototype), (name) ->
if match_data = name.match(expr)
daemonize name, match_data
else
_.each pointcut_exprs, (name) ->
if _.isString(name)
if verb is 'default' and !clazz.prototype["before_#{name}_daemon"] and _.isFunction(advice) # not hasOwnProperty, anywhere in the chain!
clazz.prototype[name] = advice
else daemonize name
else throw 'Specify a pointcut with a single regular expression or a list of strings'
clazz.__YouAreDaChef
_.each ['default', 'before', 'around', 'after', 'guard', 'unless'], (verb) ->
Combinator.prototype[verb] = (args...) ->
if args.length is 1
if _.isFunction(args[0])
# default syntax
@advise verb, args[0], @namespace(), @clazzes(), @methods()
else
# bulk syntax
for own expr, advice of args[0]
@advise verb, advice, @namespace(), @clazzes(), [expr]
else if args.length > 1 and _.isString(args[0]) or args[0] instanceof RegExp
# classic syntax
[pointcut_exprs..., advice] = args
@advise verb, advice, @namespace(), @clazzes(), pointcut_exprs
else throw "What do I do with #{args} for #{verb}?"
this
Combinator::def = Combinator::define = Combinator::default
Combinator::when = Combinator::guard
Combinator::except_when = Combinator::unless
Combinator::tag = Combinator::namespace
Combinator::method = Combinator::methods
Combinator::clazz = Combinator::clazzes
YouAreDaChef = (args...) ->
new Combinator(args...)
_.each ['for', 'namespace', 'clazz', 'method', 'clazzes', 'methods', 'tag'], (definition_method_name) ->
YouAreDaChef[definition_method_name] = (args...) ->
_.tap new Combinator(), (combinator) ->
combinator[definition_method_name](args...)
_.extend YouAreDaChef,
inspect: (clazz) ->
clazz.__YouAreDaChef
module.exports = YouAreDaChef
_.defaults module.exports, {YouAreDaChef}
this
| 210802 | _ = require 'underscore'
class Combinator
constructor: (args...) ->
@clazzes []
@methods []
@for(args...)
this
namespace: (name = null) ->
if name?
@_namespace = name
this
else @_namespace
clazzes: (args...) ->
if args.length > 0
@_clazzes = args
this
else @_clazzes
methods: (args...) ->
if args.length > 0
@_methods = args
this
else @_methods
for: (args...) ->
if args.length > 0 and _.all(args, _.isFunction)
@clazzes(args...)
else if args.length is 1
if _.isString(args[0])
@namespace( args[0] )
else
@namespace( _.keys(args[0])[0] )
clazz_arg = args[0][@namespace()]
if _.isArray(clazz_arg)
@clazzes(clazz_arg...)
else if _.isFunction(clazz_arg)
@clazzes(clazz_arg)
else throw "What do I do with { #{@namespace()}: #{clazz_arg} }?"
this
advise: (verb, advice, namespace, clazzes, pointcut_exprs) ->
if verb is 'unless'
verb = 'guard'
_advice = advice
advice = (args...) ->
!_advice.apply(this, args)
throw "Need to define one or more classes" unless clazzes.length
_.each clazzes, (clazz) ->
daemonize = (name, inject = []) ->
daemonology = (clazz.__YouAreDaChef ?= {})[name] ?= {}
_.defaults daemonology,
before: []
after: []
around: []
guard: []
default: []
unless clazz.prototype.hasOwnProperty("before_#{name}_daemon")
clazz.prototype["before_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
# execute specific daemons for side-effects
for daemon in daemonology.before.reverse()
daemon[1].apply(this, daemon_args)
# try a super-daemon if available
clazz.__super__?["before_#{name}_daemon"]?.apply(this, args)
clazz.prototype["after_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
clazz.__super__?["after_#{name}_daemon"]?.apply(this, args)
# execute specific daemons for side-effects
for daemon in daemonology.after
daemon[1].apply(this, daemon_args)
clazz.prototype["around_#{name}_daemon"] = (default_fn, args...) ->
daemon_args = inject.concat args
fn_list = []
# try a super-daemon if available
if clazz.__super__?["around_#{name}_daemon"]?
fn_list.unshift clazz.__super__?["around_#{name}_daemon"]
# specific daemons
for daemon in daemonology.around
fn_list.unshift daemon[1]
fn = _.reduce fn_list, (acc, advice) ->
(args...) -> advice.call(this, acc, daemon_args...)
, (args...) =>
default_fn.apply(this, args)
fn.apply(this, args)
clazz.prototype["guard_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
if clazz.__super__?["guard_#{name}_daemon"]?
return false unless clazz.__super__?["guard_#{name}_daemon"].apply(this, args)
# specific daemons
for daemon in daemonology.guard
return false unless daemon[1].apply(this, daemon_args)
true
# this patches the original method to call advices and pass match data
unless clazz.prototype.hasOwnProperty(name) and daemonology.default.length > 0
if _.include(_.keys(clazz.prototype), name)
daemonology.default.push ['Combinator: 1', clazz.prototype[name]]
else if clazz.__super__?
daemonology.default.push ['Combinator: 1', (args...) ->
clazz.__super__[name].apply(this, args)
]
else if clazz::__proto__?
daemonology.default.push ['Combinator: 1', (args...) ->
clazz::__proto__[name].apply(this, args)
]
else
daemonology.default.push ['Combinator: 1', (args...) ->
throw 'No method or superclass defined for ' + name
]
clazz.prototype[name] = (args...) ->
if clazz.prototype["guard_#{name}_daemon"].apply(this, args)
clazz.prototype["before_#{name}_daemon"].apply(this, args)
_.tap clazz.prototype["around_#{name}_daemon"].call(this, _.last(daemonology.default)[1], args...), (retv) =>
clazz.prototype["after_#{name}_daemon"].apply(this, args)
# Add the advice to the appropriate list
if namespace?
if _.isFunction(advice)
advice = ["#{namespace}: #{daemonology[verb].length + 1}", advice]
else if _.isArray(advice)
advice = ["#{namespace}: #{advice[0]}", advice[1]]
else
key = <KEY>keys(advice)[0]
advice = ["#{namespace}: #{key}", advice[key]]
else
if _.isFunction(advice)
advice = ["#{daemonology[verb].length + 1}", advice]
else if _.isArray(advice)
# fine!
else
key = _.keys(advice)[0]
advice = [key, advice[key]]
daemonology[verb].push advice
if pointcut_exprs.length is 1 and (expr = pointcut_exprs[0]) instanceof RegExp
_.each _.functions(clazz.prototype), (name) ->
if match_data = name.match(expr)
daemonize name, match_data
else
_.each pointcut_exprs, (name) ->
if _.isString(name)
if verb is 'default' and !clazz.prototype["before_#{name}_daemon"] and _.isFunction(advice) # not hasOwnProperty, anywhere in the chain!
clazz.prototype[name] = advice
else daemonize name
else throw 'Specify a pointcut with a single regular expression or a list of strings'
clazz.__YouAreDaChef
_.each ['default', 'before', 'around', 'after', 'guard', 'unless'], (verb) ->
Combinator.prototype[verb] = (args...) ->
if args.length is 1
if _.isFunction(args[0])
# default syntax
@advise verb, args[0], @namespace(), @clazzes(), @methods()
else
# bulk syntax
for own expr, advice of args[0]
@advise verb, advice, @namespace(), @clazzes(), [expr]
else if args.length > 1 and _.isString(args[0]) or args[0] instanceof RegExp
# classic syntax
[pointcut_exprs..., advice] = args
@advise verb, advice, @namespace(), @clazzes(), pointcut_exprs
else throw "What do I do with #{args} for #{verb}?"
this
Combinator::def = Combinator::define = Combinator::default
Combinator::when = Combinator::guard
Combinator::except_when = Combinator::unless
Combinator::tag = Combinator::namespace
Combinator::method = Combinator::methods
Combinator::clazz = Combinator::clazzes
YouAreDaChef = (args...) ->
new Combinator(args...)
_.each ['for', 'namespace', 'clazz', 'method', 'clazzes', 'methods', 'tag'], (definition_method_name) ->
YouAreDaChef[definition_method_name] = (args...) ->
_.tap new Combinator(), (combinator) ->
combinator[definition_method_name](args...)
_.extend YouAreDaChef,
inspect: (clazz) ->
clazz.__YouAreDaChef
module.exports = YouAreDaChef
_.defaults module.exports, {YouAreDaChef}
this
| true | _ = require 'underscore'
class Combinator
constructor: (args...) ->
@clazzes []
@methods []
@for(args...)
this
namespace: (name = null) ->
if name?
@_namespace = name
this
else @_namespace
clazzes: (args...) ->
if args.length > 0
@_clazzes = args
this
else @_clazzes
methods: (args...) ->
if args.length > 0
@_methods = args
this
else @_methods
for: (args...) ->
if args.length > 0 and _.all(args, _.isFunction)
@clazzes(args...)
else if args.length is 1
if _.isString(args[0])
@namespace( args[0] )
else
@namespace( _.keys(args[0])[0] )
clazz_arg = args[0][@namespace()]
if _.isArray(clazz_arg)
@clazzes(clazz_arg...)
else if _.isFunction(clazz_arg)
@clazzes(clazz_arg)
else throw "What do I do with { #{@namespace()}: #{clazz_arg} }?"
this
advise: (verb, advice, namespace, clazzes, pointcut_exprs) ->
if verb is 'unless'
verb = 'guard'
_advice = advice
advice = (args...) ->
!_advice.apply(this, args)
throw "Need to define one or more classes" unless clazzes.length
_.each clazzes, (clazz) ->
daemonize = (name, inject = []) ->
daemonology = (clazz.__YouAreDaChef ?= {})[name] ?= {}
_.defaults daemonology,
before: []
after: []
around: []
guard: []
default: []
unless clazz.prototype.hasOwnProperty("before_#{name}_daemon")
clazz.prototype["before_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
# execute specific daemons for side-effects
for daemon in daemonology.before.reverse()
daemon[1].apply(this, daemon_args)
# try a super-daemon if available
clazz.__super__?["before_#{name}_daemon"]?.apply(this, args)
clazz.prototype["after_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
clazz.__super__?["after_#{name}_daemon"]?.apply(this, args)
# execute specific daemons for side-effects
for daemon in daemonology.after
daemon[1].apply(this, daemon_args)
clazz.prototype["around_#{name}_daemon"] = (default_fn, args...) ->
daemon_args = inject.concat args
fn_list = []
# try a super-daemon if available
if clazz.__super__?["around_#{name}_daemon"]?
fn_list.unshift clazz.__super__?["around_#{name}_daemon"]
# specific daemons
for daemon in daemonology.around
fn_list.unshift daemon[1]
fn = _.reduce fn_list, (acc, advice) ->
(args...) -> advice.call(this, acc, daemon_args...)
, (args...) =>
default_fn.apply(this, args)
fn.apply(this, args)
clazz.prototype["guard_#{name}_daemon"] = (args...) ->
daemon_args = inject.concat args
# try a super-daemon if available
if clazz.__super__?["guard_#{name}_daemon"]?
return false unless clazz.__super__?["guard_#{name}_daemon"].apply(this, args)
# specific daemons
for daemon in daemonology.guard
return false unless daemon[1].apply(this, daemon_args)
true
# this patches the original method to call advices and pass match data
unless clazz.prototype.hasOwnProperty(name) and daemonology.default.length > 0
if _.include(_.keys(clazz.prototype), name)
daemonology.default.push ['Combinator: 1', clazz.prototype[name]]
else if clazz.__super__?
daemonology.default.push ['Combinator: 1', (args...) ->
clazz.__super__[name].apply(this, args)
]
else if clazz::__proto__?
daemonology.default.push ['Combinator: 1', (args...) ->
clazz::__proto__[name].apply(this, args)
]
else
daemonology.default.push ['Combinator: 1', (args...) ->
throw 'No method or superclass defined for ' + name
]
clazz.prototype[name] = (args...) ->
if clazz.prototype["guard_#{name}_daemon"].apply(this, args)
clazz.prototype["before_#{name}_daemon"].apply(this, args)
_.tap clazz.prototype["around_#{name}_daemon"].call(this, _.last(daemonology.default)[1], args...), (retv) =>
clazz.prototype["after_#{name}_daemon"].apply(this, args)
# Add the advice to the appropriate list
if namespace?
if _.isFunction(advice)
advice = ["#{namespace}: #{daemonology[verb].length + 1}", advice]
else if _.isArray(advice)
advice = ["#{namespace}: #{advice[0]}", advice[1]]
else
key = PI:KEY:<KEY>END_PIkeys(advice)[0]
advice = ["#{namespace}: #{key}", advice[key]]
else
if _.isFunction(advice)
advice = ["#{daemonology[verb].length + 1}", advice]
else if _.isArray(advice)
# fine!
else
key = _.keys(advice)[0]
advice = [key, advice[key]]
daemonology[verb].push advice
if pointcut_exprs.length is 1 and (expr = pointcut_exprs[0]) instanceof RegExp
_.each _.functions(clazz.prototype), (name) ->
if match_data = name.match(expr)
daemonize name, match_data
else
_.each pointcut_exprs, (name) ->
if _.isString(name)
if verb is 'default' and !clazz.prototype["before_#{name}_daemon"] and _.isFunction(advice) # not hasOwnProperty, anywhere in the chain!
clazz.prototype[name] = advice
else daemonize name
else throw 'Specify a pointcut with a single regular expression or a list of strings'
clazz.__YouAreDaChef
_.each ['default', 'before', 'around', 'after', 'guard', 'unless'], (verb) ->
Combinator.prototype[verb] = (args...) ->
if args.length is 1
if _.isFunction(args[0])
# default syntax
@advise verb, args[0], @namespace(), @clazzes(), @methods()
else
# bulk syntax
for own expr, advice of args[0]
@advise verb, advice, @namespace(), @clazzes(), [expr]
else if args.length > 1 and _.isString(args[0]) or args[0] instanceof RegExp
# classic syntax
[pointcut_exprs..., advice] = args
@advise verb, advice, @namespace(), @clazzes(), pointcut_exprs
else throw "What do I do with #{args} for #{verb}?"
this
Combinator::def = Combinator::define = Combinator::default
Combinator::when = Combinator::guard
Combinator::except_when = Combinator::unless
Combinator::tag = Combinator::namespace
Combinator::method = Combinator::methods
Combinator::clazz = Combinator::clazzes
YouAreDaChef = (args...) ->
new Combinator(args...)
_.each ['for', 'namespace', 'clazz', 'method', 'clazzes', 'methods', 'tag'], (definition_method_name) ->
YouAreDaChef[definition_method_name] = (args...) ->
_.tap new Combinator(), (combinator) ->
combinator[definition_method_name](args...)
_.extend YouAreDaChef,
inspect: (clazz) ->
clazz.__YouAreDaChef
module.exports = YouAreDaChef
_.defaults module.exports, {YouAreDaChef}
this
|
[
{
"context": "\n### Ben Scott # 2015-10-26 # Perlin Noise ###\n\n'use strict' # j",
"end": 14,
"score": 0.9998645186424255,
"start": 5,
"tag": "NAME",
"value": "Ben Scott"
},
{
"context": "\n- `@faloff` **real** : Octaves for smoothing\n- `@yw0,@yw1` **int** : Y Wrap\n- `@zw0,@zw1` *... | code/lib/perlin.coffee | evan-erdos/webgl-solarsystem | 1 |
### Ben Scott # 2015-10-26 # Perlin Noise ###
'use strict' # just like JavaScript
### Constants & Aliases ###
{abs,cos,floor,PI,random,sqrt} = Math
cos_s = (i) -> 0.5*(1.0-cos(i*PI))
### `Perlin`
An instance of this class will return pseudo-random values
in a naturally ordered, harmonic sequence.
- `@inc` **real** : increment value
- `@size` **int** : size of array
- `@faloff` **real** : Octaves for smoothing
- `@yw0,@yw1` **int** : Y Wrap
- `@zw0,@zw1` **int** : Z Wrap
- `@octave` **real** : Octaves for smoothing
###
class Perlin
[@size,@falloff,@octave] = [4095,0.5,4]
[@yw0,@zw0] = [4,8]
[@yw1,@zw1] = [1<<@yw0,1<<@yw1]
constructor: (@inc=0.01) ->
@arr = (random() for i in [0..@size])
@xoff = 0.0
next: (x=-1,y=0,z=0) ->
[x,y,z] = [abs(x),abs(y),abs(z)]
[x0,y0,z0] = [floor(x),floor(y),floor(z)]
[x1,y1,z1] = [x-x0,y-y0,z-z0]
[rx1,ry1] = [0,0]
[r,amp] = [0,0.5]
[n0,n1,n2] = [0,0,0]
for i in [0...@octave]
d0 = x0+(y0<<yw0)+(z0<<zw0)
[rx1,ry1] = [cos_s(x1),cos_s(y1)]
n0 = @arr[d0&@size]
n0 += rx1*(@arr[(d0+1)&@size]-n0)
n1 = @arr[(d0+yw1)&@size]
n1 += rx1*(@arr[(d0+yw1+1)&@size]-n1)
n0 += ry1*(n1-n0)
d0 += @zw1
n1 = @arr[d0&@size]
n1 += rx1*(@arr[(d0+1)&@size]-n1)
n2 = @arr[(d0+@yw1)&@size]
n2 += rx1*(@arr[(d0+@yw1+1)&@size]-n2)
n1 += ry1*(n2-n1)
n0 += cos_s(z1)*(n1-n0)
r += n0*amp
amp *= @falloff
[x0,y0,z0] = [x0<<1,y0<<1,z0<<1]
[x1,y1,z1] = [x1*2,y1*2,z1*2]
[x0,x1] = [x0+1,x1-1] if (x1>=1.0)
[y0,y1] = [y0+1,y1-1] if (y1>=1.0)
[z0,z1] = [z0+1,z1-1] if (z1>=1.0)
return r
| 13939 |
### <NAME> # 2015-10-26 # Perlin Noise ###
'use strict' # just like JavaScript
### Constants & Aliases ###
{abs,cos,floor,PI,random,sqrt} = Math
cos_s = (i) -> 0.5*(1.0-cos(i*PI))
### `Perlin`
An instance of this class will return pseudo-random values
in a naturally ordered, harmonic sequence.
- `@inc` **real** : increment value
- `@size` **int** : size of array
- `@faloff` **real** : Octaves for smoothing
- `@yw0,@yw1` **int** : Y Wrap
- `@zw0,@zw1` **int** : Z Wrap
- `@octave` **real** : Octaves for smoothing
###
class Perlin
[@size,@falloff,@octave] = [4095,0.5,4]
[@yw0,@zw0] = [4,8]
[@yw1,@zw1] = [1<<@yw0,1<<@yw1]
constructor: (@inc=0.01) ->
@arr = (random() for i in [0..@size])
@xoff = 0.0
next: (x=-1,y=0,z=0) ->
[x,y,z] = [abs(x),abs(y),abs(z)]
[x0,y0,z0] = [floor(x),floor(y),floor(z)]
[x1,y1,z1] = [x-x0,y-y0,z-z0]
[rx1,ry1] = [0,0]
[r,amp] = [0,0.5]
[n0,n1,n2] = [0,0,0]
for i in [0...@octave]
d0 = x0+(y0<<yw0)+(z0<<zw0)
[rx1,ry1] = [cos_s(x1),cos_s(y1)]
n0 = @arr[d0&@size]
n0 += rx1*(@arr[(d0+1)&@size]-n0)
n1 = @arr[(d0+yw1)&@size]
n1 += rx1*(@arr[(d0+yw1+1)&@size]-n1)
n0 += ry1*(n1-n0)
d0 += @zw1
n1 = @arr[d0&@size]
n1 += rx1*(@arr[(d0+1)&@size]-n1)
n2 = @arr[(d0+@yw1)&@size]
n2 += rx1*(@arr[(d0+@yw1+1)&@size]-n2)
n1 += ry1*(n2-n1)
n0 += cos_s(z1)*(n1-n0)
r += n0*amp
amp *= @falloff
[x0,y0,z0] = [x0<<1,y0<<1,z0<<1]
[x1,y1,z1] = [x1*2,y1*2,z1*2]
[x0,x1] = [x0+1,x1-1] if (x1>=1.0)
[y0,y1] = [y0+1,y1-1] if (y1>=1.0)
[z0,z1] = [z0+1,z1-1] if (z1>=1.0)
return r
| true |
### PI:NAME:<NAME>END_PI # 2015-10-26 # Perlin Noise ###
'use strict' # just like JavaScript
### Constants & Aliases ###
{abs,cos,floor,PI,random,sqrt} = Math
cos_s = (i) -> 0.5*(1.0-cos(i*PI))
### `Perlin`
An instance of this class will return pseudo-random values
in a naturally ordered, harmonic sequence.
- `@inc` **real** : increment value
- `@size` **int** : size of array
- `@faloff` **real** : Octaves for smoothing
- `@yw0,@yw1` **int** : Y Wrap
- `@zw0,@zw1` **int** : Z Wrap
- `@octave` **real** : Octaves for smoothing
###
class Perlin
[@size,@falloff,@octave] = [4095,0.5,4]
[@yw0,@zw0] = [4,8]
[@yw1,@zw1] = [1<<@yw0,1<<@yw1]
constructor: (@inc=0.01) ->
@arr = (random() for i in [0..@size])
@xoff = 0.0
next: (x=-1,y=0,z=0) ->
[x,y,z] = [abs(x),abs(y),abs(z)]
[x0,y0,z0] = [floor(x),floor(y),floor(z)]
[x1,y1,z1] = [x-x0,y-y0,z-z0]
[rx1,ry1] = [0,0]
[r,amp] = [0,0.5]
[n0,n1,n2] = [0,0,0]
for i in [0...@octave]
d0 = x0+(y0<<yw0)+(z0<<zw0)
[rx1,ry1] = [cos_s(x1),cos_s(y1)]
n0 = @arr[d0&@size]
n0 += rx1*(@arr[(d0+1)&@size]-n0)
n1 = @arr[(d0+yw1)&@size]
n1 += rx1*(@arr[(d0+yw1+1)&@size]-n1)
n0 += ry1*(n1-n0)
d0 += @zw1
n1 = @arr[d0&@size]
n1 += rx1*(@arr[(d0+1)&@size]-n1)
n2 = @arr[(d0+@yw1)&@size]
n2 += rx1*(@arr[(d0+@yw1+1)&@size]-n2)
n1 += ry1*(n2-n1)
n0 += cos_s(z1)*(n1-n0)
r += n0*amp
amp *= @falloff
[x0,y0,z0] = [x0<<1,y0<<1,z0<<1]
[x1,y1,z1] = [x1*2,y1*2,z1*2]
[x0,x1] = [x0+1,x1-1] if (x1>=1.0)
[y0,y1] = [y0+1,y1-1] if (y1>=1.0)
[z0,z1] = [z0+1,z1-1] if (z1>=1.0)
return r
|
[
{
"context": "### (c) 2013 Maxim Litvinov ###\n\niw = 10\nih = 20\nfirstTimeout = 1000\nspeedInc",
"end": 27,
"score": 0.9997259974479675,
"start": 13,
"tag": "NAME",
"value": "Maxim Litvinov"
}
] | 3/tetris.coffee | metalim/21 | 0 | ### (c) 2013 Maxim Litvinov ###
iw = 10
ih = 20
firstTimeout = 1000
speedIncrease = 1.05
shapes = [
[
[0,0]
[0,0]
]
[
[1,1,1,1]
]
[
[null,2,2]
[2,2,null]
]
[
[null,null,3]
[3,3,3]
]
[
[4,4,null]
[null,4,4]
]
[
[5,null,null]
[5,5,5]
]
[
[null,6,null]
[6,6,6]
]
]
styles = [
'black'
'grey'
'darkred'
'red'
'darkblue'
'blue'
'darkgreen'
'green'
]
$ ->
canvas = document.getElementById 'canvas'
ctx = canvas.getContext '2d'
w=h=l=x0=y0=state=part=ticker=timeout=null
playing = true
gameover = ->
playing = false
ctx.font = 'bold 200% sans-serif'
ctx.strokeStyle = 'darkred'
ctx.fillStyle = 'red'
ctx.textAlign = 'center'
ctx.fillText 'GAME OVER',w/2,h/2
ctx.strokeText 'GAME OVER',w/2,h/2
console.log 'GAME OVER'
newPart = ->
part =
x:4,
y:0,
shape: shapes[Math.random()*shapes.length|0]
draw()
if haveSpace part.shape,part.x,part.y
ticker = setTimeout tick,timeout
else
gameover()
tick = ->
clearTimeout ticker
ticker = null
if tryMove part.x, part.y+1
ticker = setTimeout tick,timeout
else
drop()
reset = ->
state = (null for x in [0...iw] for y in [0...ih])
playing = true
timeout = firstTimeout
resize()
newPart()
haveSpace = (shape,x,y)->
if x<0 or x+shape[0].length>iw or y+shape.length>ih
return false
for row,dy in shape when y+dy>=0
for cell,dx in row when cell?
return false if state[y+dy][x+dx]?
return true
tryRotate = ->
cx = part.shape[0].length/2|0
cy = part.shape.length/2|0
newShape = (part.shape[y][x] for y in [part.shape.length-1..0] for x of part.shape[0])
r =
x:part.x+cx-cy
y:part.y+cy-cx
shape:newShape
if haveSpace r.shape,r.x,r.y
part = r
draw()
tryMove = (x,y)->
if haveSpace part.shape,x,y
part.x=x
part.y=y
draw()
drop = ->
clearTimeout ticker
ticker = null
while tryMove(part.x, part.y+1) then
for row,dy in part.shape when part.y+dy>=0
for cell,dx in row when cell?
state[part.y+dy][part.x+dx]=cell
for row,y in state
cells=0
for cell,x in row when cell?
cells+=1
if cells is row.length
state[y] = null
newState = (row for row in state when row?)
for row in state when not row?
newState.unshift((null for x in [0...iw]))
timeout /= speedIncrease
state = newState
newPart()
drawShape = (x,y,shape)->
ctx.strokeStyle = 'orange'
for row,dy in shape
for cell,dx in row when cell?
ctx.beginPath()
ctx.fillStyle = styles[cell]
ctx.rect x0+(dx+x)*l, y0+(dy+y)*l, l, l
ctx.fill()
ctx.stroke()
draw = ->
ctx.clearRect 0,0,w,h
ctx.strokeStyle = 'black'
ctx.strokeRect x0-1,y0-1,l*iw+2,l*ih+2 # glass frame
if state?
for row,y in state
for cell,x in row when cell?
ctx.fillStyle = styles[cell]
ctx.fillRect x0+x*l, y0+y*l, l, l
if part?
drawShape part.x, part.y, part.shape
window.onresize = resize = ->
w = canvas.width = canvas.offsetWidth
h = canvas.height = canvas.offsetHeight
l = Math.min w/iw, h/ih
x0 = (w-l*iw)/2
y0 = (h-l*ih)/2
draw()
reset()
document.onkeydown = keydown = (e)->
if not playing
reset()
return
if e.keyCode is 37 #left
tryMove part.x-1, part.y
else if e.keyCode is 38 #up
tryRotate()
else if e.keyCode is 39 #right
tryMove part.x+1, part.y
else if e.keyCode is 40 #down
drop()
#canvas.onclick = click = (e)->
| 37325 | ### (c) 2013 <NAME> ###
iw = 10
ih = 20
firstTimeout = 1000
speedIncrease = 1.05
shapes = [
[
[0,0]
[0,0]
]
[
[1,1,1,1]
]
[
[null,2,2]
[2,2,null]
]
[
[null,null,3]
[3,3,3]
]
[
[4,4,null]
[null,4,4]
]
[
[5,null,null]
[5,5,5]
]
[
[null,6,null]
[6,6,6]
]
]
styles = [
'black'
'grey'
'darkred'
'red'
'darkblue'
'blue'
'darkgreen'
'green'
]
$ ->
canvas = document.getElementById 'canvas'
ctx = canvas.getContext '2d'
w=h=l=x0=y0=state=part=ticker=timeout=null
playing = true
gameover = ->
playing = false
ctx.font = 'bold 200% sans-serif'
ctx.strokeStyle = 'darkred'
ctx.fillStyle = 'red'
ctx.textAlign = 'center'
ctx.fillText 'GAME OVER',w/2,h/2
ctx.strokeText 'GAME OVER',w/2,h/2
console.log 'GAME OVER'
newPart = ->
part =
x:4,
y:0,
shape: shapes[Math.random()*shapes.length|0]
draw()
if haveSpace part.shape,part.x,part.y
ticker = setTimeout tick,timeout
else
gameover()
tick = ->
clearTimeout ticker
ticker = null
if tryMove part.x, part.y+1
ticker = setTimeout tick,timeout
else
drop()
reset = ->
state = (null for x in [0...iw] for y in [0...ih])
playing = true
timeout = firstTimeout
resize()
newPart()
haveSpace = (shape,x,y)->
if x<0 or x+shape[0].length>iw or y+shape.length>ih
return false
for row,dy in shape when y+dy>=0
for cell,dx in row when cell?
return false if state[y+dy][x+dx]?
return true
tryRotate = ->
cx = part.shape[0].length/2|0
cy = part.shape.length/2|0
newShape = (part.shape[y][x] for y in [part.shape.length-1..0] for x of part.shape[0])
r =
x:part.x+cx-cy
y:part.y+cy-cx
shape:newShape
if haveSpace r.shape,r.x,r.y
part = r
draw()
tryMove = (x,y)->
if haveSpace part.shape,x,y
part.x=x
part.y=y
draw()
drop = ->
clearTimeout ticker
ticker = null
while tryMove(part.x, part.y+1) then
for row,dy in part.shape when part.y+dy>=0
for cell,dx in row when cell?
state[part.y+dy][part.x+dx]=cell
for row,y in state
cells=0
for cell,x in row when cell?
cells+=1
if cells is row.length
state[y] = null
newState = (row for row in state when row?)
for row in state when not row?
newState.unshift((null for x in [0...iw]))
timeout /= speedIncrease
state = newState
newPart()
drawShape = (x,y,shape)->
ctx.strokeStyle = 'orange'
for row,dy in shape
for cell,dx in row when cell?
ctx.beginPath()
ctx.fillStyle = styles[cell]
ctx.rect x0+(dx+x)*l, y0+(dy+y)*l, l, l
ctx.fill()
ctx.stroke()
draw = ->
ctx.clearRect 0,0,w,h
ctx.strokeStyle = 'black'
ctx.strokeRect x0-1,y0-1,l*iw+2,l*ih+2 # glass frame
if state?
for row,y in state
for cell,x in row when cell?
ctx.fillStyle = styles[cell]
ctx.fillRect x0+x*l, y0+y*l, l, l
if part?
drawShape part.x, part.y, part.shape
window.onresize = resize = ->
w = canvas.width = canvas.offsetWidth
h = canvas.height = canvas.offsetHeight
l = Math.min w/iw, h/ih
x0 = (w-l*iw)/2
y0 = (h-l*ih)/2
draw()
reset()
document.onkeydown = keydown = (e)->
if not playing
reset()
return
if e.keyCode is 37 #left
tryMove part.x-1, part.y
else if e.keyCode is 38 #up
tryRotate()
else if e.keyCode is 39 #right
tryMove part.x+1, part.y
else if e.keyCode is 40 #down
drop()
#canvas.onclick = click = (e)->
| true | ### (c) 2013 PI:NAME:<NAME>END_PI ###
iw = 10
ih = 20
firstTimeout = 1000
speedIncrease = 1.05
shapes = [
[
[0,0]
[0,0]
]
[
[1,1,1,1]
]
[
[null,2,2]
[2,2,null]
]
[
[null,null,3]
[3,3,3]
]
[
[4,4,null]
[null,4,4]
]
[
[5,null,null]
[5,5,5]
]
[
[null,6,null]
[6,6,6]
]
]
styles = [
'black'
'grey'
'darkred'
'red'
'darkblue'
'blue'
'darkgreen'
'green'
]
$ ->
canvas = document.getElementById 'canvas'
ctx = canvas.getContext '2d'
w=h=l=x0=y0=state=part=ticker=timeout=null
playing = true
gameover = ->
playing = false
ctx.font = 'bold 200% sans-serif'
ctx.strokeStyle = 'darkred'
ctx.fillStyle = 'red'
ctx.textAlign = 'center'
ctx.fillText 'GAME OVER',w/2,h/2
ctx.strokeText 'GAME OVER',w/2,h/2
console.log 'GAME OVER'
newPart = ->
part =
x:4,
y:0,
shape: shapes[Math.random()*shapes.length|0]
draw()
if haveSpace part.shape,part.x,part.y
ticker = setTimeout tick,timeout
else
gameover()
tick = ->
clearTimeout ticker
ticker = null
if tryMove part.x, part.y+1
ticker = setTimeout tick,timeout
else
drop()
reset = ->
state = (null for x in [0...iw] for y in [0...ih])
playing = true
timeout = firstTimeout
resize()
newPart()
haveSpace = (shape,x,y)->
if x<0 or x+shape[0].length>iw or y+shape.length>ih
return false
for row,dy in shape when y+dy>=0
for cell,dx in row when cell?
return false if state[y+dy][x+dx]?
return true
tryRotate = ->
cx = part.shape[0].length/2|0
cy = part.shape.length/2|0
newShape = (part.shape[y][x] for y in [part.shape.length-1..0] for x of part.shape[0])
r =
x:part.x+cx-cy
y:part.y+cy-cx
shape:newShape
if haveSpace r.shape,r.x,r.y
part = r
draw()
tryMove = (x,y)->
if haveSpace part.shape,x,y
part.x=x
part.y=y
draw()
drop = ->
clearTimeout ticker
ticker = null
while tryMove(part.x, part.y+1) then
for row,dy in part.shape when part.y+dy>=0
for cell,dx in row when cell?
state[part.y+dy][part.x+dx]=cell
for row,y in state
cells=0
for cell,x in row when cell?
cells+=1
if cells is row.length
state[y] = null
newState = (row for row in state when row?)
for row in state when not row?
newState.unshift((null for x in [0...iw]))
timeout /= speedIncrease
state = newState
newPart()
drawShape = (x,y,shape)->
ctx.strokeStyle = 'orange'
for row,dy in shape
for cell,dx in row when cell?
ctx.beginPath()
ctx.fillStyle = styles[cell]
ctx.rect x0+(dx+x)*l, y0+(dy+y)*l, l, l
ctx.fill()
ctx.stroke()
draw = ->
ctx.clearRect 0,0,w,h
ctx.strokeStyle = 'black'
ctx.strokeRect x0-1,y0-1,l*iw+2,l*ih+2 # glass frame
if state?
for row,y in state
for cell,x in row when cell?
ctx.fillStyle = styles[cell]
ctx.fillRect x0+x*l, y0+y*l, l, l
if part?
drawShape part.x, part.y, part.shape
window.onresize = resize = ->
w = canvas.width = canvas.offsetWidth
h = canvas.height = canvas.offsetHeight
l = Math.min w/iw, h/ih
x0 = (w-l*iw)/2
y0 = (h-l*ih)/2
draw()
reset()
document.onkeydown = keydown = (e)->
if not playing
reset()
return
if e.keyCode is 37 #left
tryMove part.x-1, part.y
else if e.keyCode is 38 #up
tryRotate()
else if e.keyCode is 39 #right
tryMove part.x+1, part.y
else if e.keyCode is 40 #down
drop()
#canvas.onclick = click = (e)->
|
[
{
"context": "late:'hi ${user}!'\n data:\n user: 'Mikey'\n expect(result).to.be.equal 'hi Mikey!'\n\n ",
"end": 747,
"score": 0.7746235728263855,
"start": 742,
"tag": "NAME",
"value": "Mikey"
},
{
"context": "{user}} {{user}}!'\n data:\n user: 'M... | test/index-test.coffee | snowyu/task-registry-template-engine-lodash.js | 0 | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
assert = chai.assert
chai.use(sinonChai)
setImmediate = setImmediate || process.nextTick
LodashTemplateEngine = require '../src'
TemplateEngine = require 'task-registry-template-engine'
describe 'Lodash Template Engine', ->
templateEngine = TemplateEngine 'Lodash'
it 'should get the lodash template engine', ->
expect(templateEngine).to.instanceof LodashTemplateEngine
describe 'executeSync', ->
it 'should render a template', ->
result = templateEngine.executeSync
template:'hi ${user}!'
data:
user: 'Mikey'
expect(result).to.be.equal 'hi Mikey!'
it 'should render a template via new interpolate', ->
result = templateEngine.executeSync
template:'hi {{user}} {{user}}!'
data:
user: 'Mikey'
interpolate: /{{([\s\S]+?)}}/g
expect(result).to.be.equal 'hi Mikey Mikey!'
it 'should render a template via difference data', ->
result = templateEngine.executeSync
template:'hi ${user}!'
data:
user: 'Mikey'
write: true
expect(result).to.be.equal 'hi Mikey!'
result = templateEngine.executeSync data: user: 'Jacky'
expect(result).to.be.equal 'hi Jacky!'
describe 'execute', ->
it 'should render a template', (done)->
templateEngine.execute
template:'hi ${user}!'
data:
user: 'Mikey'
, (err, result)->
unless err
expect(result).to.be.equal 'hi Mikey!'
done(err)
it 'should render a template via new interpolate', (done)->
templateEngine.execute
template:'hi {{user}} {{user}}!'
data:
user: 'Mikey'
interpolate: /{{([\s\S]+?)}}/g
, (err, result)->
unless err
expect(result).to.be.equal 'hi Mikey Mikey!'
done(err)
it 'should render a template via difference data', (done)->
templateEngine.execute
template:'hi ${user}!'
data:
user: 'Mikey'
write: true
, (err, result)->
unless err
expect(result).to.be.equal 'hi Mikey!'
result = templateEngine.executeSync data: user: 'Jacky'
expect(result).to.be.equal 'hi Jacky!'
done(err)
| 126549 | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
assert = chai.assert
chai.use(sinonChai)
setImmediate = setImmediate || process.nextTick
LodashTemplateEngine = require '../src'
TemplateEngine = require 'task-registry-template-engine'
describe 'Lodash Template Engine', ->
templateEngine = TemplateEngine 'Lodash'
it 'should get the lodash template engine', ->
expect(templateEngine).to.instanceof LodashTemplateEngine
describe 'executeSync', ->
it 'should render a template', ->
result = templateEngine.executeSync
template:'hi ${user}!'
data:
user: '<NAME>'
expect(result).to.be.equal 'hi Mikey!'
it 'should render a template via new interpolate', ->
result = templateEngine.executeSync
template:'hi {{user}} {{user}}!'
data:
user: '<NAME>'
interpolate: /{{([\s\S]+?)}}/g
expect(result).to.be.equal 'hi Mikey Mikey!'
it 'should render a template via difference data', ->
result = templateEngine.executeSync
template:'hi ${user}!'
data:
user: '<NAME>'
write: true
expect(result).to.be.equal 'hi Mikey!'
result = templateEngine.executeSync data: user: '<NAME>'
expect(result).to.be.equal 'hi Jacky!'
describe 'execute', ->
it 'should render a template', (done)->
templateEngine.execute
template:'hi ${user}!'
data:
user: '<NAME>key'
, (err, result)->
unless err
expect(result).to.be.equal 'hi Mikey!'
done(err)
it 'should render a template via new interpolate', (done)->
templateEngine.execute
template:'hi {{user}} {{user}}!'
data:
user: '<NAME>key'
interpolate: /{{([\s\S]+?)}}/g
, (err, result)->
unless err
expect(result).to.be.equal 'hi Mikey Mikey!'
done(err)
it 'should render a template via difference data', (done)->
templateEngine.execute
template:'hi ${user}!'
data:
user: '<NAME>'
write: true
, (err, result)->
unless err
expect(result).to.be.equal 'hi <NAME>!'
result = templateEngine.executeSync data: user: 'Jack<NAME>'
expect(result).to.be.equal 'hi <NAME>!'
done(err)
| true | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
should = chai.should()
expect = chai.expect
assert = chai.assert
chai.use(sinonChai)
setImmediate = setImmediate || process.nextTick
LodashTemplateEngine = require '../src'
TemplateEngine = require 'task-registry-template-engine'
describe 'Lodash Template Engine', ->
templateEngine = TemplateEngine 'Lodash'
it 'should get the lodash template engine', ->
expect(templateEngine).to.instanceof LodashTemplateEngine
describe 'executeSync', ->
it 'should render a template', ->
result = templateEngine.executeSync
template:'hi ${user}!'
data:
user: 'PI:NAME:<NAME>END_PI'
expect(result).to.be.equal 'hi Mikey!'
it 'should render a template via new interpolate', ->
result = templateEngine.executeSync
template:'hi {{user}} {{user}}!'
data:
user: 'PI:NAME:<NAME>END_PI'
interpolate: /{{([\s\S]+?)}}/g
expect(result).to.be.equal 'hi Mikey Mikey!'
it 'should render a template via difference data', ->
result = templateEngine.executeSync
template:'hi ${user}!'
data:
user: 'PI:NAME:<NAME>END_PI'
write: true
expect(result).to.be.equal 'hi Mikey!'
result = templateEngine.executeSync data: user: 'PI:NAME:<NAME>END_PI'
expect(result).to.be.equal 'hi Jacky!'
describe 'execute', ->
it 'should render a template', (done)->
templateEngine.execute
template:'hi ${user}!'
data:
user: 'PI:NAME:<NAME>END_PIkey'
, (err, result)->
unless err
expect(result).to.be.equal 'hi Mikey!'
done(err)
it 'should render a template via new interpolate', (done)->
templateEngine.execute
template:'hi {{user}} {{user}}!'
data:
user: 'PI:NAME:<NAME>END_PIkey'
interpolate: /{{([\s\S]+?)}}/g
, (err, result)->
unless err
expect(result).to.be.equal 'hi Mikey Mikey!'
done(err)
it 'should render a template via difference data', (done)->
templateEngine.execute
template:'hi ${user}!'
data:
user: 'PI:NAME:<NAME>END_PI'
write: true
, (err, result)->
unless err
expect(result).to.be.equal 'hi PI:NAME:<NAME>END_PI!'
result = templateEngine.executeSync data: user: 'JackPI:NAME:<NAME>END_PI'
expect(result).to.be.equal 'hi PI:NAME:<NAME>END_PI!'
done(err)
|
[
{
"context": "image: \"/images/missing_image.png\"\n name: \"Pablo Picasso\"\n sameAs: \"undefined/artist/#{artist.id}\"\n",
"end": 1796,
"score": 0.9997953772544861,
"start": 1783,
"tag": "NAME",
"value": "Pablo Picasso"
},
{
"context": " ->\n @partnerShow.attribu... | src/desktop/test/models/partner_show.coffee | kanaabe/force | 0 | _ = require 'underscore'
{ fabricate } = require 'antigravity'
sd = require('sharify').data
should = require 'should'
Backbone = require 'backbone'
PartnerShow = require '../../models/partner_show'
PartnerLocation = require '../../models/partner_location'
FairLocation = require '../../models/partner_location'
Fair = require '../../models/fair'
sinon = require 'sinon'
moment = require 'moment'
describe 'PartnerShow', ->
beforeEach ->
sinon.stub Backbone, 'sync'
@partnerShow = new PartnerShow fabricate('show')
afterEach ->
Backbone.sync.restore()
describe '#url', ->
it 'includes a partner in the url if the model has one', ->
partnerShow = new PartnerShow id: 'slug-for-show', partner: fabricate('partner')
partnerShow.url().should.equal "#{sd.API_URL}/api/v1/partner/#{partnerShow.get('partner').id}/show/#{partnerShow.get('id')}"
it 'returns a URL with no id for new models', ->
partnerShow = new PartnerShow id: 'slug-for-show'
partnerShow.url().should.equal "#{sd.API_URL}/api/v1/show/#{partnerShow.get('id')}"
describe '#toJSONLD', ->
it 'returns valid json', ->
artist = fabricate 'artist'
@partnerShow.set artists: [artist]
json = @partnerShow.toJSONLD()
json['@context'].should.equal 'http://schema.org'
json['@type'].should.equal 'Event'
json.name.should.equal 'Inez & Vinoodh'
json.location.name.should.equal 'Gagosian Gallery'
json.location.address.should.eql
'@type': 'PostalAddress'
streetAddress: '529 W 20th St.2nd Floor'
addressLocality: 'New York'
addressRegion: 'NY'
postalCode: '10011'
json.performer[0].should.eql {
"@type": "Person"
image: "/images/missing_image.png"
name: "Pablo Picasso"
sameAs: "undefined/artist/#{artist.id}"
}
describe '#toPageTitle', ->
it 'creates a title defensively handling empty or missing values', ->
@partnerShow.toPageTitle().should.containEql "Inez & Vinoodh | Gagosian Gallery |"
it 'omits the artworks for sale bit if the partner is not a gallery', ->
@partnerShow.attributes.partner.name = "White Cube"
@partnerShow.attributes.partner.type = "Institution"
@partnerShow.toPageTitle().should.not.containEql ", Artwork for Sale"
describe '#toPageDescription', ->
it 'correctly renders the meta description', ->
@partnerShow.toPageDescription().should.containEql 'Past show at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
it 'adds a single artist to the meta description', ->
@partnerShow.set 'artists', [fabricate('artist')]
@partnerShow.toPageDescription().should.containEql 'Past show featuring works by Pablo Picasso at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
it 'adds multiple artists to the meta description', ->
@partnerShow.set 'artists', [fabricate('artist'), fabricate('artist')]
@partnerShow.toPageDescription().should.containEql 'Past show featuring works by Pablo Picasso and Pablo Picasso at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
describe '#location', ->
it 'returns a partner location', ->
show = new PartnerShow fabricate 'show'
show.location().should.be.instanceOf(PartnerLocation)
it 'returns a fair location', ->
show = new PartnerShow(fabricate 'show',
fair_location:
display: 'Booth 1234'
)
show.location().should.be.instanceOf(FairLocation)
describe '#isOnlineExclusive', ->
it 'returns false when there is partner location', ->
show = new PartnerShow(fabricate 'show',
location: new PartnerLocation(fabricate: 'partner_location'),
partner_city: null,
fair: null)
show.isOnlineExclusive().should.be.false()
it 'returns false when there is a partner_city', ->
show = new PartnerShow(fabricate 'show',
location: null,
partner_city: 'Tehran',
fair: null
)
show.isOnlineExclusive().should.be.false()
it 'returns false when its a fair show', ->
show = new PartnerShow(fabricate 'show',
location: null,
fair: new Fair(fabricate: 'fair'),
partner_city: null
)
show.isOnlineExclusive().should.be.false()
it 'returns true when there is no location', ->
show = new PartnerShow(fabricate 'show',
location: null,
partner_location: null,
fair: null
)
show.isOnlineExclusive().should.be.true()
describe '#formatShowOrFairCity', ->
it 'returns undefined without location and fair', ->
show = new PartnerShow fabricate 'show', fair: null, location: null
_.isUndefined(show.formatShowOrFairCity()).should.be.true()
describe '#runningDates', ->
it 'gives a formatted date span for the running dates', ->
@partnerShow.runningDates().should.equal "Jul 12th – Aug 23rd 2013"
describe '#shareTitle', ->
it "includes fair location", ->
@partnerShow.set
fair_location:
display: 'Booth 1234'
@partnerShow.shareTitle().should.equal "Inez & Vinoodh, Booth 1234 See it on @artsy"
it "include partner name", ->
@partnerShow.shareTitle().should.equal 'See "Inez & Vinoodh" at Gagosian Gallery on @artsy'
describe '#formatArtists', ->
beforeEach ->
@partnerShow.set
artists: [
fabricate('artist', id: 'picasso-1')
fabricate('artist', id: 'picasso-2')
fabricate('artist', id: 'picasso-3')
fabricate('artist', id: 'picasso-4')
]
it "correctly limits artists", ->
@partnerShow.formatArtists(2).should.equal "<a href='/artist/picasso-1'>Pablo Picasso</a>, <a href='/artist/picasso-2'>Pablo Picasso</a> and 2 more"
it "correctly limits artists", ->
@partnerShow.formatArtists().should.equal "<a href='/artist/picasso-1'>Pablo Picasso</a>, <a href='/artist/picasso-2'>Pablo Picasso</a>, <a href='/artist/picasso-3'>Pablo Picasso</a>, <a href='/artist/picasso-4'>Pablo Picasso</a>"
describe '#fairLocationDisplay', ->
it "Returns fair location", ->
@partnerShow.set
fair_location:
display: 'Booth 1234'
@partnerShow.fairLocationDisplay().should.equal "<i>New York</i> – Booth 1234"
it 'works with a missing fair location', ->
@partnerShow.set fair_location: null
@partnerShow.fairLocationDisplay().should.equal "<i>New York</i> – "
describe '#posterImageUrl', ->
it 'returns an image', ->
@partnerShow.posterImageUrl().should.containEql 'partner_show_images/51f6a51d275b24a787000c36/1/large.jpg'
it 'returns a featured image', ->
@partnerShow.posterImageUrl(true).should.containEql '/partner_show_images/51f6a51d275b24a787000c36/1/featured.jpg'
it 'returns larger if featured or large is unavailable', (done) ->
@partnerShow.on 'fetch:posterImageUrl', (url) ->
url.should.containEql 'additional_images/4e7cb83e1c80dd00010038e2/1/large.jpg'
done()
@partnerShow.unset 'image_versions'
@partnerShow.posterImageUrl()
Backbone.sync.args[0][2].url.should.containEql "/api/v1/partner/#{@partnerShow.get('partner').id}/show/#{@partnerShow.id}/artworks"
Backbone.sync.args[0][2].success [fabricate 'artwork']
it 'returns empty when there really is no image', ->
@partnerShow.unset 'image_versions'
@partnerShow.posterImageUrl()
Backbone.sync.args[0][2].success []
describe '#openingThisWeek', ->
beforeEach ->
@starting = '2015-04-09T04:00:00+00:00'
@partnerShow.set 'start_at', @starting
xit 'returns a boolean if the show opens within "this week"', ->
# if today is a tuesday and show is opening the next thursday
@today = moment('2015-04-08T04:00:00+00:00')
@partnerShow.openingThisWeek().should.be.false()
# if today is the prior saturday and show is opening on a thursday
@today = moment('2015-04-04T04:00:00+00:00')
@partnerShow.openingThisWeek(@today).should.be.true()
# if today is the prior thursday and the show is opening on a thursday
@today = moment('2015-04-02T04:00:00+00:00')
@partnerShow.openingThisWeek(@today).should.be.false()
describe '#isEndingSoon', ->
beforeEach ->
@ending = '2013-08-23T04:00:00+00:00'
@partnerShow.set 'end_at', @ending
it 'returns a boolean if the show ends within the desired timeframe (default 5 days)', ->
@partnerShow.isEndingSoon(5, moment(@ending).subtract(3, 'days')).should.be.true()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(5, 'days')).should.be.true()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(5.5, 'days')).should.be.false()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(6, 'days')).should.be.false()
it 'supports custom day values for "soon"', ->
@partnerShow.isEndingSoon(2, moment(@ending).subtract(3, 'days')).should.be.false()
@partnerShow.isEndingSoon(3, moment(@ending).subtract(3, 'days')).should.be.true()
describe '#endingIn', ->
beforeEach ->
@ending = '2013-08-23T04:00:00+00:00'
@partnerShow.set 'end_at', @ending
it 'returns the correct string', ->
@partnerShow.endingIn(moment(@ending).subtract(3, 'days')).should.equal 'in 3 days'
@partnerShow.endingIn(moment(@ending).subtract(1, 'day')).should.equal 'in 1 day'
@partnerShow.endingIn(moment(@ending)).should.equal 'today'
describe '#isOpeningToday', ->
beforeEach ->
@starting = '2013-07-12T04:00:00+00:00'
@partnerShow.set 'start_at', @starting
it 'returns a boolean value for whether or not the show opens *today*', ->
@partnerShow.isOpeningToday(moment(@starting).subtract(1, 'day')).should.be.false()
@partnerShow.isOpeningToday(moment(@starting).add(1, 'day')).should.be.false()
@partnerShow.isOpeningToday(moment(@starting)).should.be.true()
describe '#contextualLabel', ->
describe 'with name', ->
new PartnerShow(artists: [0, 0, 0], fair: null).contextualLabel('Foobar').should.equal 'Group Show including Foobar'
new PartnerShow(artists: [0], fair: null).contextualLabel('Foobar').should.equal 'Solo Show'
new PartnerShow(artists: [0], fair: 'existy').contextualLabel('Foobar').should.equal 'Fair Booth including Foobar'
new PartnerShow(artists: [0, 0, 0], fair: 'existy').contextualLabel('Foobar').should.equal 'Fair Booth including Foobar'
describe 'without name', ->
it 'returns the correct label', ->
new PartnerShow(artists: [0, 0, 0], fair: null).contextualLabel().should.equal 'Group Show'
new PartnerShow(artists: [0], fair: null).contextualLabel().should.equal 'Solo Show'
new PartnerShow(artists: [0], fair: 'existy').contextualLabel().should.equal 'Fair Booth'
new PartnerShow(artists: [0, 0, 0], fair: 'existy').contextualLabel().should.equal 'Fair Booth'
describe '#daySchedules', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
it 'returns true if a show has day schedules', ->
@partnerShow.daySchedules().should.be.true()
it 'returns false if a show has no schedules', ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location',
day_schedules: []
@partnerShow.daySchedules().should.be.false()
describe '#formatDaySchedule', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
@partnerShow.get('location').day_schedules.push
_id: "5543d89472616978f1e40100",
start_time: 76000,
end_time: 88400,
day_of_week: "Tuesday"
it 'returns the formatted day schedule for a day of the week with a day schedule', ->
@partnerShow.formatDaySchedule('Monday').should.match { start: 'Monday', hours: '10am–7pm' }
it 'returns the formatted day schedule for a day of the week with no day schedule', ->
@partnerShow.formatDaySchedule('Friday').should.match { start: 'Friday', hours: 'Closed' }
it 'returns the formatted day schedule for a day with multiple schedule blocks', ->
@partnerShow.formatDaySchedule('Tuesday').should.match { start: 'Tuesday', hours: '10am–7pm, 9:06pm–12:33am' }
describe '#formatDaySchedules', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
@partnerShow.get('location').day_schedules.push
_id: "5543d89472616978f1e40100",
start_time: 76000,
end_time: 88400,
day_of_week: "Tuesday"
it 'returns a formatted string describing the days open and hours for the show', ->
@partnerShow.formatDaySchedules().should.match [
{ hours: '10am–7pm', start: 'Monday' }
{ hours: '10am–7pm, 9:06pm–12:33am', start: 'Tuesday' }
{ hours: '10am–7pm', start: 'Wednesday' }
{ hours: '10am–7pm', start: 'Thursday' }
{ hours: 'Closed', start: 'Friday' }
{ hours: 'Closed', start: 'Saturday' }
{ hours: '10am–7pm', start: 'Sunday' }
]
describe '#formatModalDaySchedules', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
it 'returns a formatted string describing the days open and hours for the show', ->
@partnerShow.formatModalDaySchedules().should.match [ days: 'Monday–Thursday, Sunday', hours: '10am–7pm' ]
it 'returns a correctly formatted string when a show has unusual hours', ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location',
day_schedules: [
{
_id: "5543d893726169750b990100",
start_time: 42359,
end_time: 68992,
day_of_week: "Wednesday"
}, {
_id: "5543d8937261697591bd0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Monday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 42359,
end_time: 68992,
day_of_week: "Tuesday"
}, {
_id: "5543d8947261690f169d0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Saturday"
}, {
_id: "5543d8947261695aea200200",
start_time: 42359,
end_time: 68992,
day_of_week: "Thursday"
}
]
@partnerShow.formatModalDaySchedules().should.match [
{ days: 'Monday, Saturday', hours: '12:30am–7:30pm' }
{ days: 'Tuesday–Thursday', hours: '11:45am–7:09pm' }
]
it 'returns a correctly formatted string when a show has overlapping days and multiple time blocks', ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location',
day_schedules: [
{
_id: "5543d893726169750b990100",
start_time: 42359,
end_time: 68992,
day_of_week: "Wednesday"
}, {
_id: "5543d8937261697591bd0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Monday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 42359,
end_time: 68992,
day_of_week: "Tuesday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 82359,
end_time: 98992,
day_of_week: "Tuesday"
}, {
_id: "5543d8947261690f169d0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Saturday"
}, {
_id: "5543d8947261695aea200200",
start_time: 42359,
end_time: 68992,
day_of_week: "Thursday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 82359,
end_time: 98992,
day_of_week: "Wednesday"
}
]
@partnerShow.formatModalDaySchedules().should.match [
{ days: 'Monday, Saturday', hours: '12:30am–7:30pm' },
{ days: 'Tuesday–Wednesday', hours: '11:45am–7:09pm, 10:52pm–3:29am' },
{ days: 'Thursday', hours: '11:45am–7:09pm' }
]
| 174265 | _ = require 'underscore'
{ fabricate } = require 'antigravity'
sd = require('sharify').data
should = require 'should'
Backbone = require 'backbone'
PartnerShow = require '../../models/partner_show'
PartnerLocation = require '../../models/partner_location'
FairLocation = require '../../models/partner_location'
Fair = require '../../models/fair'
sinon = require 'sinon'
moment = require 'moment'
describe 'PartnerShow', ->
beforeEach ->
sinon.stub Backbone, 'sync'
@partnerShow = new PartnerShow fabricate('show')
afterEach ->
Backbone.sync.restore()
describe '#url', ->
it 'includes a partner in the url if the model has one', ->
partnerShow = new PartnerShow id: 'slug-for-show', partner: fabricate('partner')
partnerShow.url().should.equal "#{sd.API_URL}/api/v1/partner/#{partnerShow.get('partner').id}/show/#{partnerShow.get('id')}"
it 'returns a URL with no id for new models', ->
partnerShow = new PartnerShow id: 'slug-for-show'
partnerShow.url().should.equal "#{sd.API_URL}/api/v1/show/#{partnerShow.get('id')}"
describe '#toJSONLD', ->
it 'returns valid json', ->
artist = fabricate 'artist'
@partnerShow.set artists: [artist]
json = @partnerShow.toJSONLD()
json['@context'].should.equal 'http://schema.org'
json['@type'].should.equal 'Event'
json.name.should.equal 'Inez & Vinoodh'
json.location.name.should.equal 'Gagosian Gallery'
json.location.address.should.eql
'@type': 'PostalAddress'
streetAddress: '529 W 20th St.2nd Floor'
addressLocality: 'New York'
addressRegion: 'NY'
postalCode: '10011'
json.performer[0].should.eql {
"@type": "Person"
image: "/images/missing_image.png"
name: "<NAME>"
sameAs: "undefined/artist/#{artist.id}"
}
describe '#toPageTitle', ->
it 'creates a title defensively handling empty or missing values', ->
@partnerShow.toPageTitle().should.containEql "Inez & Vinoodh | Gagosian Gallery |"
it 'omits the artworks for sale bit if the partner is not a gallery', ->
@partnerShow.attributes.partner.name = "<NAME>"
@partnerShow.attributes.partner.type = "Institution"
@partnerShow.toPageTitle().should.not.containEql ", Artwork for Sale"
describe '#toPageDescription', ->
it 'correctly renders the meta description', ->
@partnerShow.toPageDescription().should.containEql 'Past show at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
it 'adds a single artist to the meta description', ->
@partnerShow.set 'artists', [fabricate('artist')]
@partnerShow.toPageDescription().should.containEql 'Past show featuring works by Pablo Picasso at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
it 'adds multiple artists to the meta description', ->
@partnerShow.set 'artists', [fabricate('artist'), fabricate('artist')]
@partnerShow.toPageDescription().should.containEql 'Past show featuring works by Pablo Picasso and Pablo Picasso at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
describe '#location', ->
it 'returns a partner location', ->
show = new PartnerShow fabricate 'show'
show.location().should.be.instanceOf(PartnerLocation)
it 'returns a fair location', ->
show = new PartnerShow(fabricate 'show',
fair_location:
display: 'Booth 1234'
)
show.location().should.be.instanceOf(FairLocation)
describe '#isOnlineExclusive', ->
it 'returns false when there is partner location', ->
show = new PartnerShow(fabricate 'show',
location: new PartnerLocation(fabricate: 'partner_location'),
partner_city: null,
fair: null)
show.isOnlineExclusive().should.be.false()
it 'returns false when there is a partner_city', ->
show = new PartnerShow(fabricate 'show',
location: null,
partner_city: 'Tehran',
fair: null
)
show.isOnlineExclusive().should.be.false()
it 'returns false when its a fair show', ->
show = new PartnerShow(fabricate 'show',
location: null,
fair: new Fair(fabricate: 'fair'),
partner_city: null
)
show.isOnlineExclusive().should.be.false()
it 'returns true when there is no location', ->
show = new PartnerShow(fabricate 'show',
location: null,
partner_location: null,
fair: null
)
show.isOnlineExclusive().should.be.true()
describe '#formatShowOrFairCity', ->
it 'returns undefined without location and fair', ->
show = new PartnerShow fabricate 'show', fair: null, location: null
_.isUndefined(show.formatShowOrFairCity()).should.be.true()
describe '#runningDates', ->
it 'gives a formatted date span for the running dates', ->
@partnerShow.runningDates().should.equal "Jul 12th – Aug 23rd 2013"
describe '#shareTitle', ->
it "includes fair location", ->
@partnerShow.set
fair_location:
display: 'Booth 1234'
@partnerShow.shareTitle().should.equal "<NAME>, Booth 1234 See it on @artsy"
it "include partner name", ->
@partnerShow.shareTitle().should.equal 'See "<NAME> & <NAME>" at Gagosian Gallery on @artsy'
describe '#formatArtists', ->
beforeEach ->
@partnerShow.set
artists: [
fabricate('artist', id: 'picasso-1')
fabricate('artist', id: 'picasso-2')
fabricate('artist', id: 'picasso-3')
fabricate('artist', id: 'picasso-4')
]
it "correctly limits artists", ->
@partnerShow.formatArtists(2).should.equal "<a href='/artist/picasso-1'><NAME></a>, <a href='/artist/picasso-2'><NAME></a> and 2 more"
it "correctly limits artists", ->
@partnerShow.formatArtists().should.equal "<a href='/artist/picasso-1'><NAME></a>, <a href='/artist/picasso-2'><NAME></a>, <a href='/artist/picasso-3'><NAME></a>, <a href='/artist/picasso-4'><NAME></a>"
describe '#fairLocationDisplay', ->
it "Returns fair location", ->
@partnerShow.set
fair_location:
display: 'Booth 1234'
@partnerShow.fairLocationDisplay().should.equal "<i>New York</i> – Booth 1234"
it 'works with a missing fair location', ->
@partnerShow.set fair_location: null
@partnerShow.fairLocationDisplay().should.equal "<i>New York</i> – "
describe '#posterImageUrl', ->
it 'returns an image', ->
@partnerShow.posterImageUrl().should.containEql 'partner_show_images/51f6a51d275b24a787000c36/1/large.jpg'
it 'returns a featured image', ->
@partnerShow.posterImageUrl(true).should.containEql '/partner_show_images/51f6a51d275b24a787000c36/1/featured.jpg'
it 'returns larger if featured or large is unavailable', (done) ->
@partnerShow.on 'fetch:posterImageUrl', (url) ->
url.should.containEql 'additional_images/4e7cb83e1c80dd00010038e2/1/large.jpg'
done()
@partnerShow.unset 'image_versions'
@partnerShow.posterImageUrl()
Backbone.sync.args[0][2].url.should.containEql "/api/v1/partner/#{@partnerShow.get('partner').id}/show/#{@partnerShow.id}/artworks"
Backbone.sync.args[0][2].success [fabricate 'artwork']
it 'returns empty when there really is no image', ->
@partnerShow.unset 'image_versions'
@partnerShow.posterImageUrl()
Backbone.sync.args[0][2].success []
describe '#openingThisWeek', ->
beforeEach ->
@starting = '2015-04-09T04:00:00+00:00'
@partnerShow.set 'start_at', @starting
xit 'returns a boolean if the show opens within "this week"', ->
# if today is a tuesday and show is opening the next thursday
@today = moment('2015-04-08T04:00:00+00:00')
@partnerShow.openingThisWeek().should.be.false()
# if today is the prior saturday and show is opening on a thursday
@today = moment('2015-04-04T04:00:00+00:00')
@partnerShow.openingThisWeek(@today).should.be.true()
# if today is the prior thursday and the show is opening on a thursday
@today = moment('2015-04-02T04:00:00+00:00')
@partnerShow.openingThisWeek(@today).should.be.false()
describe '#isEndingSoon', ->
beforeEach ->
@ending = '2013-08-23T04:00:00+00:00'
@partnerShow.set 'end_at', @ending
it 'returns a boolean if the show ends within the desired timeframe (default 5 days)', ->
@partnerShow.isEndingSoon(5, moment(@ending).subtract(3, 'days')).should.be.true()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(5, 'days')).should.be.true()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(5.5, 'days')).should.be.false()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(6, 'days')).should.be.false()
it 'supports custom day values for "soon"', ->
@partnerShow.isEndingSoon(2, moment(@ending).subtract(3, 'days')).should.be.false()
@partnerShow.isEndingSoon(3, moment(@ending).subtract(3, 'days')).should.be.true()
describe '#endingIn', ->
beforeEach ->
@ending = '2013-08-23T04:00:00+00:00'
@partnerShow.set 'end_at', @ending
it 'returns the correct string', ->
@partnerShow.endingIn(moment(@ending).subtract(3, 'days')).should.equal 'in 3 days'
@partnerShow.endingIn(moment(@ending).subtract(1, 'day')).should.equal 'in 1 day'
@partnerShow.endingIn(moment(@ending)).should.equal 'today'
describe '#isOpeningToday', ->
beforeEach ->
@starting = '2013-07-12T04:00:00+00:00'
@partnerShow.set 'start_at', @starting
it 'returns a boolean value for whether or not the show opens *today*', ->
@partnerShow.isOpeningToday(moment(@starting).subtract(1, 'day')).should.be.false()
@partnerShow.isOpeningToday(moment(@starting).add(1, 'day')).should.be.false()
@partnerShow.isOpeningToday(moment(@starting)).should.be.true()
describe '#contextualLabel', ->
describe 'with name', ->
new PartnerShow(artists: [0, 0, 0], fair: null).contextualLabel('Foobar').should.equal 'Group Show including Foobar'
new PartnerShow(artists: [0], fair: null).contextualLabel('Foobar').should.equal 'Solo Show'
new PartnerShow(artists: [0], fair: 'existy').contextualLabel('Foobar').should.equal 'Fair Booth including Foobar'
new PartnerShow(artists: [0, 0, 0], fair: 'existy').contextualLabel('Foobar').should.equal 'Fair Booth including Foobar'
describe 'without name', ->
it 'returns the correct label', ->
new PartnerShow(artists: [0, 0, 0], fair: null).contextualLabel().should.equal 'Group Show'
new PartnerShow(artists: [0], fair: null).contextualLabel().should.equal 'Solo Show'
new PartnerShow(artists: [0], fair: 'existy').contextualLabel().should.equal 'Fair Booth'
new PartnerShow(artists: [0, 0, 0], fair: 'existy').contextualLabel().should.equal 'Fair Booth'
describe '#daySchedules', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
it 'returns true if a show has day schedules', ->
@partnerShow.daySchedules().should.be.true()
it 'returns false if a show has no schedules', ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location',
day_schedules: []
@partnerShow.daySchedules().should.be.false()
describe '#formatDaySchedule', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
@partnerShow.get('location').day_schedules.push
_id: "5543d89472616978f1e40100",
start_time: 76000,
end_time: 88400,
day_of_week: "Tuesday"
it 'returns the formatted day schedule for a day of the week with a day schedule', ->
@partnerShow.formatDaySchedule('Monday').should.match { start: 'Monday', hours: '10am–7pm' }
it 'returns the formatted day schedule for a day of the week with no day schedule', ->
@partnerShow.formatDaySchedule('Friday').should.match { start: 'Friday', hours: 'Closed' }
it 'returns the formatted day schedule for a day with multiple schedule blocks', ->
@partnerShow.formatDaySchedule('Tuesday').should.match { start: 'Tuesday', hours: '10am–7pm, 9:06pm–12:33am' }
describe '#formatDaySchedules', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
@partnerShow.get('location').day_schedules.push
_id: "5543d89472616978f1e40100",
start_time: 76000,
end_time: 88400,
day_of_week: "Tuesday"
it 'returns a formatted string describing the days open and hours for the show', ->
@partnerShow.formatDaySchedules().should.match [
{ hours: '10am–7pm', start: 'Monday' }
{ hours: '10am–7pm, 9:06pm–12:33am', start: 'Tuesday' }
{ hours: '10am–7pm', start: 'Wednesday' }
{ hours: '10am–7pm', start: 'Thursday' }
{ hours: 'Closed', start: 'Friday' }
{ hours: 'Closed', start: 'Saturday' }
{ hours: '10am–7pm', start: 'Sunday' }
]
describe '#formatModalDaySchedules', ->
beforeEach ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location'
it 'returns a formatted string describing the days open and hours for the show', ->
@partnerShow.formatModalDaySchedules().should.match [ days: 'Monday–Thursday, Sunday', hours: '10am–7pm' ]
it 'returns a correctly formatted string when a show has unusual hours', ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location',
day_schedules: [
{
_id: "5543d893726169750b990100",
start_time: 42359,
end_time: 68992,
day_of_week: "Wednesday"
}, {
_id: "5543d8937261697591bd0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Monday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 42359,
end_time: 68992,
day_of_week: "Tuesday"
}, {
_id: "5543d8947261690f169d0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Saturday"
}, {
_id: "5543d8947261695aea200200",
start_time: 42359,
end_time: 68992,
day_of_week: "Thursday"
}
]
@partnerShow.formatModalDaySchedules().should.match [
{ days: 'Monday, Saturday', hours: '12:30am–7:30pm' }
{ days: 'Tuesday–Thursday', hours: '11:45am–7:09pm' }
]
it 'returns a correctly formatted string when a show has overlapping days and multiple time blocks', ->
@partnerShow = new PartnerShow fabricate 'show',
location: fabricate 'partner_location',
day_schedules: [
{
_id: "5543d893726169750b990100",
start_time: 42359,
end_time: 68992,
day_of_week: "Wednesday"
}, {
_id: "5543d8937261697591bd0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Monday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 42359,
end_time: 68992,
day_of_week: "Tuesday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 82359,
end_time: 98992,
day_of_week: "Tuesday"
}, {
_id: "5543d8947261690f169d0100",
start_time: 1800,
end_time: 70250,
day_of_week: "Saturday"
}, {
_id: "5543d8947261695aea200200",
start_time: 42359,
end_time: 68992,
day_of_week: "Thursday"
}, {
_id: "5543d89472616978f1e40100",
start_time: 82359,
end_time: 98992,
day_of_week: "Wednesday"
}
]
@partnerShow.formatModalDaySchedules().should.match [
{ days: 'Monday, Saturday', hours: '12:30am–7:30pm' },
{ days: 'Tuesday–Wednesday', hours: '11:45am–7:09pm, 10:52pm–3:29am' },
{ days: 'Thursday', hours: '11:45am–7:09pm' }
]
| true | _ = require 'underscore'
{ fabricate } = require 'antigravity'
sd = require('sharify').data
should = require 'should'
Backbone = require 'backbone'
PartnerShow = require '../../models/partner_show'
PartnerLocation = require '../../models/partner_location'
FairLocation = require '../../models/partner_location'
Fair = require '../../models/fair'
sinon = require 'sinon'
moment = require 'moment'
describe 'PartnerShow', ->
beforeEach ->
sinon.stub Backbone, 'sync'
@partnerShow = new PartnerShow fabricate('show')
afterEach ->
Backbone.sync.restore()
describe '#url', ->
it 'includes a partner in the url if the model has one', ->
partnerShow = new PartnerShow id: 'slug-for-show', partner: fabricate('partner')
partnerShow.url().should.equal "#{sd.API_URL}/api/v1/partner/#{partnerShow.get('partner').id}/show/#{partnerShow.get('id')}"
it 'returns a URL with no id for new models', ->
partnerShow = new PartnerShow id: 'slug-for-show'
partnerShow.url().should.equal "#{sd.API_URL}/api/v1/show/#{partnerShow.get('id')}"
describe '#toJSONLD', ->
it 'returns valid json', ->
artist = fabricate 'artist'
@partnerShow.set artists: [artist]
json = @partnerShow.toJSONLD()
json['@context'].should.equal 'http://schema.org'
json['@type'].should.equal 'Event'
json.name.should.equal 'Inez & Vinoodh'
json.location.name.should.equal 'Gagosian Gallery'
json.location.address.should.eql
'@type': 'PostalAddress'
streetAddress: '529 W 20th St.2nd Floor'
addressLocality: 'New York'
addressRegion: 'NY'
postalCode: '10011'
json.performer[0].should.eql {
"@type": "Person"
image: "/images/missing_image.png"
name: "PI:NAME:<NAME>END_PI"
sameAs: "undefined/artist/#{artist.id}"
}
describe '#toPageTitle', ->
it 'creates a title defensively handling empty or missing values', ->
@partnerShow.toPageTitle().should.containEql "Inez & Vinoodh | Gagosian Gallery |"
it 'omits the artworks for sale bit if the partner is not a gallery', ->
@partnerShow.attributes.partner.name = "PI:NAME:<NAME>END_PI"
@partnerShow.attributes.partner.type = "Institution"
@partnerShow.toPageTitle().should.not.containEql ", Artwork for Sale"
describe '#toPageDescription', ->
it 'correctly renders the meta description', ->
@partnerShow.toPageDescription().should.containEql 'Past show at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
it 'adds a single artist to the meta description', ->
@partnerShow.set 'artists', [fabricate('artist')]
@partnerShow.toPageDescription().should.containEql 'Past show featuring works by Pablo Picasso at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
it 'adds multiple artists to the meta description', ->
@partnerShow.set 'artists', [fabricate('artist'), fabricate('artist')]
@partnerShow.toPageDescription().should.containEql 'Past show featuring works by Pablo Picasso and Pablo Picasso at Gagosian Gallery New York, 529 W 20th St. 2nd Floor'
describe '#location', ->
it 'returns a partner location', ->
show = new PartnerShow fabricate 'show'
show.location().should.be.instanceOf(PartnerLocation)
it 'returns a fair location', ->
show = new PartnerShow(fabricate 'show',
fair_location:
display: 'Booth 1234'
)
show.location().should.be.instanceOf(FairLocation)
describe '#isOnlineExclusive', ->
it 'returns false when there is partner location', ->
show = new PartnerShow(fabricate 'show',
location: new PartnerLocation(fabricate: 'partner_location'),
partner_city: null,
fair: null)
show.isOnlineExclusive().should.be.false()
it 'returns false when there is a partner_city', ->
show = new PartnerShow(fabricate 'show',
location: null,
partner_city: 'Tehran',
fair: null
)
show.isOnlineExclusive().should.be.false()
it 'returns false when its a fair show', ->
show = new PartnerShow(fabricate 'show',
location: null,
fair: new Fair(fabricate: 'fair'),
partner_city: null
)
show.isOnlineExclusive().should.be.false()
it 'returns true when there is no location', ->
show = new PartnerShow(fabricate 'show',
location: null,
partner_location: null,
fair: null
)
show.isOnlineExclusive().should.be.true()
describe '#formatShowOrFairCity', ->
it 'returns undefined without location and fair', ->
show = new PartnerShow fabricate 'show', fair: null, location: null
_.isUndefined(show.formatShowOrFairCity()).should.be.true()
describe '#runningDates', ->
it 'gives a formatted date span for the running dates', ->
@partnerShow.runningDates().should.equal "Jul 12th – Aug 23rd 2013"
describe '#shareTitle', ->
it "includes fair location", ->
@partnerShow.set
fair_location:
display: 'Booth 1234'
@partnerShow.shareTitle().should.equal "PI:NAME:<NAME>END_PI, Booth 1234 See it on @artsy"
it "include partner name", ->
@partnerShow.shareTitle().should.equal 'See "PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI" at Gagosian Gallery on @artsy'
describe '#formatArtists', ->
beforeEach ->
@partnerShow.set
artists: [
fabricate('artist', id: 'picasso-1')
fabricate('artist', id: 'picasso-2')
fabricate('artist', id: 'picasso-3')
fabricate('artist', id: 'picasso-4')
]
it "correctly limits artists", ->
@partnerShow.formatArtists(2).should.equal "<a href='/artist/picasso-1'>PI:NAME:<NAME>END_PI</a>, <a href='/artist/picasso-2'>PI:NAME:<NAME>END_PI</a> and 2 more"
it "correctly limits artists", ->
@partnerShow.formatArtists().should.equal "<a href='/artist/picasso-1'>PI:NAME:<NAME>END_PI</a>, <a href='/artist/picasso-2'>PI:NAME:<NAME>END_PI</a>, <a href='/artist/picasso-3'>PI:NAME:<NAME>END_PI</a>, <a href='/artist/picasso-4'>PI:NAME:<NAME>END_PI</a>"
describe '#fairLocationDisplay', ->
it "Returns fair location", ->
@partnerShow.set
fair_location:
display: 'Booth 1234'
@partnerShow.fairLocationDisplay().should.equal "<i>New York</i> – Booth 1234"
it 'works with a missing fair location', ->
@partnerShow.set fair_location: null
@partnerShow.fairLocationDisplay().should.equal "<i>New York</i> – "
describe '#posterImageUrl', ->
it 'returns an image', ->
@partnerShow.posterImageUrl().should.containEql 'partner_show_images/51f6a51d275b24a787000c36/1/large.jpg'
it 'returns a featured image', ->
@partnerShow.posterImageUrl(true).should.containEql '/partner_show_images/51f6a51d275b24a787000c36/1/featured.jpg'
it 'returns larger if featured or large is unavailable', (done) ->
@partnerShow.on 'fetch:posterImageUrl', (url) ->
url.should.containEql 'additional_images/4e7cb83e1c80dd00010038e2/1/large.jpg'
done()
@partnerShow.unset 'image_versions'
@partnerShow.posterImageUrl()
Backbone.sync.args[0][2].url.should.containEql "/api/v1/partner/#{@partnerShow.get('partner').id}/show/#{@partnerShow.id}/artworks"
Backbone.sync.args[0][2].success [fabricate 'artwork']
it 'returns empty when there really is no image', ->
@partnerShow.unset 'image_versions'
@partnerShow.posterImageUrl()
Backbone.sync.args[0][2].success []
describe '#openingThisWeek', ->
beforeEach ->
@starting = '2015-04-09T04:00:00+00:00'
@partnerShow.set 'start_at', @starting
xit 'returns a boolean if the show opens within "this week"', ->
# if today is a tuesday and show is opening the next thursday
@today = moment('2015-04-08T04:00:00+00:00')
@partnerShow.openingThisWeek().should.be.false()
# if today is the prior saturday and show is opening on a thursday
@today = moment('2015-04-04T04:00:00+00:00')
@partnerShow.openingThisWeek(@today).should.be.true()
# if today is the prior thursday and the show is opening on a thursday
@today = moment('2015-04-02T04:00:00+00:00')
@partnerShow.openingThisWeek(@today).should.be.false()
describe '#isEndingSoon', ->
beforeEach ->
@ending = '2013-08-23T04:00:00+00:00'
@partnerShow.set 'end_at', @ending
it 'returns a boolean if the show ends within the desired timeframe (default 5 days)', ->
@partnerShow.isEndingSoon(5, moment(@ending).subtract(3, 'days')).should.be.true()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(5, 'days')).should.be.true()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(5.5, 'days')).should.be.false()
@partnerShow.isEndingSoon(5, moment(@ending).subtract(6, 'days')).should.be.false()
it 'supports custom day values for "soon"', ->
@partnerShow.isEndingSoon(2, moment(@ending).subtract(3, 'days')).should.be.false()
@partnerShow.isEndingSoon(3, moment(@ending).subtract(3, 'days')).should.be.true()
describe '#endingIn', ->
beforeEach ->
@ending = '2013-08-23T04:00:00+00:00'
@partnerShow.set 'end_at', @ending
it 'returns the correct string', ->
@partnerShow.endingIn(moment(@ending).subtract(3, 'days')).should.equal 'in 3 days'
@partnerShow.endingIn(moment(@ending).subtract(1, 'day')).should.equal 'in 1 day'
@partnerShow.endingIn(moment(@ending)).should.equal 'today'
describe '#isOpeningToday', ->
beforeEach ->
@starting = '2013-07-12T04:00:00+00:00'
@partnerShow.set 'start_at', @starting
it 'returns a boolean value for whether or not the show opens *today*', ->
@partnerShow.isOpeningToday(moment(@starting).subtract(1, 'day')).should.be.false()
@partnerShow.isOpeningToday(moment(@starting).add(1, 'day')).should.be.false()
@partnerShow.isOpeningToday(moment(@starting)).should.be.true()
describe '#contextualLabel', ->
describe 'with name', ->
new PartnerShow(artists: [0, 0, 0], fair: null).contextualLabel('Foobar').should.equal 'Group Show including Foobar'
new PartnerShow(artists: [0], fair: null).contextualLabel('Foobar').should.equal 'Solo Show'
new PartnerShow(artists: [0], fair: 'existy').contextualLabel('Foobar').should.equal 'Fair Booth including Foobar'
new PartnerShow(artists: [0, 0, 0], fair: 'existy').contextualLabel('Foobar').should.equal 'Fair Booth including Foobar'
describe 'without name', ->
it 'returns the correct label', ->
new PartnerShow(artists: [0, 0, 0], fair: null).contextualLabel().should.equal 'Group Show'
new PartnerShow(artists: [0], fair: null).contextualLabel().should.equal 'Solo Show'
new PartnerShow(artists: [0], fair: 'existy').contextualLabel().should.equal 'Fair Booth'
new PartnerShow(artists: [0, 0, 0], fair: 'existy').contextualLabel().should.equal 'Fair Booth'
  # Specs for PartnerShow#daySchedules — truthy when the show's location
  # carries opening-hours data, falsy when the schedule list is empty.
  describe '#daySchedules', ->
    beforeEach ->
      # The fabricated partner_location ships with default day_schedules.
      @partnerShow = new PartnerShow fabricate 'show',
        location: fabricate 'partner_location'
    it 'returns true if a show has day schedules', ->
      @partnerShow.daySchedules().should.be.true()
    it 'returns false if a show has no schedules', ->
      # Override the fixture with an explicitly empty schedule list.
      @partnerShow = new PartnerShow fabricate 'show',
        location: fabricate 'partner_location',
          day_schedules: []
      @partnerShow.daySchedules().should.be.false()
  # Specs for PartnerShow#formatDaySchedule(day) — formats one weekday's
  # opening hours ('Closed' when that day has no schedule blocks).
  describe '#formatDaySchedule', ->
    beforeEach ->
      @partnerShow = new PartnerShow fabricate 'show',
        location: fabricate 'partner_location'
      # Add a second Tuesday block on top of the fixture's default hours;
      # times are seconds since midnight (76000 ≈ 9:06pm, 88400 wraps past
      # midnight ≈ 12:33am — matches the assertion below).
      @partnerShow.get('location').day_schedules.push
        _id: "5543d89472616978f1e40100",
        start_time: 76000,
        end_time: 88400,
        day_of_week: "Tuesday"
    it 'returns the formatted day schedule for a day of the week with a day schedule', ->
      @partnerShow.formatDaySchedule('Monday').should.match { start: 'Monday', hours: '10am–7pm' }
    it 'returns the formatted day schedule for a day of the week with no day schedule', ->
      @partnerShow.formatDaySchedule('Friday').should.match { start: 'Friday', hours: 'Closed' }
    it 'returns the formatted day schedule for a day with multiple schedule blocks', ->
      # Two blocks on Tuesday join with a comma.
      @partnerShow.formatDaySchedule('Tuesday').should.match { start: 'Tuesday', hours: '10am–7pm, 9:06pm–12:33am' }
  # Specs for PartnerShow#formatDaySchedules — one {start, hours} entry per
  # weekday, Monday first, 'Closed' for days without schedule blocks.
  describe '#formatDaySchedules', ->
    beforeEach ->
      @partnerShow = new PartnerShow fabricate 'show',
        location: fabricate 'partner_location'
      # Extra Tuesday block (seconds since midnight) on top of the
      # fixture's default 10am–7pm hours.
      @partnerShow.get('location').day_schedules.push
        _id: "5543d89472616978f1e40100",
        start_time: 76000,
        end_time: 88400,
        day_of_week: "Tuesday"
    it 'returns a formatted string describing the days open and hours for the show', ->
      @partnerShow.formatDaySchedules().should.match [
        { hours: '10am–7pm', start: 'Monday' }
        { hours: '10am–7pm, 9:06pm–12:33am', start: 'Tuesday' }
        { hours: '10am–7pm', start: 'Wednesday' }
        { hours: '10am–7pm', start: 'Thursday' }
        { hours: 'Closed', start: 'Friday' }
        { hours: 'Closed', start: 'Saturday' }
        { hours: '10am–7pm', start: 'Sunday' }
      ]
  # Specs for PartnerShow#formatModalDaySchedules — groups weekdays that
  # share identical hours into {days, hours} entries, collapsing
  # consecutive days into ranges (e.g. 'Tuesday–Thursday').
  describe '#formatModalDaySchedules', ->
    beforeEach ->
      @partnerShow = new PartnerShow fabricate 'show',
        location: fabricate 'partner_location'
    it 'returns a formatted string describing the days open and hours for the show', ->
      @partnerShow.formatModalDaySchedules().should.match [ days: 'Monday–Thursday, Sunday', hours: '10am–7pm' ]
    it 'returns a correctly formatted string when a show has unusual hours', ->
      # Two distinct hour patterns across five days; times are seconds
      # since midnight (1800 ≈ 12:30am, 70250 ≈ 7:30pm, 42359 ≈ 11:45am,
      # 68992 ≈ 7:09pm — matches the assertions below).
      @partnerShow = new PartnerShow fabricate 'show',
        location: fabricate 'partner_location',
          day_schedules: [
            {
              _id: "5543d893726169750b990100",
              start_time: 42359,
              end_time: 68992,
              day_of_week: "Wednesday"
            }, {
              _id: "5543d8937261697591bd0100",
              start_time: 1800,
              end_time: 70250,
              day_of_week: "Monday"
            }, {
              _id: "5543d89472616978f1e40100",
              start_time: 42359,
              end_time: 68992,
              day_of_week: "Tuesday"
            }, {
              _id: "5543d8947261690f169d0100",
              start_time: 1800,
              end_time: 70250,
              day_of_week: "Saturday"
            }, {
              _id: "5543d8947261695aea200200",
              start_time: 42359,
              end_time: 68992,
              day_of_week: "Thursday"
            }
          ]
      # Non-consecutive days with matching hours join with a comma;
      # consecutive days collapse into a dash range.
      @partnerShow.formatModalDaySchedules().should.match [
        { days: 'Monday, Saturday', hours: '12:30am–7:30pm' }
        { days: 'Tuesday–Thursday', hours: '11:45am–7:09pm' }
      ]
    it 'returns a correctly formatted string when a show has overlapping days and multiple time blocks', ->
      # Tuesday and Wednesday each carry a second evening block
      # (82359 ≈ 10:52pm, 98992 wraps past midnight ≈ 3:29am), so they
      # group together; Thursday keeps only the daytime block.
      @partnerShow = new PartnerShow fabricate 'show',
        location: fabricate 'partner_location',
          day_schedules: [
            {
              _id: "5543d893726169750b990100",
              start_time: 42359,
              end_time: 68992,
              day_of_week: "Wednesday"
            }, {
              _id: "5543d8937261697591bd0100",
              start_time: 1800,
              end_time: 70250,
              day_of_week: "Monday"
            }, {
              _id: "5543d89472616978f1e40100",
              start_time: 42359,
              end_time: 68992,
              day_of_week: "Tuesday"
            }, {
              _id: "5543d89472616978f1e40100",
              start_time: 82359,
              end_time: 98992,
              day_of_week: "Tuesday"
            }, {
              _id: "5543d8947261690f169d0100",
              start_time: 1800,
              end_time: 70250,
              day_of_week: "Saturday"
            }, {
              _id: "5543d8947261695aea200200",
              start_time: 42359,
              end_time: 68992,
              day_of_week: "Thursday"
            }, {
              _id: "5543d89472616978f1e40100",
              start_time: 82359,
              end_time: 98992,
              day_of_week: "Wednesday"
            }
          ]
      @partnerShow.formatModalDaySchedules().should.match [
        { days: 'Monday, Saturday', hours: '12:30am–7:30pm' },
        { days: 'Tuesday–Wednesday', hours: '11:45am–7:09pm, 10:52pm–3:29am' },
        { days: 'Thursday', hours: '11:45am–7:09pm' }
      ]
|
[
{
"context": "alize\n nodes: 'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'\n clusters: ",
"end": 200,
"score": 0.9997542500495911,
"start": 192,
"tag": "IP_ADDRESS",
"value": "10.0.0.1"
},
{
"context": "xd_container'\n options: admin_pass... | packages/mariadb/test/server/configure.coffee | ryba-io/ryba | 24 | normalize = require 'masson/lib/config/normalize'
describe 'MariaDB - Configuration tests', ->
it 'default properties', () ->
normalize
nodes: 'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', values: 'my_local_lxd_container'
options: admin_password: "secret", ssl: enabled: false
.clusters.cluster_test.services.service_mariadb.instances[0].options.should.eql
admin_password: 'secret',
ssl: enabled: false
sql_on_install: [],
current_password: '',
admin_username: 'root',
remove_anonymous: true,
disallow_remote_root_login: false,
remove_test_db: true,
reload_privileges: true,
fqdn: 'my_local_lxd_container',
iptables: undefined,
group: name: 'mysql'
user: name: 'mysql', home: '/var/lib/mysql', gid: 'mysql'
name: 'mariadb-server',
srv_name: 'mariadb',
chk_name: 'mariadb',
my_cnf:
mysqld:
general_log: 'OFF',
general_log_file: '/var/lib/mysql/log/log-general.log',
'log-bin': '/var/lib/mysql/log/bin',
binlog_format: 'mixed',
port: '3306',
'bind-address': '0.0.0.0',
'pid-file': '/var/run/mariadb/mysql.pid',
socket: '/var/lib/mysql/mysql.sock',
datadir: '/var/lib/mysql/data',
user: 'mysql',
event_scheduler: 'ON',
'character-set-server': 'latin1',
'collation-server': 'latin1_swedish_ci',
'skip-external-locking': '',
key_buffer_size: '384M',
max_allowed_packet: '1M',
table_open_cache: '512',
sort_buffer_size: '2M',
read_buffer_size: '2M',
read_rnd_buffer_size: '8M',
myisam_sort_buffer_size: '64M',
thread_cache_size: '8',
query_cache_size: '32M',
'secure-auth': '',
'secure-file-priv': '/var/lib/mysql/upload',
max_connections: '100',
max_user_connections: '50',
'log-error': '/var/log/mysqld/error.log',
slow_query_log_file: '/var/lib/mysql/log/slow-queries.log',
long_query_time: '4',
expire_logs_days: '7',
innodb_file_per_table: '',
innodb_data_home_dir: '/var/lib/mysql/data',
innodb_data_file_path: 'ibdata1:10M:autoextend',
innodb_log_group_home_dir: '/var/lib/mysql/log',
innodb_buffer_pool_size: '384M',
innodb_log_file_size: '100M',
innodb_log_buffer_size: '8M',
innodb_flush_log_at_trx_commit: '1',
innodb_lock_wait_timeout: '50'
mysqldump: quick: '', max_allowed_packet: '16M'
mysql: 'no-auto-rehash': ''
myisamchk: key_buffer_size: '256M', sort_buffer_size: '256M', read_buffer: '2M', write_buffer: '2M'
mysqlhotcopy: 'interactive-timeout': ''
client: socket: '/var/lib/mysql/mysql.sock'
mysqld_safe: 'pid-file': '/var/run/mariadb/mysql.pid'
ha_enabled: false,
journal_log_dir: '/var/lib/mysql/log',
repo: source: null, target: '/etc/yum.repos.d/mariadb.repo', replace: 'mariadb*'
wait_tcp: fqdn: 'my_local_lxd_container', port: '3306'
it 'ssl properties', () ->
config = normalize
nodes: 'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', values: 'my_local_lxd_container'
options:
admin_password: "secret"
ssl:
enabled: true,
cacert: source: "/etc/mariadb/ca.pem"
cert: source: "/etc/mariadb/cert.pem"
key: source: "/etc/mariadb/key.pem"
# Retrieving the service_mariadb object
{ssl, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[0].options
ssl.should.eql
enabled: true,
cacert: source: '/etc/mariadb/ca.pem', local:false
cert: source: '/etc/mariadb/cert.pem', local:false
key: source: '/etc/mariadb/key.pem', local:false
my_cnf.mysqld.should.containEql
'ssl-ca' : '/var/lib/mysql/tls/ca.pem'
'ssl-key' : '/var/lib/mysql/tls/key.pem'
'ssl-cert' : '/var/lib/mysql/tls/cert.pem'
it 'ha properties', () ->
config = normalize
nodes:
'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
'my_remote_lxd_container': ip: '10.0.0.2', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', match: 'any', values: ['my_local_lxd_container', 'my_remote_lxd_container']
options:
admin_password: "secret"
ssl: enabled: false
repl_master: admin_password: 'passwd_one', password: 'passwd_two'
# Retrieving options for node 1
options = config.clusters.cluster_test.services.service_mariadb.instances[0].options
options.should.containEql
ha_enabled : true
replication_dir : '/var/lib/mysql/replication'
id : 1
{repl_master, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[0].options
repl_master.should.eql
admin_password: 'passwd_one',
password: 'passwd_two',
fqdn: 'my_remote_lxd_container',
admin_username: 'root',
username: 'repl'
my_cnf.mysqld.should.containEql
'server-id' : 1
'relay-log' : '/var/lib/mysql/replication/mysql-relay-bin'
'relay-log-index' : '/var/lib/mysql/replication/mysql-relay-bin.index'
'master-info-file' : '/var/lib/mysql/replication/master.info'
'relay-log-info-file' : '/var/lib/mysql/replication/relay-log.info'
'log-slave-updates' : ''
'replicate-same-server-id' : '0'
'slave-skip-errors' : '1062'
# Retrieving options for node 2
options = config.clusters.cluster_test.services.service_mariadb.instances[1].options
options.should.containEql
ha_enabled : true
replication_dir : '/var/lib/mysql/replication'
id : 2
{repl_master, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[1].options
repl_master.should.eql
admin_password: 'passwd_one',
password: 'passwd_two',
fqdn: 'my_local_lxd_container',
admin_username: 'root',
username: 'repl'
my_cnf.mysqld.should.containEql
'server-id' : 2
'relay-log' : '/var/lib/mysql/replication/mysql-relay-bin'
'relay-log-index' : '/var/lib/mysql/replication/mysql-relay-bin.index'
'master-info-file' : '/var/lib/mysql/replication/master.info'
'relay-log-info-file' : '/var/lib/mysql/replication/relay-log.info'
'log-slave-updates' : ''
'replicate-same-server-id' : '0'
'slave-skip-errors' : '1062'
| 13817 | normalize = require 'masson/lib/config/normalize'
describe 'MariaDB - Configuration tests', ->
it 'default properties', () ->
normalize
nodes: 'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', values: 'my_local_lxd_container'
options: admin_password: "<PASSWORD>", ssl: enabled: false
.clusters.cluster_test.services.service_mariadb.instances[0].options.should.eql
admin_password: '<PASSWORD>',
ssl: enabled: false
sql_on_install: [],
current_password: '',
admin_username: 'root',
remove_anonymous: true,
disallow_remote_root_login: false,
remove_test_db: true,
reload_privileges: true,
fqdn: 'my_local_lxd_container',
iptables: undefined,
group: name: 'mysql'
user: name: 'mysql', home: '/var/lib/mysql', gid: 'mysql'
name: 'mariadb-server',
srv_name: 'mariadb',
chk_name: 'mariadb',
my_cnf:
mysqld:
general_log: 'OFF',
general_log_file: '/var/lib/mysql/log/log-general.log',
'log-bin': '/var/lib/mysql/log/bin',
binlog_format: 'mixed',
port: '3306',
'bind-address': '0.0.0.0',
'pid-file': '/var/run/mariadb/mysql.pid',
socket: '/var/lib/mysql/mysql.sock',
datadir: '/var/lib/mysql/data',
user: 'mysql',
event_scheduler: 'ON',
'character-set-server': 'latin1',
'collation-server': 'latin1_swedish_ci',
'skip-external-locking': '',
key_buffer_size: '384M',
max_allowed_packet: '1M',
table_open_cache: '512',
sort_buffer_size: '2M',
read_buffer_size: '2M',
read_rnd_buffer_size: '8M',
myisam_sort_buffer_size: '64M',
thread_cache_size: '8',
query_cache_size: '32M',
'secure-auth': '',
'secure-file-priv': '/var/lib/mysql/upload',
max_connections: '100',
max_user_connections: '50',
'log-error': '/var/log/mysqld/error.log',
slow_query_log_file: '/var/lib/mysql/log/slow-queries.log',
long_query_time: '4',
expire_logs_days: '7',
innodb_file_per_table: '',
innodb_data_home_dir: '/var/lib/mysql/data',
innodb_data_file_path: 'ibdata1:10M:autoextend',
innodb_log_group_home_dir: '/var/lib/mysql/log',
innodb_buffer_pool_size: '384M',
innodb_log_file_size: '100M',
innodb_log_buffer_size: '8M',
innodb_flush_log_at_trx_commit: '1',
innodb_lock_wait_timeout: '50'
mysqldump: quick: '', max_allowed_packet: '16M'
mysql: 'no-auto-rehash': ''
myisamchk: key_buffer_size: '256M', sort_buffer_size: '256M', read_buffer: '2M', write_buffer: '2M'
mysqlhotcopy: 'interactive-timeout': ''
client: socket: '/var/lib/mysql/mysql.sock'
mysqld_safe: 'pid-file': '/var/run/mariadb/mysql.pid'
ha_enabled: false,
journal_log_dir: '/var/lib/mysql/log',
repo: source: null, target: '/etc/yum.repos.d/mariadb.repo', replace: 'mariadb*'
wait_tcp: fqdn: 'my_local_lxd_container', port: '3306'
it 'ssl properties', () ->
config = normalize
nodes: 'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', values: 'my_local_lxd_container'
options:
admin_password: "<PASSWORD>"
ssl:
enabled: true,
cacert: source: "/etc/mariadb/ca.pem"
cert: source: "/etc/mariadb/cert.pem"
key: source: "/etc/mariadb/key.pem"
# Retrieving the service_mariadb object
{ssl, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[0].options
ssl.should.eql
enabled: true,
cacert: source: '/etc/mariadb/ca.pem', local:false
cert: source: '/etc/mariadb/cert.pem', local:false
key: source: '/etc/mariadb/key.pem', local:false
my_cnf.mysqld.should.containEql
'ssl-ca' : '/var/lib/mysql/tls/ca.pem'
'ssl-key' : '/var/lib/mysql/tls/key.pem'
'ssl-cert' : '/var/lib/mysql/tls/cert.pem'
it 'ha properties', () ->
config = normalize
nodes:
'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
'my_remote_lxd_container': ip: '10.0.0.2', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', match: 'any', values: ['my_local_lxd_container', 'my_remote_lxd_container']
options:
admin_password: "<PASSWORD>"
ssl: enabled: false
repl_master: admin_password: '<PASSWORD>', password: '<PASSWORD>'
# Retrieving options for node 1
options = config.clusters.cluster_test.services.service_mariadb.instances[0].options
options.should.containEql
ha_enabled : true
replication_dir : '/var/lib/mysql/replication'
id : 1
{repl_master, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[0].options
repl_master.should.eql
admin_password: '<PASSWORD>',
password: '<PASSWORD>',
fqdn: 'my_remote_lxd_container',
admin_username: 'root',
username: 'repl'
my_cnf.mysqld.should.containEql
'server-id' : 1
'relay-log' : '/var/lib/mysql/replication/mysql-relay-bin'
'relay-log-index' : '/var/lib/mysql/replication/mysql-relay-bin.index'
'master-info-file' : '/var/lib/mysql/replication/master.info'
'relay-log-info-file' : '/var/lib/mysql/replication/relay-log.info'
'log-slave-updates' : ''
'replicate-same-server-id' : '0'
'slave-skip-errors' : '1062'
# Retrieving options for node 2
options = config.clusters.cluster_test.services.service_mariadb.instances[1].options
options.should.containEql
ha_enabled : true
replication_dir : '/var/lib/mysql/replication'
id : 2
{repl_master, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[1].options
repl_master.should.eql
admin_password: '<PASSWORD>',
password: '<PASSWORD>',
fqdn: 'my_local_lxd_container',
admin_username: 'root',
username: 'repl'
my_cnf.mysqld.should.containEql
'server-id' : 2
'relay-log' : '/var/lib/mysql/replication/mysql-relay-bin'
'relay-log-index' : '/var/lib/mysql/replication/mysql-relay-bin.index'
'master-info-file' : '/var/lib/mysql/replication/master.info'
'relay-log-info-file' : '/var/lib/mysql/replication/relay-log.info'
'log-slave-updates' : ''
'replicate-same-server-id' : '0'
'slave-skip-errors' : '1062'
| true | normalize = require 'masson/lib/config/normalize'
describe 'MariaDB - Configuration tests', ->
it 'default properties', () ->
normalize
nodes: 'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', values: 'my_local_lxd_container'
options: admin_password: "PI:PASSWORD:<PASSWORD>END_PI", ssl: enabled: false
.clusters.cluster_test.services.service_mariadb.instances[0].options.should.eql
admin_password: 'PI:PASSWORD:<PASSWORD>END_PI',
ssl: enabled: false
sql_on_install: [],
current_password: '',
admin_username: 'root',
remove_anonymous: true,
disallow_remote_root_login: false,
remove_test_db: true,
reload_privileges: true,
fqdn: 'my_local_lxd_container',
iptables: undefined,
group: name: 'mysql'
user: name: 'mysql', home: '/var/lib/mysql', gid: 'mysql'
name: 'mariadb-server',
srv_name: 'mariadb',
chk_name: 'mariadb',
my_cnf:
mysqld:
general_log: 'OFF',
general_log_file: '/var/lib/mysql/log/log-general.log',
'log-bin': '/var/lib/mysql/log/bin',
binlog_format: 'mixed',
port: '3306',
'bind-address': '0.0.0.0',
'pid-file': '/var/run/mariadb/mysql.pid',
socket: '/var/lib/mysql/mysql.sock',
datadir: '/var/lib/mysql/data',
user: 'mysql',
event_scheduler: 'ON',
'character-set-server': 'latin1',
'collation-server': 'latin1_swedish_ci',
'skip-external-locking': '',
key_buffer_size: '384M',
max_allowed_packet: '1M',
table_open_cache: '512',
sort_buffer_size: '2M',
read_buffer_size: '2M',
read_rnd_buffer_size: '8M',
myisam_sort_buffer_size: '64M',
thread_cache_size: '8',
query_cache_size: '32M',
'secure-auth': '',
'secure-file-priv': '/var/lib/mysql/upload',
max_connections: '100',
max_user_connections: '50',
'log-error': '/var/log/mysqld/error.log',
slow_query_log_file: '/var/lib/mysql/log/slow-queries.log',
long_query_time: '4',
expire_logs_days: '7',
innodb_file_per_table: '',
innodb_data_home_dir: '/var/lib/mysql/data',
innodb_data_file_path: 'ibdata1:10M:autoextend',
innodb_log_group_home_dir: '/var/lib/mysql/log',
innodb_buffer_pool_size: '384M',
innodb_log_file_size: '100M',
innodb_log_buffer_size: '8M',
innodb_flush_log_at_trx_commit: '1',
innodb_lock_wait_timeout: '50'
mysqldump: quick: '', max_allowed_packet: '16M'
mysql: 'no-auto-rehash': ''
myisamchk: key_buffer_size: '256M', sort_buffer_size: '256M', read_buffer: '2M', write_buffer: '2M'
mysqlhotcopy: 'interactive-timeout': ''
client: socket: '/var/lib/mysql/mysql.sock'
mysqld_safe: 'pid-file': '/var/run/mariadb/mysql.pid'
ha_enabled: false,
journal_log_dir: '/var/lib/mysql/log',
repo: source: null, target: '/etc/yum.repos.d/mariadb.repo', replace: 'mariadb*'
wait_tcp: fqdn: 'my_local_lxd_container', port: '3306'
it 'ssl properties', () ->
config = normalize
nodes: 'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', values: 'my_local_lxd_container'
options:
admin_password: "PI:PASSWORD:<PASSWORD>END_PI"
ssl:
enabled: true,
cacert: source: "/etc/mariadb/ca.pem"
cert: source: "/etc/mariadb/cert.pem"
key: source: "/etc/mariadb/key.pem"
# Retrieving the service_mariadb object
{ssl, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[0].options
ssl.should.eql
enabled: true,
cacert: source: '/etc/mariadb/ca.pem', local:false
cert: source: '/etc/mariadb/cert.pem', local:false
key: source: '/etc/mariadb/key.pem', local:false
my_cnf.mysqld.should.containEql
'ssl-ca' : '/var/lib/mysql/tls/ca.pem'
'ssl-key' : '/var/lib/mysql/tls/key.pem'
'ssl-cert' : '/var/lib/mysql/tls/cert.pem'
it 'ha properties', () ->
config = normalize
nodes:
'my_local_lxd_container': ip: '10.0.0.1', tags: 'type': 'test_container'
'my_remote_lxd_container': ip: '10.0.0.2', tags: 'type': 'test_container'
clusters: 'cluster_test':
services: 'service_mariadb':
module: './src/server'
affinity: type: 'nodes', match: 'any', values: ['my_local_lxd_container', 'my_remote_lxd_container']
options:
admin_password: "PI:PASSWORD:<PASSWORD>END_PI"
ssl: enabled: false
repl_master: admin_password: 'PI:PASSWORD:<PASSWORD>END_PI', password: 'PI:PASSWORD:<PASSWORD>END_PI'
# Retrieving options for node 1
options = config.clusters.cluster_test.services.service_mariadb.instances[0].options
options.should.containEql
ha_enabled : true
replication_dir : '/var/lib/mysql/replication'
id : 1
{repl_master, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[0].options
repl_master.should.eql
admin_password: 'PI:PASSWORD:<PASSWORD>END_PI',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
fqdn: 'my_remote_lxd_container',
admin_username: 'root',
username: 'repl'
my_cnf.mysqld.should.containEql
'server-id' : 1
'relay-log' : '/var/lib/mysql/replication/mysql-relay-bin'
'relay-log-index' : '/var/lib/mysql/replication/mysql-relay-bin.index'
'master-info-file' : '/var/lib/mysql/replication/master.info'
'relay-log-info-file' : '/var/lib/mysql/replication/relay-log.info'
'log-slave-updates' : ''
'replicate-same-server-id' : '0'
'slave-skip-errors' : '1062'
# Retrieving options for node 2
options = config.clusters.cluster_test.services.service_mariadb.instances[1].options
options.should.containEql
ha_enabled : true
replication_dir : '/var/lib/mysql/replication'
id : 2
{repl_master, my_cnf} = config.clusters.cluster_test.services.service_mariadb.instances[1].options
repl_master.should.eql
admin_password: 'PI:PASSWORD:<PASSWORD>END_PI',
password: 'PI:PASSWORD:<PASSWORD>END_PI',
fqdn: 'my_local_lxd_container',
admin_username: 'root',
username: 'repl'
my_cnf.mysqld.should.containEql
'server-id' : 2
'relay-log' : '/var/lib/mysql/replication/mysql-relay-bin'
'relay-log-index' : '/var/lib/mysql/replication/mysql-relay-bin.index'
'master-info-file' : '/var/lib/mysql/replication/master.info'
'relay-log-info-file' : '/var/lib/mysql/replication/relay-log.info'
'log-slave-updates' : ''
'replicate-same-server-id' : '0'
'slave-skip-errors' : '1062'
|
[
{
"context": "roller 'myCtrl', ($scope)->\n $scope.firstName = \"John\"\n $scope.lastName = \"Doe\"\n $scope.$on 'form-ifr",
"end": 95,
"score": 0.9998448491096497,
"start": 91,
"tag": "NAME",
"value": "John"
},
{
"context": "\n $scope.firstName = \"John\"\n $scope.lastName = \"... | backend/web/js/controller/product/catalog.coffee | weratad/technc | 0 | app = angular.module 'myApp', []
app.controller 'myCtrl', ($scope)->
$scope.firstName = "John"
$scope.lastName = "Doe"
$scope.$on 'form-iframe', (e , message)->
$scope.messages.push(message);
return
$scope.message = () ->
$scop.$broadcast('from-parent', 'Sent from parent')
return
return
| 84900 | app = angular.module 'myApp', []
app.controller 'myCtrl', ($scope)->
$scope.firstName = "<NAME>"
$scope.lastName = "<NAME>"
$scope.$on 'form-iframe', (e , message)->
$scope.messages.push(message);
return
$scope.message = () ->
$scop.$broadcast('from-parent', 'Sent from parent')
return
return
| true | app = angular.module 'myApp', []
app.controller 'myCtrl', ($scope)->
$scope.firstName = "PI:NAME:<NAME>END_PI"
$scope.lastName = "PI:NAME:<NAME>END_PI"
$scope.$on 'form-iframe', (e , message)->
$scope.messages.push(message);
return
$scope.message = () ->
$scop.$broadcast('from-parent', 'Sent from parent')
return
return
|
[
{
"context": " user:\n _id: 'uuid'\n email: 'joe@example.com'\n givenName: 'joe'\n familyName:",
"end": 1655,
"score": 0.9999096989631653,
"start": 1640,
"tag": "EMAIL",
"value": "joe@example.com"
},
{
"context": " email: 'joe@example.com'\n ... | test/unit/oidc/sendVerificationEmail.coffee | LorianeE/connect | 331 | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
chai.use sinonChai
chai.should()
mailer = require '../../../boot/mailer'
fakeMailer =
sendMail: (tmpl, loc, opts, cb) ->
cb()
{sendVerificationEmail} = require '../../../oidc'
OneTimeToken = require '../../../models/OneTimeToken'
describe 'Send Verification Email', ->
before ->
sinon.stub(mailer, 'getMailer').returns(fakeMailer)
after ->
mailer.getMailer.restore()
{req,res,next} = {}
describe 'when not requested', ->
before ->
req =
provider:
emailVerification:
enable: false
sendVerificationEmail: false
res = {}
next = sinon.spy()
sinon.spy(OneTimeToken, 'issue')
sinon.spy(fakeMailer, 'sendMail')
sendVerificationEmail req, res, next
after ->
OneTimeToken.issue.restore()
fakeMailer.sendMail.restore()
it 'should continue', ->
next.should.have.been.called
it 'should not issue a OneTimeToken', ->
OneTimeToken.issue.should.not.have.been.called
it 'should not send an email', ->
fakeMailer.sendMail.should.not.have.been.called
describe 'when requested', ->
before ->
req =
connectParams:
redirect_uri: 'https://example.com/callback'
client_id: 'client-uuid'
response_type: 'id_token token'
scope: 'openid profile'
provider:
emailVerification:
enable: true
sendVerificationEmail: true
user:
_id: 'uuid'
email: 'joe@example.com'
givenName: 'joe'
familyName: 'johnson'
res = {}
next = sinon.spy()
sinon.stub(OneTimeToken, 'issue')
.callsArgWith(1, null, new OneTimeToken {
sub: req.user._id
ttl: 3600 * 24 * 7
use: 'emailVerification'
})
sinon.stub(fakeMailer, 'sendMail').callsArgWith 3, null, null
sendVerificationEmail req, res, next
after ->
OneTimeToken.issue.restore()
fakeMailer.sendMail.restore()
it 'should issue a token to the user', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
sub: req.user._id
})
it 'should issue an expiring token', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
ttl: sinon.match.number
})
it 'should issue a token for email verification', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
use: 'emailVerification'
})
it 'should send to the user', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match.object, sinon.match({
to: req.user.email
})
it 'should provide a subject', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match.object, sinon.match({
subject: sinon.match.string
})
it 'should render with the user email', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
email: req.user.email
})
it 'should render with the user given name', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
name: {
first: req.user.givenName
}
})
it 'should render with the user family name', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
name: {
last: req.user.familyName
}
})
it 'should render with the verification url', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
verifyURL: sinon.match.string
})
it 'should continue', ->
next.should.have.been.called
next.should.not.have.been.calledWith sinon.match.any
| 201438 | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
chai.use sinonChai
chai.should()
mailer = require '../../../boot/mailer'
fakeMailer =
sendMail: (tmpl, loc, opts, cb) ->
cb()
{sendVerificationEmail} = require '../../../oidc'
OneTimeToken = require '../../../models/OneTimeToken'
describe 'Send Verification Email', ->
before ->
sinon.stub(mailer, 'getMailer').returns(fakeMailer)
after ->
mailer.getMailer.restore()
{req,res,next} = {}
describe 'when not requested', ->
before ->
req =
provider:
emailVerification:
enable: false
sendVerificationEmail: false
res = {}
next = sinon.spy()
sinon.spy(OneTimeToken, 'issue')
sinon.spy(fakeMailer, 'sendMail')
sendVerificationEmail req, res, next
after ->
OneTimeToken.issue.restore()
fakeMailer.sendMail.restore()
it 'should continue', ->
next.should.have.been.called
it 'should not issue a OneTimeToken', ->
OneTimeToken.issue.should.not.have.been.called
it 'should not send an email', ->
fakeMailer.sendMail.should.not.have.been.called
describe 'when requested', ->
before ->
req =
connectParams:
redirect_uri: 'https://example.com/callback'
client_id: 'client-uuid'
response_type: 'id_token token'
scope: 'openid profile'
provider:
emailVerification:
enable: true
sendVerificationEmail: true
user:
_id: 'uuid'
email: '<EMAIL>'
givenName: '<NAME>'
familyName: '<NAME>'
res = {}
next = sinon.spy()
sinon.stub(OneTimeToken, 'issue')
.callsArgWith(1, null, new OneTimeToken {
sub: req.user._id
ttl: 3600 * 24 * 7
use: 'emailVerification'
})
sinon.stub(fakeMailer, 'sendMail').callsArgWith 3, null, null
sendVerificationEmail req, res, next
after ->
OneTimeToken.issue.restore()
fakeMailer.sendMail.restore()
it 'should issue a token to the user', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
sub: req.user._id
})
it 'should issue an expiring token', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
ttl: sinon.match.number
})
it 'should issue a token for email verification', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
use: 'emailVerification'
})
it 'should send to the user', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match.object, sinon.match({
to: req.user.email
})
it 'should provide a subject', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match.object, sinon.match({
subject: sinon.match.string
})
it 'should render with the user email', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
email: req.user.email
})
it 'should render with the user given name', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
name: {
first: req.user.givenName
}
})
it 'should render with the user family name', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
name: {
last: req.user.familyName
}
})
it 'should render with the verification url', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
verifyURL: sinon.match.string
})
it 'should continue', ->
next.should.have.been.called
next.should.not.have.been.calledWith sinon.match.any
| true | chai = require 'chai'
sinon = require 'sinon'
sinonChai = require 'sinon-chai'
expect = chai.expect
chai.use sinonChai
chai.should()
mailer = require '../../../boot/mailer'
fakeMailer =
sendMail: (tmpl, loc, opts, cb) ->
cb()
{sendVerificationEmail} = require '../../../oidc'
OneTimeToken = require '../../../models/OneTimeToken'
describe 'Send Verification Email', ->
before ->
sinon.stub(mailer, 'getMailer').returns(fakeMailer)
after ->
mailer.getMailer.restore()
{req,res,next} = {}
describe 'when not requested', ->
before ->
req =
provider:
emailVerification:
enable: false
sendVerificationEmail: false
res = {}
next = sinon.spy()
sinon.spy(OneTimeToken, 'issue')
sinon.spy(fakeMailer, 'sendMail')
sendVerificationEmail req, res, next
after ->
OneTimeToken.issue.restore()
fakeMailer.sendMail.restore()
it 'should continue', ->
next.should.have.been.called
it 'should not issue a OneTimeToken', ->
OneTimeToken.issue.should.not.have.been.called
it 'should not send an email', ->
fakeMailer.sendMail.should.not.have.been.called
describe 'when requested', ->
before ->
req =
connectParams:
redirect_uri: 'https://example.com/callback'
client_id: 'client-uuid'
response_type: 'id_token token'
scope: 'openid profile'
provider:
emailVerification:
enable: true
sendVerificationEmail: true
user:
_id: 'uuid'
email: 'PI:EMAIL:<EMAIL>END_PI'
givenName: 'PI:NAME:<NAME>END_PI'
familyName: 'PI:NAME:<NAME>END_PI'
res = {}
next = sinon.spy()
sinon.stub(OneTimeToken, 'issue')
.callsArgWith(1, null, new OneTimeToken {
sub: req.user._id
ttl: 3600 * 24 * 7
use: 'emailVerification'
})
sinon.stub(fakeMailer, 'sendMail').callsArgWith 3, null, null
sendVerificationEmail req, res, next
after ->
OneTimeToken.issue.restore()
fakeMailer.sendMail.restore()
it 'should issue a token to the user', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
sub: req.user._id
})
it 'should issue an expiring token', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
ttl: sinon.match.number
})
it 'should issue a token for email verification', ->
OneTimeToken.issue.should.have.been.calledWith sinon.match({
use: 'emailVerification'
})
it 'should send to the user', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match.object, sinon.match({
to: req.user.email
})
it 'should provide a subject', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match.object, sinon.match({
subject: sinon.match.string
})
it 'should render with the user email', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
email: req.user.email
})
it 'should render with the user given name', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
name: {
first: req.user.givenName
}
})
it 'should render with the user family name', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
name: {
last: req.user.familyName
}
})
it 'should render with the verification url', ->
fakeMailer.sendMail.should.have.been
.calledWith 'verifyEmail', sinon.match({
verifyURL: sinon.match.string
})
it 'should continue', ->
next.should.have.been.called
next.should.not.have.been.calledWith sinon.match.any
|
[
{
"context": "js.highlightBlock(el, \" \")\n\nBase64 =\n _keyStr: \"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\"\n encode: (input) ->\n output = \"\"\n chr1 = u",
"end": 5320,
"score": 0.9996522068977356,
"start": 5254,
"tag": "KEY",
"value": "ABCDEFGHIJKLMNOP... | vendor/DocumentUp/src/browser/documentup.coffee | afeld/backbone-nested | 107 | class window.DocumentUp
# Base template that'll lie in the <body> of the page
@template = (locals)->
"""
<nav id="nav">
<header>
<a href="#" id="logo">#{locals.name}</a>
</header>
<ul id="sections">
</ul>
</nav>
<div id="content">
<div id="loader">
Loading documentation...
</div>
</div>
"""
# Decent defaults
@defaults =
color: "#369"
twitter: null
issues: true
travis: false
# Documentation method
@document = (@options)->
if "string" == typeof @options
repo = @options
@options =
repo: repo
if !@options or !@options.repo or !/\//.test(@options.repo)
throw new Error("Repository required with format: username/repository")
@options[key] = value for key, value of @defaults when !@options[key]
@options.name ||= @options.repo.replace(/.+\//, "")
# Prepare layout
$.domReady =>
$("title").text(@options.name)
$("body").html @template(@options)
$("head").append """
<style type="text/css">
a {color: #{@options.color}}
</style>
"""
$nav = $("#nav")
$nav.append """
<div id="github" class="extra">
<a href="https://github.com/#{@options.repo}">Source on Github</a>
</div>
"""
if @options.issues
$nav.append """
<div id="github-issues" class="extra">
<a href="https://github.com/#{@options.repo}/issues">Issues</a>
</div>
"""
if @options.travis
$nav.append """
<div id="travis" class="extra">
<a href="http://travis-ci.org/#{@options.repo}">
<img src="https://secure.travis-ci.org/#{@options.repo}.png">
</a>
</div>
"""
if @options.twitter
@options.twitter = [@options.twitter] unless @options.twitter instanceof Array
for twitter in @options.twitter
twitter = twitter.replace("@", "")
extra = $("<div class='extra twitter'>")
iframe = $('<iframe allowtransparency="true" frameborder="0" scrolling="no" style="width:162px; height:20px;">')
iframe.attr "src", "https://platform.twitter.com/widgets/follow_button.html?screen_name=#{twitter}&show_count=false"
extra.append(iframe)
$nav.append extra
@getReadme (err, @html)=>
return throw err if err
$.domReady =>
@renderContent()
@getReadme = (callback)->
using_cache = false
if html = localStorage.getItem(@options.repo + ":cached_content")
callback(null, html)
@usingCache = true
$.ajax
url: "https://api.github.com/repos/#{@options.repo}/git/trees/master?callback=?"
type: "jsonp"
success: (resp)=>
readme_sha = obj.sha for obj in resp.data.tree when /readme/i.test(obj.path)
last_sha = localStorage.getItem(@options.repo + ":readme_sha")
if readme_sha != last_sha
$.ajax
url: "https://api.github.com/repos/#{@options.repo}/git/blobs/#{readme_sha}?callback=?"
type: "jsonp"
success: (resp)=>
html = marked(Base64.decode(resp.data.content))
localStorage.setItem(@options.repo + ":cached_content", html)
localStorage.setItem(@options.repo + ":readme_sha", readme_sha)
return callback(null, html) unless @usingCache
$.domReady ->
# Show a link to tell the user to refresh his browser to get
# the latest version of the readme
refresh_link = $("<a id='refresh' href='#'>There's a new version of the documentation<br>Click here or refresh to see it.</a>")
$("body").append(refresh_link)
refresh_link.bind "click", (event)=>
event.preventDefault()
callback(null, html)
refresh_link.remove()
@renderContent = ->
# Populate HTML content
$content = $("#content")
$content.html @html
# Generate the navigation tree with the document structure
current_section = 0
current_subsection = 0
$sections = $("#sections")
$sections.empty()
$("h2, h3").each (el)->
if el.tagName == "H2"
current_subsection = 0
current_section++
el.id = section_id = "section-#{current_section}"
$sections.append """
<li id="for-#{section_id}">
<a href="##{section_id}">#{el.textContent}</a>
</li>
"""
else if el.tagName == "H3"
current_subsection++
el.id = section_id = "section-#{current_section}-#{current_subsection}"
$subsection = $("#for-section-#{current_section} ul")
unless $subsection.length
$("#for-section-#{current_section}").append("<ul></ul>")
$subsection = $("#for-section-#{current_section} ul")
$subsection.append """
<li id="for-#{section_id}">
<a href="##{section_id}">#{el.textContent}</a>
</li>
"""
# Highlight the code bits:
$("pre code").each (el)->
hljs.highlightBlock(el, " ")
Base64 =
_keyStr: "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="
encode: (input) ->
output = ""
chr1 = undefined
chr2 = undefined
chr3 = undefined
enc1 = undefined
enc2 = undefined
enc3 = undefined
enc4 = undefined
i = 0
input = Base64._utf8_encode(input)
while i < input.length
chr1 = input.charCodeAt(i++)
chr2 = input.charCodeAt(i++)
chr3 = input.charCodeAt(i++)
enc1 = chr1 >> 2
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4)
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6)
enc4 = chr3 & 63
if isNaN(chr2)
enc3 = enc4 = 64
else enc4 = 64 if isNaN(chr3)
output = output + @_keyStr.charAt(enc1) + @_keyStr.charAt(enc2) + @_keyStr.charAt(enc3) + @_keyStr.charAt(enc4)
output
decode: (input) ->
output = ""
chr1 = undefined
chr2 = undefined
chr3 = undefined
enc1 = undefined
enc2 = undefined
enc3 = undefined
enc4 = undefined
i = 0
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "")
while i < input.length
enc1 = @_keyStr.indexOf(input.charAt(i++))
enc2 = @_keyStr.indexOf(input.charAt(i++))
enc3 = @_keyStr.indexOf(input.charAt(i++))
enc4 = @_keyStr.indexOf(input.charAt(i++))
chr1 = (enc1 << 2) | (enc2 >> 4)
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)
chr3 = ((enc3 & 3) << 6) | enc4
output = output + String.fromCharCode(chr1)
output = output + String.fromCharCode(chr2) unless enc3 is 64
output = output + String.fromCharCode(chr3) unless enc4 is 64
output = Base64._utf8_decode(output)
output
_utf8_encode: (string) ->
string = string.replace(/\r\n/g, "\n")
utftext = ""
n = 0
while n < string.length
c = string.charCodeAt(n)
if c < 128
utftext += String.fromCharCode(c)
else if (c > 127) and (c < 2048)
utftext += String.fromCharCode((c >> 6) | 192)
utftext += String.fromCharCode((c & 63) | 128)
else
utftext += String.fromCharCode((c >> 12) | 224)
utftext += String.fromCharCode(((c >> 6) & 63) | 128)
utftext += String.fromCharCode((c & 63) | 128)
n++
utftext
_utf8_decode: (utftext) ->
string = ""
i = 0
c = c1 = c2 = 0
while i < utftext.length
c = utftext.charCodeAt(i)
if c < 128
string += String.fromCharCode(c)
i++
else if (c > 191) and (c < 224)
c2 = utftext.charCodeAt(i + 1)
string += String.fromCharCode(((c & 31) << 6) | (c2 & 63))
i += 2
else
c2 = utftext.charCodeAt(i + 1)
c3 = utftext.charCodeAt(i + 2)
string += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))
i += 3
string
# decode64 = (input) ->
# output = ""
# chr1 = undefined
# chr2 = undefined
# chr3 = ""
# enc1 = undefined
# enc2 = undefined
# enc3 = undefined
# enc4 = ""
# i = 0
# input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "")
# loop
# enc1 = keyStr.indexOf(input.charAt(i++))
# enc2 = keyStr.indexOf(input.charAt(i++))
# enc3 = keyStr.indexOf(input.charAt(i++))
# enc4 = keyStr.indexOf(input.charAt(i++))
# chr1 = (enc1 << 2) | (enc2 >> 4)
# chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)
# chr3 = ((enc3 & 3) << 6) | enc4
# output = output + String.fromCharCode(chr1)
# output = output + String.fromCharCode(chr2) unless enc3 is 64
# output = output + String.fromCharCode(chr3) unless enc4 is 64
# chr1 = chr2 = chr3 = ""
# enc1 = enc2 = enc3 = enc4 = ""
# break unless i < input.length
# unescape output
# keyStr = "ABCDEFGHIJKLMNOP" + "QRSTUVWXYZabcdef" + "ghijklmnopqrstuv" + "wxyz0123456789+/" + "="
| 134753 | class window.DocumentUp
# Base template that'll lie in the <body> of the page
@template = (locals)->
"""
<nav id="nav">
<header>
<a href="#" id="logo">#{locals.name}</a>
</header>
<ul id="sections">
</ul>
</nav>
<div id="content">
<div id="loader">
Loading documentation...
</div>
</div>
"""
# Decent defaults
@defaults =
color: "#369"
twitter: null
issues: true
travis: false
# Documentation method
@document = (@options)->
if "string" == typeof @options
repo = @options
@options =
repo: repo
if !@options or !@options.repo or !/\//.test(@options.repo)
throw new Error("Repository required with format: username/repository")
@options[key] = value for key, value of @defaults when !@options[key]
@options.name ||= @options.repo.replace(/.+\//, "")
# Prepare layout
$.domReady =>
$("title").text(@options.name)
$("body").html @template(@options)
$("head").append """
<style type="text/css">
a {color: #{@options.color}}
</style>
"""
$nav = $("#nav")
$nav.append """
<div id="github" class="extra">
<a href="https://github.com/#{@options.repo}">Source on Github</a>
</div>
"""
if @options.issues
$nav.append """
<div id="github-issues" class="extra">
<a href="https://github.com/#{@options.repo}/issues">Issues</a>
</div>
"""
if @options.travis
$nav.append """
<div id="travis" class="extra">
<a href="http://travis-ci.org/#{@options.repo}">
<img src="https://secure.travis-ci.org/#{@options.repo}.png">
</a>
</div>
"""
if @options.twitter
@options.twitter = [@options.twitter] unless @options.twitter instanceof Array
for twitter in @options.twitter
twitter = twitter.replace("@", "")
extra = $("<div class='extra twitter'>")
iframe = $('<iframe allowtransparency="true" frameborder="0" scrolling="no" style="width:162px; height:20px;">')
iframe.attr "src", "https://platform.twitter.com/widgets/follow_button.html?screen_name=#{twitter}&show_count=false"
extra.append(iframe)
$nav.append extra
@getReadme (err, @html)=>
return throw err if err
$.domReady =>
@renderContent()
@getReadme = (callback)->
using_cache = false
if html = localStorage.getItem(@options.repo + ":cached_content")
callback(null, html)
@usingCache = true
$.ajax
url: "https://api.github.com/repos/#{@options.repo}/git/trees/master?callback=?"
type: "jsonp"
success: (resp)=>
readme_sha = obj.sha for obj in resp.data.tree when /readme/i.test(obj.path)
last_sha = localStorage.getItem(@options.repo + ":readme_sha")
if readme_sha != last_sha
$.ajax
url: "https://api.github.com/repos/#{@options.repo}/git/blobs/#{readme_sha}?callback=?"
type: "jsonp"
success: (resp)=>
html = marked(Base64.decode(resp.data.content))
localStorage.setItem(@options.repo + ":cached_content", html)
localStorage.setItem(@options.repo + ":readme_sha", readme_sha)
return callback(null, html) unless @usingCache
$.domReady ->
# Show a link to tell the user to refresh his browser to get
# the latest version of the readme
refresh_link = $("<a id='refresh' href='#'>There's a new version of the documentation<br>Click here or refresh to see it.</a>")
$("body").append(refresh_link)
refresh_link.bind "click", (event)=>
event.preventDefault()
callback(null, html)
refresh_link.remove()
@renderContent = ->
# Populate HTML content
$content = $("#content")
$content.html @html
# Generate the navigation tree with the document structure
current_section = 0
current_subsection = 0
$sections = $("#sections")
$sections.empty()
$("h2, h3").each (el)->
if el.tagName == "H2"
current_subsection = 0
current_section++
el.id = section_id = "section-#{current_section}"
$sections.append """
<li id="for-#{section_id}">
<a href="##{section_id}">#{el.textContent}</a>
</li>
"""
else if el.tagName == "H3"
current_subsection++
el.id = section_id = "section-#{current_section}-#{current_subsection}"
$subsection = $("#for-section-#{current_section} ul")
unless $subsection.length
$("#for-section-#{current_section}").append("<ul></ul>")
$subsection = $("#for-section-#{current_section} ul")
$subsection.append """
<li id="for-#{section_id}">
<a href="##{section_id}">#{el.textContent}</a>
</li>
"""
# Highlight the code bits:
$("pre code").each (el)->
hljs.highlightBlock(el, " ")
Base64 =
_keyStr: "<KEY>
encode: (input) ->
output = ""
chr1 = undefined
chr2 = undefined
chr3 = undefined
enc1 = undefined
enc2 = undefined
enc3 = undefined
enc4 = undefined
i = 0
input = Base64._utf8_encode(input)
while i < input.length
chr1 = input.charCodeAt(i++)
chr2 = input.charCodeAt(i++)
chr3 = input.charCodeAt(i++)
enc1 = chr1 >> 2
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4)
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6)
enc4 = chr3 & 63
if isNaN(chr2)
enc3 = enc4 = 64
else enc4 = 64 if isNaN(chr3)
output = output + @_keyStr.charAt(enc1) + @_keyStr.charAt(enc2) + @_keyStr.charAt(enc3) + @_keyStr.charAt(enc4)
output
decode: (input) ->
output = ""
chr1 = undefined
chr2 = undefined
chr3 = undefined
enc1 = undefined
enc2 = undefined
enc3 = undefined
enc4 = undefined
i = 0
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "")
while i < input.length
enc1 = @_keyStr.indexOf(input.charAt(i++))
enc2 = @_keyStr.indexOf(input.charAt(i++))
enc3 = @_keyStr.indexOf(input.charAt(i++))
enc4 = @_keyStr.indexOf(input.charAt(i++))
chr1 = (enc1 << 2) | (enc2 >> 4)
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)
chr3 = ((enc3 & 3) << 6) | enc4
output = output + String.fromCharCode(chr1)
output = output + String.fromCharCode(chr2) unless enc3 is 64
output = output + String.fromCharCode(chr3) unless enc4 is 64
output = Base64._utf8_decode(output)
output
_utf8_encode: (string) ->
string = string.replace(/\r\n/g, "\n")
utftext = ""
n = 0
while n < string.length
c = string.charCodeAt(n)
if c < 128
utftext += String.fromCharCode(c)
else if (c > 127) and (c < 2048)
utftext += String.fromCharCode((c >> 6) | 192)
utftext += String.fromCharCode((c & 63) | 128)
else
utftext += String.fromCharCode((c >> 12) | 224)
utftext += String.fromCharCode(((c >> 6) & 63) | 128)
utftext += String.fromCharCode((c & 63) | 128)
n++
utftext
_utf8_decode: (utftext) ->
string = ""
i = 0
c = c1 = c2 = 0
while i < utftext.length
c = utftext.charCodeAt(i)
if c < 128
string += String.fromCharCode(c)
i++
else if (c > 191) and (c < 224)
c2 = utftext.charCodeAt(i + 1)
string += String.fromCharCode(((c & 31) << 6) | (c2 & 63))
i += 2
else
c2 = utftext.charCodeAt(i + 1)
c3 = utftext.charCodeAt(i + 2)
string += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))
i += 3
string
# decode64 = (input) ->
# output = ""
# chr1 = undefined
# chr2 = undefined
# chr3 = ""
# enc1 = undefined
# enc2 = undefined
# enc3 = undefined
# enc4 = ""
# i = 0
# input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "")
# loop
# enc1 = keyStr.indexOf(input.charAt(i++))
# enc2 = keyStr.indexOf(input.charAt(i++))
# enc3 = keyStr.indexOf(input.charAt(i++))
# enc4 = keyStr.indexOf(input.charAt(i++))
# chr1 = (enc1 << 2) | (enc2 >> 4)
# chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)
# chr3 = ((enc3 & 3) << 6) | enc4
# output = output + String.fromCharCode(chr1)
# output = output + String.fromCharCode(chr2) unless enc3 is 64
# output = output + String.fromCharCode(chr3) unless enc4 is 64
# chr1 = chr2 = chr3 = ""
# enc1 = enc2 = enc3 = enc4 = ""
# break unless i < input.length
# unescape output
# keyStr = <KEY> <KEY>
| true | class window.DocumentUp
# Base template that'll lie in the <body> of the page
@template = (locals)->
"""
<nav id="nav">
<header>
<a href="#" id="logo">#{locals.name}</a>
</header>
<ul id="sections">
</ul>
</nav>
<div id="content">
<div id="loader">
Loading documentation...
</div>
</div>
"""
# Decent defaults
@defaults =
color: "#369"
twitter: null
issues: true
travis: false
# Documentation method
@document = (@options)->
if "string" == typeof @options
repo = @options
@options =
repo: repo
if !@options or !@options.repo or !/\//.test(@options.repo)
throw new Error("Repository required with format: username/repository")
@options[key] = value for key, value of @defaults when !@options[key]
@options.name ||= @options.repo.replace(/.+\//, "")
# Prepare layout
$.domReady =>
$("title").text(@options.name)
$("body").html @template(@options)
$("head").append """
<style type="text/css">
a {color: #{@options.color}}
</style>
"""
$nav = $("#nav")
$nav.append """
<div id="github" class="extra">
<a href="https://github.com/#{@options.repo}">Source on Github</a>
</div>
"""
if @options.issues
$nav.append """
<div id="github-issues" class="extra">
<a href="https://github.com/#{@options.repo}/issues">Issues</a>
</div>
"""
if @options.travis
$nav.append """
<div id="travis" class="extra">
<a href="http://travis-ci.org/#{@options.repo}">
<img src="https://secure.travis-ci.org/#{@options.repo}.png">
</a>
</div>
"""
if @options.twitter
@options.twitter = [@options.twitter] unless @options.twitter instanceof Array
for twitter in @options.twitter
twitter = twitter.replace("@", "")
extra = $("<div class='extra twitter'>")
iframe = $('<iframe allowtransparency="true" frameborder="0" scrolling="no" style="width:162px; height:20px;">')
iframe.attr "src", "https://platform.twitter.com/widgets/follow_button.html?screen_name=#{twitter}&show_count=false"
extra.append(iframe)
$nav.append extra
@getReadme (err, @html)=>
return throw err if err
$.domReady =>
@renderContent()
@getReadme = (callback)->
using_cache = false
if html = localStorage.getItem(@options.repo + ":cached_content")
callback(null, html)
@usingCache = true
$.ajax
url: "https://api.github.com/repos/#{@options.repo}/git/trees/master?callback=?"
type: "jsonp"
success: (resp)=>
readme_sha = obj.sha for obj in resp.data.tree when /readme/i.test(obj.path)
last_sha = localStorage.getItem(@options.repo + ":readme_sha")
if readme_sha != last_sha
$.ajax
url: "https://api.github.com/repos/#{@options.repo}/git/blobs/#{readme_sha}?callback=?"
type: "jsonp"
success: (resp)=>
html = marked(Base64.decode(resp.data.content))
localStorage.setItem(@options.repo + ":cached_content", html)
localStorage.setItem(@options.repo + ":readme_sha", readme_sha)
return callback(null, html) unless @usingCache
$.domReady ->
# Show a link to tell the user to refresh his browser to get
# the latest version of the readme
refresh_link = $("<a id='refresh' href='#'>There's a new version of the documentation<br>Click here or refresh to see it.</a>")
$("body").append(refresh_link)
refresh_link.bind "click", (event)=>
event.preventDefault()
callback(null, html)
refresh_link.remove()
@renderContent = ->
# Populate HTML content
$content = $("#content")
$content.html @html
# Generate the navigation tree with the document structure
current_section = 0
current_subsection = 0
$sections = $("#sections")
$sections.empty()
$("h2, h3").each (el)->
if el.tagName == "H2"
current_subsection = 0
current_section++
el.id = section_id = "section-#{current_section}"
$sections.append """
<li id="for-#{section_id}">
<a href="##{section_id}">#{el.textContent}</a>
</li>
"""
else if el.tagName == "H3"
current_subsection++
el.id = section_id = "section-#{current_section}-#{current_subsection}"
$subsection = $("#for-section-#{current_section} ul")
unless $subsection.length
$("#for-section-#{current_section}").append("<ul></ul>")
$subsection = $("#for-section-#{current_section} ul")
$subsection.append """
<li id="for-#{section_id}">
<a href="##{section_id}">#{el.textContent}</a>
</li>
"""
# Highlight the code bits:
$("pre code").each (el)->
hljs.highlightBlock(el, " ")
Base64 =
_keyStr: "PI:KEY:<KEY>END_PI
encode: (input) ->
output = ""
chr1 = undefined
chr2 = undefined
chr3 = undefined
enc1 = undefined
enc2 = undefined
enc3 = undefined
enc4 = undefined
i = 0
input = Base64._utf8_encode(input)
while i < input.length
chr1 = input.charCodeAt(i++)
chr2 = input.charCodeAt(i++)
chr3 = input.charCodeAt(i++)
enc1 = chr1 >> 2
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4)
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6)
enc4 = chr3 & 63
if isNaN(chr2)
enc3 = enc4 = 64
else enc4 = 64 if isNaN(chr3)
output = output + @_keyStr.charAt(enc1) + @_keyStr.charAt(enc2) + @_keyStr.charAt(enc3) + @_keyStr.charAt(enc4)
output
decode: (input) ->
output = ""
chr1 = undefined
chr2 = undefined
chr3 = undefined
enc1 = undefined
enc2 = undefined
enc3 = undefined
enc4 = undefined
i = 0
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "")
while i < input.length
enc1 = @_keyStr.indexOf(input.charAt(i++))
enc2 = @_keyStr.indexOf(input.charAt(i++))
enc3 = @_keyStr.indexOf(input.charAt(i++))
enc4 = @_keyStr.indexOf(input.charAt(i++))
chr1 = (enc1 << 2) | (enc2 >> 4)
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)
chr3 = ((enc3 & 3) << 6) | enc4
output = output + String.fromCharCode(chr1)
output = output + String.fromCharCode(chr2) unless enc3 is 64
output = output + String.fromCharCode(chr3) unless enc4 is 64
output = Base64._utf8_decode(output)
output
_utf8_encode: (string) ->
string = string.replace(/\r\n/g, "\n")
utftext = ""
n = 0
while n < string.length
c = string.charCodeAt(n)
if c < 128
utftext += String.fromCharCode(c)
else if (c > 127) and (c < 2048)
utftext += String.fromCharCode((c >> 6) | 192)
utftext += String.fromCharCode((c & 63) | 128)
else
utftext += String.fromCharCode((c >> 12) | 224)
utftext += String.fromCharCode(((c >> 6) & 63) | 128)
utftext += String.fromCharCode((c & 63) | 128)
n++
utftext
_utf8_decode: (utftext) ->
string = ""
i = 0
c = c1 = c2 = 0
while i < utftext.length
c = utftext.charCodeAt(i)
if c < 128
string += String.fromCharCode(c)
i++
else if (c > 191) and (c < 224)
c2 = utftext.charCodeAt(i + 1)
string += String.fromCharCode(((c & 31) << 6) | (c2 & 63))
i += 2
else
c2 = utftext.charCodeAt(i + 1)
c3 = utftext.charCodeAt(i + 2)
string += String.fromCharCode(((c & 15) << 12) | ((c2 & 63) << 6) | (c3 & 63))
i += 3
string
# decode64 = (input) ->
# output = ""
# chr1 = undefined
# chr2 = undefined
# chr3 = ""
# enc1 = undefined
# enc2 = undefined
# enc3 = undefined
# enc4 = ""
# i = 0
# input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "")
# loop
# enc1 = keyStr.indexOf(input.charAt(i++))
# enc2 = keyStr.indexOf(input.charAt(i++))
# enc3 = keyStr.indexOf(input.charAt(i++))
# enc4 = keyStr.indexOf(input.charAt(i++))
# chr1 = (enc1 << 2) | (enc2 >> 4)
# chr2 = ((enc2 & 15) << 4) | (enc3 >> 2)
# chr3 = ((enc3 & 3) << 6) | enc4
# output = output + String.fromCharCode(chr1)
# output = output + String.fromCharCode(chr2) unless enc3 is 64
# output = output + String.fromCharCode(chr3) unless enc4 is 64
# chr1 = chr2 = chr3 = ""
# enc1 = enc2 = enc3 = enc4 = ""
# break unless i < input.length
# unescape output
# keyStr = PI:KEY:<KEY>END_PI PI:KEY:<KEY>END_PI
|
[
{
"context": "graph\n @transactions = []\n\n genKey: (revId) -> \"#{@graph.properties.id}_#{revId}\"\n\n putTransaction:",
"end": 177,
"score": 0.954387366771698,
"start": 174,
"tag": "KEY",
"value": "\"#{"
},
{
"context": "s = []\n\n genKey: (revId) -> \"#{@graph.properties.id... | src/JournalStore.coffee | J-Zeitler/spwebapiflow-ui | 1 | noflo = require 'noflo'
class IDBJournalStore extends noflo.journal.JournalStore
constructor: (graph, @db) ->
super graph
@transactions = []
genKey: (revId) -> "#{@graph.properties.id}_#{revId}"
putTransaction: (revId, entries) ->
super revId, entries
trans = @db.transaction ['journals'], 'readwrite'
store = trans.objectStore 'journals'
# We're using add for writing, which will correctly fail if revId alreadyn exists
# for the graph
req = store.add
id: @genKey revId
graph: @graph.properties.id
revId: revId
entries: entries
@transactions[revId] = entries
super revId, entries
fetchTransaction: (revId) ->
return @transactions[revId]
init: (cb) ->
trans = @db.transaction ['journals']
store = trans.objectStore 'journals'
idx = store.index 'graph'
idx.openCursor().onsuccess = (event) =>
cursor = event.target.result
return cb() unless cursor
@transactions[cursor.value.revId] = cursor.value.entries
@lastRevision = cursor.value.revId if cursor.value.revId > @lastRevision
do cursor.continue
exports.IDBJournalStore = IDBJournalStore
| 134215 | noflo = require 'noflo'
class IDBJournalStore extends noflo.journal.JournalStore
constructor: (graph, @db) ->
super graph
@transactions = []
genKey: (revId) -> <KEY>@graph.properties.<KEY>
putTransaction: (revId, entries) ->
super revId, entries
trans = @db.transaction ['journals'], 'readwrite'
store = trans.objectStore 'journals'
# We're using add for writing, which will correctly fail if revId alreadyn exists
# for the graph
req = store.add
id: @genKey revId
graph: @graph.properties.id
revId: revId
entries: entries
@transactions[revId] = entries
super revId, entries
fetchTransaction: (revId) ->
return @transactions[revId]
init: (cb) ->
trans = @db.transaction ['journals']
store = trans.objectStore 'journals'
idx = store.index 'graph'
idx.openCursor().onsuccess = (event) =>
cursor = event.target.result
return cb() unless cursor
@transactions[cursor.value.revId] = cursor.value.entries
@lastRevision = cursor.value.revId if cursor.value.revId > @lastRevision
do cursor.continue
exports.IDBJournalStore = IDBJournalStore
| true | noflo = require 'noflo'
class IDBJournalStore extends noflo.journal.JournalStore
constructor: (graph, @db) ->
super graph
@transactions = []
genKey: (revId) -> PI:KEY:<KEY>END_PI@graph.properties.PI:KEY:<KEY>END_PI
putTransaction: (revId, entries) ->
super revId, entries
trans = @db.transaction ['journals'], 'readwrite'
store = trans.objectStore 'journals'
# We're using add for writing, which will correctly fail if revId alreadyn exists
# for the graph
req = store.add
id: @genKey revId
graph: @graph.properties.id
revId: revId
entries: entries
@transactions[revId] = entries
super revId, entries
fetchTransaction: (revId) ->
return @transactions[revId]
init: (cb) ->
trans = @db.transaction ['journals']
store = trans.objectStore 'journals'
idx = store.index 'graph'
idx.openCursor().onsuccess = (event) =>
cursor = event.target.result
return cb() unless cursor
@transactions[cursor.value.revId] = cursor.value.entries
@lastRevision = cursor.value.revId if cursor.value.revId > @lastRevision
do cursor.continue
exports.IDBJournalStore = IDBJournalStore
|
[
{
"context": "Print-Loop.\n# Based of CoffeeScript repl.coffee by Jeremy Ashkenas.\n\n# Open `stdin` and `stdout`\nstdin = process.ope",
"end": 92,
"score": 0.9998927712440491,
"start": 77,
"tag": "NAME",
"value": "Jeremy Ashkenas"
}
] | repl.coffee | tcr/syrup | 9 | # A very simple Read-Eval-Print-Loop.
# Based of CoffeeScript repl.coffee by Jeremy Ashkenas.
# Open `stdin` and `stdout`
stdin = process.openStdin()
stdout = process.stdout
# Require the **syrup** module to get access to the interpreter.
syrup = require './syrup'
readline = require 'readline'
{inspect} = require 'util'
{Script} = require 'vm'
Module = require 'module'
# REPL Setup
# Config
REPL_PROMPT = 'syrup> '
REPL_PROMPT_MULTILINE = '------> '
REPL_PROMPT_CONTINUATION = '.....> '
enableColours = no
unless process.platform is 'win32'
enableColours = not process.env.NODE_DISABLE_COLORS
# Log an error.
error = (err) ->
stdout.write (err.stack or err.toString()) + '\n'
# Make sure that uncaught exceptions don't kill the REPL.
process.on 'uncaughtException', error
# The current backlog of multi-line code.
backlog = ''
# Global context.
ctx = new syrup.DefaultContext
# The main REPL function. **run** is called every time a line of code is entered.
# Attempt to evaluate the command. If there's an exception, print it out instead
# of exiting.
run = (buffer) ->
buffer = buffer.replace /[\r\n]+$/, ""
if multilineMode
backlog += "#{buffer}\n"
repl.setPrompt REPL_PROMPT_CONTINUATION
repl.prompt()
return
if !buffer.toString().trim() and !backlog
repl.prompt()
return
code = backlog += buffer
if code[code.length - 1] is '\\'
backlog = "#{backlog[...-1]}\n"
repl.setPrompt REPL_PROMPT_CONTINUATION
repl.prompt()
return
repl.setPrompt REPL_PROMPT
backlog = ''
try
returnValue = syrup.eval code, ctx
repl.output.write "#{inspect returnValue, no, 2, enableColours}\n"
catch err
error err
repl.prompt()
if stdin.readable
# handle piped input
pipedInput = ''
repl =
prompt: -> stdout.write @_prompt
setPrompt: (p) -> @_prompt = p
input: stdin
output: stdout
on: ->
stdin.on 'data', (chunk) ->
pipedInput += chunk
stdin.on 'end', ->
for line in pipedInput.trim().split "\n"
stdout.write "#{line}\n"
run line
stdout.write '\n'
process.exit 0
else
# Create the REPL by listening to **stdin**.
repl = readline.createInterface stdin, stdout
multilineMode = off
# Handle multi-line mode switch
repl.input.on 'keypress', (char, key) ->
# test for Ctrl-v
return unless key and key.ctrl and not key.meta and not key.shift and key.name is 'v'
cursorPos = repl.cursor
repl.output.cursorTo 0
repl.output.clearLine 1
multilineMode = not multilineMode
backlog = ''
repl.setPrompt (newPrompt = if multilineMode then REPL_PROMPT_MULTILINE else REPL_PROMPT)
repl.prompt()
repl.output.cursorTo newPrompt.length + (repl.cursor = cursorPos)
# Handle Ctrl-d press at end of last line in multiline mode
repl.input.on 'keypress', (char, key) ->
return unless multilineMode and repl.line
# test for Ctrl-d
return unless key and key.ctrl and not key.meta and not key.shift and key.name is 'd'
multilineMode = off
repl._line()
repl.on 'attemptClose', ->
if multilineMode
multilineMode = off
repl.output.cursorTo 0
repl.output.clearLine 1
repl._onLine repl.line
return
if backlog
backlog = ''
repl.output.write '\n'
repl.setPrompt REPL_PROMPT
repl.prompt()
else
repl.close()
repl.on 'close', ->
repl.output.write '\n'
repl.input.destroy()
repl.on 'line', run
repl.setPrompt REPL_PROMPT
repl.prompt() | 214811 | # A very simple Read-Eval-Print-Loop.
# Based of CoffeeScript repl.coffee by <NAME>.
# Open `stdin` and `stdout`
stdin = process.openStdin()
stdout = process.stdout
# Require the **syrup** module to get access to the interpreter.
syrup = require './syrup'
readline = require 'readline'
{inspect} = require 'util'
{Script} = require 'vm'
Module = require 'module'
# REPL Setup
# Config
REPL_PROMPT = 'syrup> '
REPL_PROMPT_MULTILINE = '------> '
REPL_PROMPT_CONTINUATION = '.....> '
enableColours = no
unless process.platform is 'win32'
enableColours = not process.env.NODE_DISABLE_COLORS
# Log an error.
error = (err) ->
stdout.write (err.stack or err.toString()) + '\n'
# Make sure that uncaught exceptions don't kill the REPL.
process.on 'uncaughtException', error
# The current backlog of multi-line code.
backlog = ''
# Global context.
ctx = new syrup.DefaultContext
# The main REPL function. **run** is called every time a line of code is entered.
# Attempt to evaluate the command. If there's an exception, print it out instead
# of exiting.
run = (buffer) ->
buffer = buffer.replace /[\r\n]+$/, ""
if multilineMode
backlog += "#{buffer}\n"
repl.setPrompt REPL_PROMPT_CONTINUATION
repl.prompt()
return
if !buffer.toString().trim() and !backlog
repl.prompt()
return
code = backlog += buffer
if code[code.length - 1] is '\\'
backlog = "#{backlog[...-1]}\n"
repl.setPrompt REPL_PROMPT_CONTINUATION
repl.prompt()
return
repl.setPrompt REPL_PROMPT
backlog = ''
try
returnValue = syrup.eval code, ctx
repl.output.write "#{inspect returnValue, no, 2, enableColours}\n"
catch err
error err
repl.prompt()
if stdin.readable
# handle piped input
pipedInput = ''
repl =
prompt: -> stdout.write @_prompt
setPrompt: (p) -> @_prompt = p
input: stdin
output: stdout
on: ->
stdin.on 'data', (chunk) ->
pipedInput += chunk
stdin.on 'end', ->
for line in pipedInput.trim().split "\n"
stdout.write "#{line}\n"
run line
stdout.write '\n'
process.exit 0
else
# Create the REPL by listening to **stdin**.
repl = readline.createInterface stdin, stdout
multilineMode = off
# Handle multi-line mode switch
repl.input.on 'keypress', (char, key) ->
# test for Ctrl-v
return unless key and key.ctrl and not key.meta and not key.shift and key.name is 'v'
cursorPos = repl.cursor
repl.output.cursorTo 0
repl.output.clearLine 1
multilineMode = not multilineMode
backlog = ''
repl.setPrompt (newPrompt = if multilineMode then REPL_PROMPT_MULTILINE else REPL_PROMPT)
repl.prompt()
repl.output.cursorTo newPrompt.length + (repl.cursor = cursorPos)
# Handle Ctrl-d press at end of last line in multiline mode
repl.input.on 'keypress', (char, key) ->
return unless multilineMode and repl.line
# test for Ctrl-d
return unless key and key.ctrl and not key.meta and not key.shift and key.name is 'd'
multilineMode = off
repl._line()
repl.on 'attemptClose', ->
if multilineMode
multilineMode = off
repl.output.cursorTo 0
repl.output.clearLine 1
repl._onLine repl.line
return
if backlog
backlog = ''
repl.output.write '\n'
repl.setPrompt REPL_PROMPT
repl.prompt()
else
repl.close()
repl.on 'close', ->
repl.output.write '\n'
repl.input.destroy()
repl.on 'line', run
repl.setPrompt REPL_PROMPT
repl.prompt() | true | # A very simple Read-Eval-Print-Loop.
# Based of CoffeeScript repl.coffee by PI:NAME:<NAME>END_PI.
# Open `stdin` and `stdout`
stdin = process.openStdin()
stdout = process.stdout
# Require the **syrup** module to get access to the interpreter.
syrup = require './syrup'
readline = require 'readline'
{inspect} = require 'util'
{Script} = require 'vm'
Module = require 'module'
# REPL Setup
# Config
REPL_PROMPT = 'syrup> '
REPL_PROMPT_MULTILINE = '------> '
REPL_PROMPT_CONTINUATION = '.....> '
enableColours = no
unless process.platform is 'win32'
enableColours = not process.env.NODE_DISABLE_COLORS
# Log an error.
error = (err) ->
stdout.write (err.stack or err.toString()) + '\n'
# Make sure that uncaught exceptions don't kill the REPL.
process.on 'uncaughtException', error
# The current backlog of multi-line code.
backlog = ''
# Global context.
ctx = new syrup.DefaultContext
# The main REPL function. **run** is called every time a line of code is entered.
# Attempt to evaluate the command. If there's an exception, print it out instead
# of exiting.
run = (buffer) ->
buffer = buffer.replace /[\r\n]+$/, ""
if multilineMode
backlog += "#{buffer}\n"
repl.setPrompt REPL_PROMPT_CONTINUATION
repl.prompt()
return
if !buffer.toString().trim() and !backlog
repl.prompt()
return
code = backlog += buffer
if code[code.length - 1] is '\\'
backlog = "#{backlog[...-1]}\n"
repl.setPrompt REPL_PROMPT_CONTINUATION
repl.prompt()
return
repl.setPrompt REPL_PROMPT
backlog = ''
try
returnValue = syrup.eval code, ctx
repl.output.write "#{inspect returnValue, no, 2, enableColours}\n"
catch err
error err
repl.prompt()
if stdin.readable
# handle piped input
pipedInput = ''
repl =
prompt: -> stdout.write @_prompt
setPrompt: (p) -> @_prompt = p
input: stdin
output: stdout
on: ->
stdin.on 'data', (chunk) ->
pipedInput += chunk
stdin.on 'end', ->
for line in pipedInput.trim().split "\n"
stdout.write "#{line}\n"
run line
stdout.write '\n'
process.exit 0
else
# Create the REPL by listening to **stdin**.
repl = readline.createInterface stdin, stdout
multilineMode = off
# Handle multi-line mode switch
repl.input.on 'keypress', (char, key) ->
# test for Ctrl-v
return unless key and key.ctrl and not key.meta and not key.shift and key.name is 'v'
cursorPos = repl.cursor
repl.output.cursorTo 0
repl.output.clearLine 1
multilineMode = not multilineMode
backlog = ''
repl.setPrompt (newPrompt = if multilineMode then REPL_PROMPT_MULTILINE else REPL_PROMPT)
repl.prompt()
repl.output.cursorTo newPrompt.length + (repl.cursor = cursorPos)
# Handle Ctrl-d press at end of last line in multiline mode
repl.input.on 'keypress', (char, key) ->
return unless multilineMode and repl.line
# test for Ctrl-d
return unless key and key.ctrl and not key.meta and not key.shift and key.name is 'd'
multilineMode = off
repl._line()
repl.on 'attemptClose', ->
if multilineMode
multilineMode = off
repl.output.cursorTo 0
repl.output.clearLine 1
repl._onLine repl.line
return
if backlog
backlog = ''
repl.output.write '\n'
repl.setPrompt REPL_PROMPT
repl.prompt()
else
repl.close()
repl.on 'close', ->
repl.output.write '\n'
repl.input.destroy()
repl.on 'line', run
repl.setPrompt REPL_PROMPT
repl.prompt() |
[
{
"context": "anID = @product.planID\n stripe.token = e.token.id\n me.set 'stripe', stripe\n\n @listenToOnce me",
"end": 2683,
"score": 0.44367972016334534,
"start": 2681,
"tag": "PASSWORD",
"value": "id"
}
] | app/views/play/modal/SubscribeModal.coffee | FedericoTomas/codecombat | 1 | ModalView = require 'views/core/ModalView'
template = require 'templates/play/modal/subscribe-modal'
stripeHandler = require 'core/services/stripe'
utils = require 'core/utils'
AuthModal = require 'views/core/AuthModal'
module.exports = class SubscribeModal extends ModalView
id: 'subscribe-modal'
template: template
plain: true
closesOnClickOutside: false
product:
amount: 999
planID: 'basic'
subscriptions:
'stripe:received-token': 'onStripeReceivedToken'
events:
'click .purchase-button': 'onClickPurchaseButton'
'click #close-modal': 'hide'
constructor: (options) ->
super(options)
@state = 'standby'
getRenderData: ->
c = super()
c.state = @state
c.stateMessage = @stateMessage
#c.price = @product.amount / 100
c.price = 5.99 # Sale
c.BTest = me.getSubscribeCopyGroup() is 'new'
return c
afterRender: ->
super()
popoverTitle = $.i18n.t 'subscribe.parents_title'
popoverContent = "<p>" + $.i18n.t('subscribe.parents_blurb1') + "</p>"
popoverContent += "<p>" + $.i18n.t('subscribe.parents_blurb2') + "</p>"
popoverContent += "<p>" + $.i18n.t('subscribe.parents_blurb3') + "</p>"
popoverContent = popoverContent.replace /9[.,]99/g, '5.99' # Sale
window.popoverContent = popoverContent
@$el.find('#parents-info').popover(
animation: true
html: true
placement: 'top'
trigger: 'hover'
title: popoverTitle
content: popoverContent
container: @$el
).on 'shown.bs.popover', =>
application.tracker?.trackEvent 'Subscription parent hover', {}
application.tracker?.trackPageView "subscription/parent-hover", ['Google Analytics']
onClickPurchaseButton: (e) ->
@playSound 'menu-button-click'
return @openModalView new AuthModal() if me.get('anonymous')
application.tracker?.trackEvent 'Started subscription purchase', {}
application.tracker?.trackPageView "subscription/start-purchase", ['Google Analytics']
options = {
description: $.i18n.t('subscribe.stripe_description')
amount: @product.amount
}
# SALE LOGIC
# overwrite amount with sale price
# maybe also put in another description with details about how long it lasts, etc
# NOTE: Do not change this price without updating the context.price in getRenderData
options = {
description: 'Monthly Subscription (HoC sale)'
amount: 599
}
@purchasedAmount = options.amount
stripeHandler.open(options)
onStripeReceivedToken: (e) ->
@state = 'purchasing'
@render()
stripe = _.clone(me.get('stripe') ? {})
stripe.planID = @product.planID
stripe.token = e.token.id
me.set 'stripe', stripe
@listenToOnce me, 'sync', @onSubscriptionSuccess
@listenToOnce me, 'error', @onSubscriptionError
me.patch({headers: {'X-Change-Plan': 'true'}})
onSubscriptionSuccess: ->
application.tracker?.trackEvent 'Finished subscription purchase', revenue: @purchasedAmount / 100
application.tracker?.trackPageView "subscription/finish-purchase", ['Google Analytics']
Backbone.Mediator.publish 'subscribe-modal:subscribed', {}
@playSound 'victory'
@hide()
onSubscriptionError: (user, response, options) ->
console.error 'We got an error subscribing with Stripe from our server:', response
stripe = me.get('stripe') ? {}
delete stripe.token
delete stripe.planID
xhr = options.xhr
if xhr.status is 402
@state = 'declined'
else
@state = 'unknown_error'
@stateMessage = "#{xhr.status}: #{xhr.responseText}"
@render()
| 215279 | ModalView = require 'views/core/ModalView'
template = require 'templates/play/modal/subscribe-modal'
stripeHandler = require 'core/services/stripe'
utils = require 'core/utils'
AuthModal = require 'views/core/AuthModal'
module.exports = class SubscribeModal extends ModalView
id: 'subscribe-modal'
template: template
plain: true
closesOnClickOutside: false
product:
amount: 999
planID: 'basic'
subscriptions:
'stripe:received-token': 'onStripeReceivedToken'
events:
'click .purchase-button': 'onClickPurchaseButton'
'click #close-modal': 'hide'
constructor: (options) ->
super(options)
@state = 'standby'
getRenderData: ->
c = super()
c.state = @state
c.stateMessage = @stateMessage
#c.price = @product.amount / 100
c.price = 5.99 # Sale
c.BTest = me.getSubscribeCopyGroup() is 'new'
return c
afterRender: ->
super()
popoverTitle = $.i18n.t 'subscribe.parents_title'
popoverContent = "<p>" + $.i18n.t('subscribe.parents_blurb1') + "</p>"
popoverContent += "<p>" + $.i18n.t('subscribe.parents_blurb2') + "</p>"
popoverContent += "<p>" + $.i18n.t('subscribe.parents_blurb3') + "</p>"
popoverContent = popoverContent.replace /9[.,]99/g, '5.99' # Sale
window.popoverContent = popoverContent
@$el.find('#parents-info').popover(
animation: true
html: true
placement: 'top'
trigger: 'hover'
title: popoverTitle
content: popoverContent
container: @$el
).on 'shown.bs.popover', =>
application.tracker?.trackEvent 'Subscription parent hover', {}
application.tracker?.trackPageView "subscription/parent-hover", ['Google Analytics']
onClickPurchaseButton: (e) ->
@playSound 'menu-button-click'
return @openModalView new AuthModal() if me.get('anonymous')
application.tracker?.trackEvent 'Started subscription purchase', {}
application.tracker?.trackPageView "subscription/start-purchase", ['Google Analytics']
options = {
description: $.i18n.t('subscribe.stripe_description')
amount: @product.amount
}
# SALE LOGIC
# overwrite amount with sale price
# maybe also put in another description with details about how long it lasts, etc
# NOTE: Do not change this price without updating the context.price in getRenderData
options = {
description: 'Monthly Subscription (HoC sale)'
amount: 599
}
@purchasedAmount = options.amount
stripeHandler.open(options)
onStripeReceivedToken: (e) ->
@state = 'purchasing'
@render()
stripe = _.clone(me.get('stripe') ? {})
stripe.planID = @product.planID
stripe.token = e.token.<PASSWORD>
me.set 'stripe', stripe
@listenToOnce me, 'sync', @onSubscriptionSuccess
@listenToOnce me, 'error', @onSubscriptionError
me.patch({headers: {'X-Change-Plan': 'true'}})
onSubscriptionSuccess: ->
application.tracker?.trackEvent 'Finished subscription purchase', revenue: @purchasedAmount / 100
application.tracker?.trackPageView "subscription/finish-purchase", ['Google Analytics']
Backbone.Mediator.publish 'subscribe-modal:subscribed', {}
@playSound 'victory'
@hide()
onSubscriptionError: (user, response, options) ->
console.error 'We got an error subscribing with Stripe from our server:', response
stripe = me.get('stripe') ? {}
delete stripe.token
delete stripe.planID
xhr = options.xhr
if xhr.status is 402
@state = 'declined'
else
@state = 'unknown_error'
@stateMessage = "#{xhr.status}: #{xhr.responseText}"
@render()
| true | ModalView = require 'views/core/ModalView'
template = require 'templates/play/modal/subscribe-modal'
stripeHandler = require 'core/services/stripe'
utils = require 'core/utils'
AuthModal = require 'views/core/AuthModal'
module.exports = class SubscribeModal extends ModalView
id: 'subscribe-modal'
template: template
plain: true
closesOnClickOutside: false
product:
amount: 999
planID: 'basic'
subscriptions:
'stripe:received-token': 'onStripeReceivedToken'
events:
'click .purchase-button': 'onClickPurchaseButton'
'click #close-modal': 'hide'
constructor: (options) ->
super(options)
@state = 'standby'
getRenderData: ->
c = super()
c.state = @state
c.stateMessage = @stateMessage
#c.price = @product.amount / 100
c.price = 5.99 # Sale
c.BTest = me.getSubscribeCopyGroup() is 'new'
return c
afterRender: ->
super()
popoverTitle = $.i18n.t 'subscribe.parents_title'
popoverContent = "<p>" + $.i18n.t('subscribe.parents_blurb1') + "</p>"
popoverContent += "<p>" + $.i18n.t('subscribe.parents_blurb2') + "</p>"
popoverContent += "<p>" + $.i18n.t('subscribe.parents_blurb3') + "</p>"
popoverContent = popoverContent.replace /9[.,]99/g, '5.99' # Sale
window.popoverContent = popoverContent
@$el.find('#parents-info').popover(
animation: true
html: true
placement: 'top'
trigger: 'hover'
title: popoverTitle
content: popoverContent
container: @$el
).on 'shown.bs.popover', =>
application.tracker?.trackEvent 'Subscription parent hover', {}
application.tracker?.trackPageView "subscription/parent-hover", ['Google Analytics']
onClickPurchaseButton: (e) ->
@playSound 'menu-button-click'
return @openModalView new AuthModal() if me.get('anonymous')
application.tracker?.trackEvent 'Started subscription purchase', {}
application.tracker?.trackPageView "subscription/start-purchase", ['Google Analytics']
options = {
description: $.i18n.t('subscribe.stripe_description')
amount: @product.amount
}
# SALE LOGIC
# overwrite amount with sale price
# maybe also put in another description with details about how long it lasts, etc
# NOTE: Do not change this price without updating the context.price in getRenderData
options = {
description: 'Monthly Subscription (HoC sale)'
amount: 599
}
@purchasedAmount = options.amount
stripeHandler.open(options)
onStripeReceivedToken: (e) ->
@state = 'purchasing'
@render()
stripe = _.clone(me.get('stripe') ? {})
stripe.planID = @product.planID
stripe.token = e.token.PI:PASSWORD:<PASSWORD>END_PI
me.set 'stripe', stripe
@listenToOnce me, 'sync', @onSubscriptionSuccess
@listenToOnce me, 'error', @onSubscriptionError
me.patch({headers: {'X-Change-Plan': 'true'}})
onSubscriptionSuccess: ->
application.tracker?.trackEvent 'Finished subscription purchase', revenue: @purchasedAmount / 100
application.tracker?.trackPageView "subscription/finish-purchase", ['Google Analytics']
Backbone.Mediator.publish 'subscribe-modal:subscribed', {}
@playSound 'victory'
@hide()
onSubscriptionError: (user, response, options) ->
console.error 'We got an error subscribing with Stripe from our server:', response
stripe = me.get('stripe') ? {}
delete stripe.token
delete stripe.planID
xhr = options.xhr
if xhr.status is 402
@state = 'declined'
else
@state = 'unknown_error'
@stateMessage = "#{xhr.status}: #{xhr.responseText}"
@render()
|
[
{
"context": "allback\", (done) ->\n worker.process { name: \"asdf\" }, (err) ->\n expect(err).to.exist\n ",
"end": 9872,
"score": 0.951738178730011,
"start": 9868,
"tag": "NAME",
"value": "asdf"
}
] | test/test_worker.coffee | shakhar/monq | 0 | Async = require "async"
Sinon = require "sinon"
Queue = require "../src/queue"
Worker = require "../src/worker"
MongoClient = require("mongodb").MongoClient
RedisClient = require("redis").createClient()
{ expect } = require "chai"
uri = "mongodb://localhost:27017/monq_tests"
describe "Worker", ->
job = queues = worker = undefined
before (done) ->
MongoClient.connect uri, (err, @db) => done(err)
after (done) ->
@db.close done
beforeEach ->
job =
data: {}
complete: ->
fail: ->
queues = ["foo", "bar", "baz"].map (name) =>
new Queue { db: @db }, name
worker = new Worker queues
afterEach (done) ->
Async.parallel [
(next) -> RedisClient.flushdb next
(next) -> queues[0].collection.remove {}, next
], done
after (done) ->
RedisClient.quit done
it "has default polling interval", ->
expect(worker.interval).to.equal 5000
it "is an event emitter", (done) ->
worker.on "foo", (bar) ->
expect(bar).to.equal "bar"
done()
worker.emit "foo", "bar"
describe "when dequeuing", ->
foo = bar = baz = undefined
beforeEach ->
foo = Sinon.stub(worker.queues[0], "dequeue").yields()
bar = Sinon.stub(worker.queues[1], "dequeue").yields()
baz = Sinon.stub(worker.queues[2], "dequeue").yields()
worker.dequeue ->
worker.dequeue ->
worker.dequeue ->
worker.dequeue ->
it "cycles queues", ->
expect(foo.calledTwice).to.be.true
expect(foo.calledBefore(bar)).to.be.true
expect(bar.calledOnce).to.be.true
expect(bar.calledBefore(baz)).to.be.true
expect(baz.calledOnce).to.be.true
expect(baz.calledBefore(foo)).to.be.true
describe "when starting", ->
beforeEach ->
worker.poll = ->
describe "when job already dequeued", ->
beforeEach (done) ->
Async.series [
(next) -> worker.start next
(next) -> worker.queues[0].enqueue "foo", {}, next
(next) -> worker.queues[0].dequeue next
(next) ->
worker.queues[0].collection.findOne { status: "dequeued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
worker.stop()
next()
(next) -> worker.start next
], done
it "returns dequeued job to queue", (done) ->
worker.queues[0].collection.findOne { status: "queued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
done()
describe "when job from foreign queue already dequeued", ->
foreignQueue = undefined
beforeEach (done) ->
foreignQueue = new Queue { db: @db }, "foreign"
Async.series [
(next) -> worker.start next
(next) -> foreignQueue.enqueue "foo", {}, next
(next) -> foreignQueue.dequeue next
(next) ->
foreignQueue.collection.findOne { status: "dequeued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
worker.stop()
next()
(next) -> worker.start next
], done
it "does not return dequeued job to queue", (done) ->
foreignQueue.collection.find { status: "queued" }, (err, docs) ->
expect(err).to.not.exist
expect(docs[0]).to.not.exist
done()
describe "when polling", ->
describe "when error", ->
it "emits an 'error' event", (done) ->
error = new Error()
Sinon.stub(worker, "dequeue").yields error
worker.on "error", (err) ->
expect(err).to.equal error
done()
worker.start()
describe "when job is available", ->
work = undefined
beforeEach ->
work = Sinon.stub worker, "work"
Sinon.stub(worker.queues[0], "dequeue").yields null, job
it "works on the job", (done) ->
worker.start ->
expect(work.calledOnce).to.be.true
expect(work.getCall(0).args[0]).to.equal job
done()
it "emits 'dequeued' event", (done) ->
worker.on "dequeued", (j) ->
expect(j).to.equal job.data
done()
worker.start()
describe "when no job is available", ->
clock = undefined
beforeEach ->
clock = Sinon.useFakeTimers()
Sinon.stub(worker.queues[0], "dequeue").yields()
Sinon.stub(worker.queues[1], "dequeue").yields()
Sinon.stub(worker.queues[2], "dequeue").yields()
afterEach ->
clock.restore()
it "waits an interval before polling again", (done) ->
worker.start ->
poll = Sinon.spy worker, "poll"
clock.tick worker.interval
worker.stop()
expect(poll.calledOnce).to.be.true
done()
describe "when stopping with a job in progress", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync null, job
Sinon.stub(worker, "process").yields null, "foobar"
Sinon.stub(job, "complete").yields()
worker.start ->
worker.work job
done()
it "waits for the job to finish", (done) ->
expect(worker.working).to.be.true
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].calledOnce).to.be.true
# It doesn't get the stop signal until after the next dequeue is in motion
expect(dequeueStubs[1].calledOnce).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[2].calledOnce).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping during an empty dequeue", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "stops cleanly", (done) ->
expect(worker.working).to.be.true
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].called).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[1].called).to.be.false
expect(dequeueStubs[2].called).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping between polls", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "stops cleanly", (done) ->
expect(worker.working).to.be.true
worker.once "empty", ->
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].called).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[1].called).to.be.false
expect(dequeueStubs[2].called).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping twice", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "does not error", (done) ->
worker.stop ->
worker.stop()
done()
describe "when working", ->
describe "when processing fails", ->
error = fail = poll = undefined
beforeEach ->
error = new Error()
fail = Sinon.stub(job, "fail").yields()
poll = Sinon.spy worker, "poll"
Sinon.stub(worker, "process").yields error
it "fails the job", ->
worker.work job
expect(fail.calledOnce).to.be.true
expect(fail.getCall(0).args[0]).to.equal error
it "emits 'done' event", (done) ->
worker.on "done", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "emits 'failed' event", (done) ->
worker.on "failed", (data) ->
expect(data).to.equal job.data
done()
worker.work(job)
it "polls for a new job", ->
worker.work job
expect(poll.calledOnce).to.be.true
describe "when processing succeeds", ->
complete = poll = undefined
beforeEach ->
complete = Sinon.stub(job, "complete").yields()
poll = Sinon.spy worker, "poll"
Sinon.stub(worker, "process").yields null, "foobar"
it "completes the job", ->
worker.work job
expect(complete.calledOnce).to.be.true
expect(complete.getCall(0).args[0]).to.equal "foobar"
it "emits 'done' event", (done) ->
worker.on "done", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "emits 'complete' event", (done) ->
worker.on "complete", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "polls for a new job", ->
worker.work job
expect(poll.calledOnce).to.be.true
describe "when processing", ->
beforeEach ->
worker.register
example: (params, callback) -> callback(null, params)
it "passes job to registered callback", (done) ->
worker.process { name: "example", params: { foo: "bar" } }, (err, result) ->
expect(result).to.deep.equal { foo: "bar" }
done()
it "returns error if there is no registered callback", (done) ->
worker.process { name: "asdf" }, (err) ->
expect(err).to.exist
done()
describe "when locking", ->
beforeEach (done) ->
locked = true
queue = new Queue { db: @db }, "foo"
worker = new Worker [queue]
worker.register
example: (params, callback) -> callback(null, params)
worker.registerLock
example: (job, callback) ->
return callback() unless locked
locked = false
callback new Error("locked")
worker.poll = ->
Async.series [
(next) -> worker.start next
(next) -> worker.queues[0].enqueue "example", {}, next
], done
it "passes job to registered callback", (done) ->
Async.series [
(next) ->
worker.dequeue (err) ->
expect(err).to.exist
expect(err.message).to.equal "locked"
next()
(next) ->
worker.queues[0].collection.findOne (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "example"
expect(doc.status).to.equal "queued"
next()
(next) ->
worker.dequeue (err) ->
expect(err).to.not.exist
next()
(next) ->
worker.queues[0].collection.findOne (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "example"
expect(doc.status).to.equal "dequeued"
next()
], done
| 210224 | Async = require "async"
Sinon = require "sinon"
Queue = require "../src/queue"
Worker = require "../src/worker"
MongoClient = require("mongodb").MongoClient
RedisClient = require("redis").createClient()
{ expect } = require "chai"
uri = "mongodb://localhost:27017/monq_tests"
describe "Worker", ->
job = queues = worker = undefined
before (done) ->
MongoClient.connect uri, (err, @db) => done(err)
after (done) ->
@db.close done
beforeEach ->
job =
data: {}
complete: ->
fail: ->
queues = ["foo", "bar", "baz"].map (name) =>
new Queue { db: @db }, name
worker = new Worker queues
afterEach (done) ->
Async.parallel [
(next) -> RedisClient.flushdb next
(next) -> queues[0].collection.remove {}, next
], done
after (done) ->
RedisClient.quit done
it "has default polling interval", ->
expect(worker.interval).to.equal 5000
it "is an event emitter", (done) ->
worker.on "foo", (bar) ->
expect(bar).to.equal "bar"
done()
worker.emit "foo", "bar"
describe "when dequeuing", ->
foo = bar = baz = undefined
beforeEach ->
foo = Sinon.stub(worker.queues[0], "dequeue").yields()
bar = Sinon.stub(worker.queues[1], "dequeue").yields()
baz = Sinon.stub(worker.queues[2], "dequeue").yields()
worker.dequeue ->
worker.dequeue ->
worker.dequeue ->
worker.dequeue ->
it "cycles queues", ->
expect(foo.calledTwice).to.be.true
expect(foo.calledBefore(bar)).to.be.true
expect(bar.calledOnce).to.be.true
expect(bar.calledBefore(baz)).to.be.true
expect(baz.calledOnce).to.be.true
expect(baz.calledBefore(foo)).to.be.true
describe "when starting", ->
beforeEach ->
worker.poll = ->
describe "when job already dequeued", ->
beforeEach (done) ->
Async.series [
(next) -> worker.start next
(next) -> worker.queues[0].enqueue "foo", {}, next
(next) -> worker.queues[0].dequeue next
(next) ->
worker.queues[0].collection.findOne { status: "dequeued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
worker.stop()
next()
(next) -> worker.start next
], done
it "returns dequeued job to queue", (done) ->
worker.queues[0].collection.findOne { status: "queued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
done()
describe "when job from foreign queue already dequeued", ->
foreignQueue = undefined
beforeEach (done) ->
foreignQueue = new Queue { db: @db }, "foreign"
Async.series [
(next) -> worker.start next
(next) -> foreignQueue.enqueue "foo", {}, next
(next) -> foreignQueue.dequeue next
(next) ->
foreignQueue.collection.findOne { status: "dequeued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
worker.stop()
next()
(next) -> worker.start next
], done
it "does not return dequeued job to queue", (done) ->
foreignQueue.collection.find { status: "queued" }, (err, docs) ->
expect(err).to.not.exist
expect(docs[0]).to.not.exist
done()
describe "when polling", ->
describe "when error", ->
it "emits an 'error' event", (done) ->
error = new Error()
Sinon.stub(worker, "dequeue").yields error
worker.on "error", (err) ->
expect(err).to.equal error
done()
worker.start()
describe "when job is available", ->
work = undefined
beforeEach ->
work = Sinon.stub worker, "work"
Sinon.stub(worker.queues[0], "dequeue").yields null, job
it "works on the job", (done) ->
worker.start ->
expect(work.calledOnce).to.be.true
expect(work.getCall(0).args[0]).to.equal job
done()
it "emits 'dequeued' event", (done) ->
worker.on "dequeued", (j) ->
expect(j).to.equal job.data
done()
worker.start()
describe "when no job is available", ->
clock = undefined
beforeEach ->
clock = Sinon.useFakeTimers()
Sinon.stub(worker.queues[0], "dequeue").yields()
Sinon.stub(worker.queues[1], "dequeue").yields()
Sinon.stub(worker.queues[2], "dequeue").yields()
afterEach ->
clock.restore()
it "waits an interval before polling again", (done) ->
worker.start ->
poll = Sinon.spy worker, "poll"
clock.tick worker.interval
worker.stop()
expect(poll.calledOnce).to.be.true
done()
describe "when stopping with a job in progress", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync null, job
Sinon.stub(worker, "process").yields null, "foobar"
Sinon.stub(job, "complete").yields()
worker.start ->
worker.work job
done()
it "waits for the job to finish", (done) ->
expect(worker.working).to.be.true
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].calledOnce).to.be.true
# It doesn't get the stop signal until after the next dequeue is in motion
expect(dequeueStubs[1].calledOnce).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[2].calledOnce).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping during an empty dequeue", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "stops cleanly", (done) ->
expect(worker.working).to.be.true
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].called).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[1].called).to.be.false
expect(dequeueStubs[2].called).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping between polls", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "stops cleanly", (done) ->
expect(worker.working).to.be.true
worker.once "empty", ->
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].called).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[1].called).to.be.false
expect(dequeueStubs[2].called).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping twice", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "does not error", (done) ->
worker.stop ->
worker.stop()
done()
describe "when working", ->
describe "when processing fails", ->
error = fail = poll = undefined
beforeEach ->
error = new Error()
fail = Sinon.stub(job, "fail").yields()
poll = Sinon.spy worker, "poll"
Sinon.stub(worker, "process").yields error
it "fails the job", ->
worker.work job
expect(fail.calledOnce).to.be.true
expect(fail.getCall(0).args[0]).to.equal error
it "emits 'done' event", (done) ->
worker.on "done", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "emits 'failed' event", (done) ->
worker.on "failed", (data) ->
expect(data).to.equal job.data
done()
worker.work(job)
it "polls for a new job", ->
worker.work job
expect(poll.calledOnce).to.be.true
describe "when processing succeeds", ->
complete = poll = undefined
beforeEach ->
complete = Sinon.stub(job, "complete").yields()
poll = Sinon.spy worker, "poll"
Sinon.stub(worker, "process").yields null, "foobar"
it "completes the job", ->
worker.work job
expect(complete.calledOnce).to.be.true
expect(complete.getCall(0).args[0]).to.equal "foobar"
it "emits 'done' event", (done) ->
worker.on "done", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "emits 'complete' event", (done) ->
worker.on "complete", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "polls for a new job", ->
worker.work job
expect(poll.calledOnce).to.be.true
describe "when processing", ->
beforeEach ->
worker.register
example: (params, callback) -> callback(null, params)
it "passes job to registered callback", (done) ->
worker.process { name: "example", params: { foo: "bar" } }, (err, result) ->
expect(result).to.deep.equal { foo: "bar" }
done()
it "returns error if there is no registered callback", (done) ->
worker.process { name: "<NAME>" }, (err) ->
expect(err).to.exist
done()
describe "when locking", ->
beforeEach (done) ->
locked = true
queue = new Queue { db: @db }, "foo"
worker = new Worker [queue]
worker.register
example: (params, callback) -> callback(null, params)
worker.registerLock
example: (job, callback) ->
return callback() unless locked
locked = false
callback new Error("locked")
worker.poll = ->
Async.series [
(next) -> worker.start next
(next) -> worker.queues[0].enqueue "example", {}, next
], done
it "passes job to registered callback", (done) ->
Async.series [
(next) ->
worker.dequeue (err) ->
expect(err).to.exist
expect(err.message).to.equal "locked"
next()
(next) ->
worker.queues[0].collection.findOne (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "example"
expect(doc.status).to.equal "queued"
next()
(next) ->
worker.dequeue (err) ->
expect(err).to.not.exist
next()
(next) ->
worker.queues[0].collection.findOne (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "example"
expect(doc.status).to.equal "dequeued"
next()
], done
| true | Async = require "async"
Sinon = require "sinon"
Queue = require "../src/queue"
Worker = require "../src/worker"
MongoClient = require("mongodb").MongoClient
RedisClient = require("redis").createClient()
{ expect } = require "chai"
uri = "mongodb://localhost:27017/monq_tests"
describe "Worker", ->
job = queues = worker = undefined
before (done) ->
MongoClient.connect uri, (err, @db) => done(err)
after (done) ->
@db.close done
beforeEach ->
job =
data: {}
complete: ->
fail: ->
queues = ["foo", "bar", "baz"].map (name) =>
new Queue { db: @db }, name
worker = new Worker queues
afterEach (done) ->
Async.parallel [
(next) -> RedisClient.flushdb next
(next) -> queues[0].collection.remove {}, next
], done
after (done) ->
RedisClient.quit done
it "has default polling interval", ->
expect(worker.interval).to.equal 5000
it "is an event emitter", (done) ->
worker.on "foo", (bar) ->
expect(bar).to.equal "bar"
done()
worker.emit "foo", "bar"
describe "when dequeuing", ->
foo = bar = baz = undefined
beforeEach ->
foo = Sinon.stub(worker.queues[0], "dequeue").yields()
bar = Sinon.stub(worker.queues[1], "dequeue").yields()
baz = Sinon.stub(worker.queues[2], "dequeue").yields()
worker.dequeue ->
worker.dequeue ->
worker.dequeue ->
worker.dequeue ->
it "cycles queues", ->
expect(foo.calledTwice).to.be.true
expect(foo.calledBefore(bar)).to.be.true
expect(bar.calledOnce).to.be.true
expect(bar.calledBefore(baz)).to.be.true
expect(baz.calledOnce).to.be.true
expect(baz.calledBefore(foo)).to.be.true
describe "when starting", ->
beforeEach ->
worker.poll = ->
describe "when job already dequeued", ->
beforeEach (done) ->
Async.series [
(next) -> worker.start next
(next) -> worker.queues[0].enqueue "foo", {}, next
(next) -> worker.queues[0].dequeue next
(next) ->
worker.queues[0].collection.findOne { status: "dequeued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
worker.stop()
next()
(next) -> worker.start next
], done
it "returns dequeued job to queue", (done) ->
worker.queues[0].collection.findOne { status: "queued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
done()
describe "when job from foreign queue already dequeued", ->
foreignQueue = undefined
beforeEach (done) ->
foreignQueue = new Queue { db: @db }, "foreign"
Async.series [
(next) -> worker.start next
(next) -> foreignQueue.enqueue "foo", {}, next
(next) -> foreignQueue.dequeue next
(next) ->
foreignQueue.collection.findOne { status: "dequeued" }, (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "foo"
worker.stop()
next()
(next) -> worker.start next
], done
it "does not return dequeued job to queue", (done) ->
foreignQueue.collection.find { status: "queued" }, (err, docs) ->
expect(err).to.not.exist
expect(docs[0]).to.not.exist
done()
describe "when polling", ->
describe "when error", ->
it "emits an 'error' event", (done) ->
error = new Error()
Sinon.stub(worker, "dequeue").yields error
worker.on "error", (err) ->
expect(err).to.equal error
done()
worker.start()
describe "when job is available", ->
work = undefined
beforeEach ->
work = Sinon.stub worker, "work"
Sinon.stub(worker.queues[0], "dequeue").yields null, job
it "works on the job", (done) ->
worker.start ->
expect(work.calledOnce).to.be.true
expect(work.getCall(0).args[0]).to.equal job
done()
it "emits 'dequeued' event", (done) ->
worker.on "dequeued", (j) ->
expect(j).to.equal job.data
done()
worker.start()
describe "when no job is available", ->
clock = undefined
beforeEach ->
clock = Sinon.useFakeTimers()
Sinon.stub(worker.queues[0], "dequeue").yields()
Sinon.stub(worker.queues[1], "dequeue").yields()
Sinon.stub(worker.queues[2], "dequeue").yields()
afterEach ->
clock.restore()
it "waits an interval before polling again", (done) ->
worker.start ->
poll = Sinon.spy worker, "poll"
clock.tick worker.interval
worker.stop()
expect(poll.calledOnce).to.be.true
done()
describe "when stopping with a job in progress", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync null, job
Sinon.stub(worker, "process").yields null, "foobar"
Sinon.stub(job, "complete").yields()
worker.start ->
worker.work job
done()
it "waits for the job to finish", (done) ->
expect(worker.working).to.be.true
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].calledOnce).to.be.true
# It doesn't get the stop signal until after the next dequeue is in motion
expect(dequeueStubs[1].calledOnce).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[2].calledOnce).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping during an empty dequeue", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "stops cleanly", (done) ->
expect(worker.working).to.be.true
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].called).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[1].called).to.be.false
expect(dequeueStubs[2].called).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping between polls", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "stops cleanly", (done) ->
expect(worker.working).to.be.true
worker.once "empty", ->
worker.stop ->
expect(worker.working).to.be.false
expect(dequeueStubs[0].called).to.be.true
# Make sure it didn't continue polling after we told it to stop
expect(dequeueStubs[1].called).to.be.false
expect(dequeueStubs[2].called).to.be.false
expect(worker.listeners("done")).to.be.empty
done()
describe "when stopping twice", ->
dequeueStubs = undefined
beforeEach (done) ->
dequeueStubs = worker.queues.map (queue) ->
Sinon.stub(queue, "dequeue").yieldsAsync()
worker.start done
it "does not error", (done) ->
worker.stop ->
worker.stop()
done()
describe "when working", ->
describe "when processing fails", ->
error = fail = poll = undefined
beforeEach ->
error = new Error()
fail = Sinon.stub(job, "fail").yields()
poll = Sinon.spy worker, "poll"
Sinon.stub(worker, "process").yields error
it "fails the job", ->
worker.work job
expect(fail.calledOnce).to.be.true
expect(fail.getCall(0).args[0]).to.equal error
it "emits 'done' event", (done) ->
worker.on "done", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "emits 'failed' event", (done) ->
worker.on "failed", (data) ->
expect(data).to.equal job.data
done()
worker.work(job)
it "polls for a new job", ->
worker.work job
expect(poll.calledOnce).to.be.true
describe "when processing succeeds", ->
complete = poll = undefined
beforeEach ->
complete = Sinon.stub(job, "complete").yields()
poll = Sinon.spy worker, "poll"
Sinon.stub(worker, "process").yields null, "foobar"
it "completes the job", ->
worker.work job
expect(complete.calledOnce).to.be.true
expect(complete.getCall(0).args[0]).to.equal "foobar"
it "emits 'done' event", (done) ->
worker.on "done", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "emits 'complete' event", (done) ->
worker.on "complete", (data) ->
expect(data).to.equal job.data
done()
worker.work job
it "polls for a new job", ->
worker.work job
expect(poll.calledOnce).to.be.true
describe "when processing", ->
beforeEach ->
worker.register
example: (params, callback) -> callback(null, params)
it "passes job to registered callback", (done) ->
worker.process { name: "example", params: { foo: "bar" } }, (err, result) ->
expect(result).to.deep.equal { foo: "bar" }
done()
it "returns error if there is no registered callback", (done) ->
worker.process { name: "PI:NAME:<NAME>END_PI" }, (err) ->
expect(err).to.exist
done()
describe "when locking", ->
beforeEach (done) ->
locked = true
queue = new Queue { db: @db }, "foo"
worker = new Worker [queue]
worker.register
example: (params, callback) -> callback(null, params)
worker.registerLock
example: (job, callback) ->
return callback() unless locked
locked = false
callback new Error("locked")
worker.poll = ->
Async.series [
(next) -> worker.start next
(next) -> worker.queues[0].enqueue "example", {}, next
], done
it "passes job to registered callback", (done) ->
Async.series [
(next) ->
worker.dequeue (err) ->
expect(err).to.exist
expect(err.message).to.equal "locked"
next()
(next) ->
worker.queues[0].collection.findOne (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "example"
expect(doc.status).to.equal "queued"
next()
(next) ->
worker.dequeue (err) ->
expect(err).to.not.exist
next()
(next) ->
worker.queues[0].collection.findOne (err, doc) ->
expect(err).to.not.exist
expect(doc.name).to.equal "example"
expect(doc.status).to.equal "dequeued"
next()
], done
|
[
{
"context": " \"timezone\" : null,\n \"firstName\" : null,\n \"lastName\" : null,\n \"aliasName\" : null,\n \"kerberos\" :",
"end": 3020,
"score": 0.9429629445075989,
"start": 3012,
"tag": "NAME",
"value": "lastName"
}
] | src/com/redhat/ascension/rules/taskRules.coffee | pbathia/ascension | 0 | nools = require 'nools'
logger = require('tracer').colorConsole()
prettyjson = require 'prettyjson'
salesforce = require '../db/salesforce'
Q = require 'q'
#DbOperations = require '../db/dbOperations'
MongoOperations = require '../db/MongoOperations'
TaskStateEnum = require './enums/TaskStateEnum'
TaskTypeEnum = require './enums/TaskTypeEnum'
TaskOpEnum = require './enums/TaskOpEnum'
EntityOpEnum = require './enums/ResourceOpEnum'
_ = require 'lodash'
moment = require 'moment'
mongoose = require 'mongoose'
mongooseQ = require('mongoose-q')(mongoose)
#MongoClient = require('mongodb').MongoClient
#Server = require('mongodb').Server
# TODO remove -- This is deprecated, leaving it a bit for reference
TaskRules = {}
#OwnerId != '{spam_queue_id}'
#AND SBR_Group__c includes ({sbr_groups})
# Spam id = 00GA0000000XxxNMAS
#AND SBR_Group__c includes ('Kernel')
TaskRules.soql = """
SELECT
AccountId,
Account_Number__c,
CaseNumber,
Collaboration_Score__c,
Comment_Count__c,
CreatedDate,
Created_By__c,
FTS_Role__c,
FTS__c,
Last_Breach__c,
PrivateCommentCount__c,
PublicCommentCount__c,
SBT__c,
SBR_Group__c,
Severity__c,
Status,
Internal_Status__c,
Strategic__c,
Tags__c
FROM
Case
WHERE
OwnerId != '00GA0000000XxxNMAS'
#andStatusCondition#
LIMIT 100
"""
#Status = 'Waiting on Red Hat'
#Status != 'Closed'
#define Case {
# AccountId : null,
# Account_Number__c : null,
# CaseNumber : null,
# Collaboration_Score__c : null,
# Comment_Count__c : null,
# CreatedDate : null,
# Created_By__c : null,
# FTS_Role__c : null,
# FTS__c : null,
# Last_Breach__c : null,
# PrivateCommentCount__c : null,
# PublicCommentCount__c : null,
# SBT__c : null,
# Severity__c : null,
# Status : null,
# Internal_status__c : null,
# Strategic__c : null,
# SBR_Group__c : null,
# Tags__c : null
#}
TaskRules.noolsDefs = """
// The ExistingTask represents a minimal fetch of all existing tasks, this allows rules such as 'If no prior NNO task,
// create one'
//define ExistingTask {
// bid: null,
// taskOp: null,
// resourceOp: null,
// owner: null
//}
define Task {
_id: null,
bid: null,
type: null,
score: 0,
locked: false,
timeout: -1,
sbrs: [],
tags: [],
owner: null,
closed: null,
type: null,
taskOp: null,
resourceOp: null,
state: 'new',
'case': {
AccountId : null,
Account_Number__c : null,
CaseNumber : null,
Collaboration_Score__c : null,
Comment_Count__c : null,
CreatedDate : null,
Created_By__c : null,
FTS_Role__c : null,
FTS__c : null,
Last_Breach__c : null,
PrivateCommentCount__c : null,
PublicCommentCount__c : null,
SBT__c : null,
Severity__c : null,
Status : null,
Internal_status__c : null,
Strategic__c : null,
SBR_Group__c : null,
Tags__c : null
},
owner: {
"fullName" : null,
"email" : null,
"sso" : null,
"gss" : null,
"superRegion" : null,
"timezone" : null,
"firstName" : null,
"lastName" : null,
"aliasName" : null,
"kerberos" : null,
"salesforce" : null,
"isManager" : null,
"active" : null,
"created" : null,
"lastLogin" : null,
"lastModified" : null,
"outOfOffice" : null,
"id" : null
}
}
"""
# INFO -- do not use prettyjson.render inside of the nools, it silently gives issues
TaskRules.nools = """
// This is the most basic of all rules, it says if there is no real task associated with an Unassigned case, create
// One and set the appropriate states -- In this one situation there is no question there is a single resulting task
// Thus we can retract that task once created.
rule "noop task/unassigned case" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Unassigned';
// Make sure there is no prior existing task created already
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.case.Internal_Status__c == 'Unassigned');
}
then {
logger.warn('Found unmanaged task: ' + t.bid + ', setting the task to NNO.');
modify(t, function() {
this.taskOp = TaskOpEnum.OWN_TASK.name;
this.resourceOp = EntityOpEnum.OWN.name;
});
//logger.warn('Sending task to be saved: ' + t.bid);
retract(t);
return saveRuleTask(t);
}
}
// New Waiting on Collab case without any prior associated Task
rule "noop task/collab case" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Waiting on Collaboration';
// Make sure there is no prior task created for this Waiting on Collaboration task
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.resourceOp == EntityOpEnum.COLLAB.name);
}
then {
modify(t, function(){
this.taskOp = TaskOpEnum.OWN_TASK.name;
this.resourceOp = EntityOpEnum.COLLAB.name;
});
retract(t);
return saveRuleTask(t);
}
}
// New Waiting on Collab case with an associated Task
rule "noop task/collab case w/exiting task" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Waiting on Collaboration';
// If there is an existing task that matches this noop task, retract both
et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.resourceOp == EntityOpEnum.COLLAB.name;
}
then {
retract(t);
retract(et);
}
}
rule "noop task/default" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name;
// Make sure there is no prior existing task created already
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid);
}
then {
//logger.warn('DEFAULT: Found unmanaged task: ' + t.bid + ', setting the task to NNO.');
//modify(t, function(){
// this.taskOp = TaskOpEnum.OWN_TASK.name;
// this.resourceOp = EntityOpEnum.OWN.name;
//});
retract(t);
return saveRuleTask(t);
}
}
""";
#TaskRules.saveRuleTaskCb = (t, cb) ->
# logger.debug "saveRuleTask: Creating task [cb]: " + t.bid
# model = new MongoOperations['models']['task'] t
# model.save (err) ->
# if err then cb(err) else cb(null)
TaskRules.generateSaveTasksPromises = (tasks) ->
promises = []
# Potentially consider https://groups.google.com/forum/#!topic/mongoose-orm/IkPmvcd0kds if this ever proves to be
# too slow. For 10-20k inserts, probably won't be too slow. Anything more, might want to bulk insert
_.each tasks, (t) ->
#logger.debug "Made task: #{prettyjson.render t}"
promises.push new MongoOperations['models']['task'](t).saveQ()
promises
# This is for task discovery, to insert a minimal look at all existing tasks for the rules
TaskRules.getExistingTasks = () ->
MongoOperations['models']['task']
.find()
.where('state').ne(TaskStateEnum.CLOSED)
#.select('bid taskOp resourceOp owner')
.execQ()
# Make Mongoose Schema Tasks out of regular js objects
TaskRules.getTasks = (tasks) ->
_.map tasks, (t) ->
new MongoOperations['models']['task'](t)
TaskRules.saveTasks = (tasks) ->
deferred = Q.defer()
Q.all(@generateSaveTasksPromises(tasks))
.then(->
deferred.resolve()
, (err) ->
deferred.reject(err)
)
deferred.promise
#TaskRules.makeTaskFromRule = (t) ->
# _id: t['_id']
# bid: t['bid']
# score: t['score']
# timeout: t['score']
# sbrs: t['score']
# tags: t['score']
# owner: t['score']
# completed: t['score']
# type: t['score']
# taskOp: t['score']
# resourceOp: t['score']
# state: t['score']
# 'case': t['case']
# A simple update to Mongo with the case meta data. The input is the noop task which will have the very latest
# case data always
# TODO -- the decision here to make is should I always update all existing tasks prior to assert? I would think so
# This would simplify things and make the rules less error prone
TaskRules.updateTasksWithCaseMetadata = (t) ->
MongoOperations['models']['task'].where()
.setOptions({multi: true})
.update({'bid': c['CaseNumber']}, @taskFromCaseUpdateHash(t, t['case']))
.exec()
TaskRules.divineTasks = (cases) ->
deferred = Q.defer()
# Remove these attributes from SF and do other transforms
_.each cases, (c) ->
delete c['attributes']
c['SBR_Group__c'] = TaskRules.parseSfArray(c['SBR_Group__c'])
c['Tags__c'] = TaskRules.parseSfArray(c['Tags__c'])
# Output hash will contain the case and task
#outputHash = _.object(_.map(cases, (x) -> [x['CaseNumber'], {'case': x, 'task': undefined}]))
outputHash = _.object(_.map(cases, (x) ->
[x['CaseNumber'], {'case': x, 'task': undefined}]))
# grab a list of existing tasks in the db based on the fetched case numbers
caseNumbers = _.chain(cases).pluck('CaseNumber').value()
MongoOperations['models']['task']
.find()
.where('bid')
.in(caseNumbers)
#.select('bid')
.execQ()
.done((tasks) ->
# TODO -- Ultimately here I want to convert anything seen to NOOP tasks, this will be much simpler to handle, then
# just re-assert all existing un-closed tasks to the rules every time, This will give the ultimate flexibility I believe
# These represent existing tasks
existingCaseNumbers = _.chain(tasks).pluck('bid').unique().value()
# Find all new cases that are not tasks by rejecting all the cases that overlap with existing tasks
newCases = _.chain(cases).reject((c) ->
_.contains(existingCaseNumbers, c['CaseNumber'])).value()
# Make new tasks
newTasks = _.map(newCases, (c) ->
TaskRules.makeTaskFromCase(c))
logger.debug "Discovered #{newTasks.length} new tasks"
# Existing tasks
existingTasks = _.chain(tasks).filter((t) ->
_.contains(existingCaseNumbers, t['bid'])).value()
logger.debug "Discovered #{existingTasks.length} existing tasks"
# Update existing Tasks, the updates are only to case meta data at this point, nothing else
taskPromises = []
_.each existingTasks, (t) ->
c = outputHash[t['bid']]['case']
updateHash = TaskRules.taskFromCaseUpdateHash(t, c)
# Update the in-memory task
_.assign t, updateHash
# TODO -- should also specify task state != closed/completed/abandoned
# taskPromises.push MongoOperations['models']['task'].where().setOptions({multi: true}).update({'bid': c['CaseNumber']}, updateHash).exec()
# TODO -- make this change and haven't retested the rules yet
taskPromises.push TaskRules.updateTaskFromCase(t, c)
noopTasks = TaskRules.getTasks newTasks
#deferred.resolve noopTasks
# The below logic is for saving those new tasks, not sure this is really what we want -- let's just flatten out
# the existing tasks for now
# The taskPromises will be comprised of all task updates and task inserts
# taskPromises = _.chain([taskPromises, TaskRules.generateSaveTasksPromises(newTasks)]).flatten().value()
taskPromises = _.chain(taskPromises).value()
# In the chain the noopTasks are undefined, gotta figure out what's up
Q.allSettled(taskPromises)
.then(->
d = Q.defer()
logger.debug "Completed all task promises, re-fetching the tasks from mongo"
# Now re-fetch all the tasks from Mongo so we know we absolutely have the latest consistent versions
# TODO -- Attempt to promisify this again
MongoOperations['models']['task']
.where('bid')
.in(existingCaseNumbers)
.exec (err, results) ->
logger.debug "re-fetched #{results.length} results"
if err? then d.reject err else d.resolve results
d.promise
)
.then((results) ->
output = _.chain([results, noopTasks]).flatten().value()
logger.debug "Resolving the main deferred with a total of #{results.length} existing tasks, and #{noopTasks.length} noop tasks"
deferred.resolve output
)
.fail((err) ->
logger.error err.stack)
.done()
, (err) ->
deferred.reject err
)
deferred.promise
TaskRules.initFlow = () ->
@beginFire = 0
@endFire = 0
@flow = nools.compile @noolsDefs + @nools,
name: 'helloFlow'
scope:
logger: logger
TaskOpEnum: TaskOpEnum
EntityOpEnum: EntityOpEnum
saveRuleTask: TaskRules.saveRuleTask
saveRuleTaskCb: TaskRules.saveRuleTaskCb
#updateTasksWithCaseMetadata: TaskRules.updateTasksWithCaseMetadata
prettyjson: prettyjson
@assertCalls = 0
@fireCalls = 0
TaskRules.printSimple = (op, fact) ->
logger.debug "#{op}: " + prettyjson.render
bid: fact['bid']
taskOp: fact['taskOp']
resourceOp: fact['resourceOp']
TaskRules.initSession = (debug = false) ->
@session = @flow.getSession()
@session.on "assert", (fact) ->
TaskRules.assertCalls += 1
if debug is true then TaskRules.printSimple('assert', fact)
@session.on "retract", (fact) ->
if debug is true then TaskRules.printSimple('retract', fact)
@session.on "modify", (fact) ->
if debug is true then TaskRules.printSimple('modify', fact)
@session.on "fire", (name, rule) ->
if debug is true then logger.debug "fired: #{name}"
TaskRules.fireCalls += 1
TaskRules.executeTest = () ->
#Case = flow.getDefined("Case")
Task = @flow.getDefined("Task")
# TODO -- I do need to fetch != 'Closed' as well, for now though, testing with WoRH is sufficient
#soql = soql.replace /#andStatusCondition#/, " AND Status != 'Closed'"
soql = @soql.replace /#andStatusCondition#/, " AND Status = 'Waiting on Red Hat'"
Q.nfcall(salesforce.querySf, {'soql': soql})
.then((cases) ->
TaskRules.divineTasks(cases)
)
.then((tasks) ->
logger.debug "Completed persisting the tasks"
#_.each obj['cases'], (x) ->
# delete x['attributes']
# c = new Case(x)
# session.assert c
_.each tasks, (x) ->
t = new Task(x)
TaskRules.session.assert t
beginFire = +moment()
TaskRules.session.match().then(() ->
logger.info "Done, assert calls: #{TaskRules.assertCalls}, fire calls: #{TaskRules.fireCalls}"
endFire = +moment()
dur = ((endFire - beginFire) / 1000).toFixed(0)
logger.info "Completed firing rules in #{dur}s"
TaskRules.session.dispose()
process.exit(0)
, (err) ->
logger.error err.stack
)
)
.done()
module.exports = TaskRules
if require.main is module
MongoOperations.init({mongoDebug: true})
db = mongoose['connection']
db.on 'error', logger.error.bind(logger, 'connection error:')
db.once 'open', () ->
MongoOperations.defineCollections()
TaskRules.initFlow()
TaskRules.initSession(false)
MongoOperations.reset().done(->
TaskRules.executeTest()
, (err) ->
logger.error err.stack
)
#TaskRules.executeTest()
| 197105 | nools = require 'nools'
logger = require('tracer').colorConsole()
prettyjson = require 'prettyjson'
salesforce = require '../db/salesforce'
Q = require 'q'
#DbOperations = require '../db/dbOperations'
MongoOperations = require '../db/MongoOperations'
TaskStateEnum = require './enums/TaskStateEnum'
TaskTypeEnum = require './enums/TaskTypeEnum'
TaskOpEnum = require './enums/TaskOpEnum'
EntityOpEnum = require './enums/ResourceOpEnum'
_ = require 'lodash'
moment = require 'moment'
mongoose = require 'mongoose'
mongooseQ = require('mongoose-q')(mongoose)
#MongoClient = require('mongodb').MongoClient
#Server = require('mongodb').Server
# TODO remove -- This is deprecated, leaving it a bit for reference
TaskRules = {}
#OwnerId != '{spam_queue_id}'
#AND SBR_Group__c includes ({sbr_groups})
# Spam id = 00GA0000000XxxNMAS
#AND SBR_Group__c includes ('Kernel')
TaskRules.soql = """
SELECT
AccountId,
Account_Number__c,
CaseNumber,
Collaboration_Score__c,
Comment_Count__c,
CreatedDate,
Created_By__c,
FTS_Role__c,
FTS__c,
Last_Breach__c,
PrivateCommentCount__c,
PublicCommentCount__c,
SBT__c,
SBR_Group__c,
Severity__c,
Status,
Internal_Status__c,
Strategic__c,
Tags__c
FROM
Case
WHERE
OwnerId != '00GA0000000XxxNMAS'
#andStatusCondition#
LIMIT 100
"""
#Status = 'Waiting on Red Hat'
#Status != 'Closed'
#define Case {
# AccountId : null,
# Account_Number__c : null,
# CaseNumber : null,
# Collaboration_Score__c : null,
# Comment_Count__c : null,
# CreatedDate : null,
# Created_By__c : null,
# FTS_Role__c : null,
# FTS__c : null,
# Last_Breach__c : null,
# PrivateCommentCount__c : null,
# PublicCommentCount__c : null,
# SBT__c : null,
# Severity__c : null,
# Status : null,
# Internal_status__c : null,
# Strategic__c : null,
# SBR_Group__c : null,
# Tags__c : null
#}
TaskRules.noolsDefs = """
// The ExistingTask represents a minimal fetch of all existing tasks, this allows rules such as 'If no prior NNO task,
// create one'
//define ExistingTask {
// bid: null,
// taskOp: null,
// resourceOp: null,
// owner: null
//}
define Task {
_id: null,
bid: null,
type: null,
score: 0,
locked: false,
timeout: -1,
sbrs: [],
tags: [],
owner: null,
closed: null,
type: null,
taskOp: null,
resourceOp: null,
state: 'new',
'case': {
AccountId : null,
Account_Number__c : null,
CaseNumber : null,
Collaboration_Score__c : null,
Comment_Count__c : null,
CreatedDate : null,
Created_By__c : null,
FTS_Role__c : null,
FTS__c : null,
Last_Breach__c : null,
PrivateCommentCount__c : null,
PublicCommentCount__c : null,
SBT__c : null,
Severity__c : null,
Status : null,
Internal_status__c : null,
Strategic__c : null,
SBR_Group__c : null,
Tags__c : null
},
owner: {
"fullName" : null,
"email" : null,
"sso" : null,
"gss" : null,
"superRegion" : null,
"timezone" : null,
"firstName" : null,
"<NAME>" : null,
"aliasName" : null,
"kerberos" : null,
"salesforce" : null,
"isManager" : null,
"active" : null,
"created" : null,
"lastLogin" : null,
"lastModified" : null,
"outOfOffice" : null,
"id" : null
}
}
"""
# INFO -- do not use prettyjson.render inside of the nools, it silently gives issues
TaskRules.nools = """
// This is the most basic of all rules, it says if there is no real task associated with an Unassigned case, create
// One and set the appropriate states -- In this one situation there is no question there is a single resulting task
// Thus we can retract that task once created.
rule "noop task/unassigned case" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Unassigned';
// Make sure there is no prior existing task created already
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.case.Internal_Status__c == 'Unassigned');
}
then {
logger.warn('Found unmanaged task: ' + t.bid + ', setting the task to NNO.');
modify(t, function() {
this.taskOp = TaskOpEnum.OWN_TASK.name;
this.resourceOp = EntityOpEnum.OWN.name;
});
//logger.warn('Sending task to be saved: ' + t.bid);
retract(t);
return saveRuleTask(t);
}
}
// New Waiting on Collab case without any prior associated Task
rule "noop task/collab case" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Waiting on Collaboration';
// Make sure there is no prior task created for this Waiting on Collaboration task
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.resourceOp == EntityOpEnum.COLLAB.name);
}
then {
modify(t, function(){
this.taskOp = TaskOpEnum.OWN_TASK.name;
this.resourceOp = EntityOpEnum.COLLAB.name;
});
retract(t);
return saveRuleTask(t);
}
}
// New Waiting on Collab case with an associated Task
rule "noop task/collab case w/exiting task" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Waiting on Collaboration';
// If there is an existing task that matches this noop task, retract both
et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.resourceOp == EntityOpEnum.COLLAB.name;
}
then {
retract(t);
retract(et);
}
}
rule "noop task/default" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name;
// Make sure there is no prior existing task created already
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid);
}
then {
//logger.warn('DEFAULT: Found unmanaged task: ' + t.bid + ', setting the task to NNO.');
//modify(t, function(){
// this.taskOp = TaskOpEnum.OWN_TASK.name;
// this.resourceOp = EntityOpEnum.OWN.name;
//});
retract(t);
return saveRuleTask(t);
}
}
""";
#TaskRules.saveRuleTaskCb = (t, cb) ->
# logger.debug "saveRuleTask: Creating task [cb]: " + t.bid
# model = new MongoOperations['models']['task'] t
# model.save (err) ->
# if err then cb(err) else cb(null)
TaskRules.generateSaveTasksPromises = (tasks) ->
promises = []
# Potentially consider https://groups.google.com/forum/#!topic/mongoose-orm/IkPmvcd0kds if this ever proves to be
# too slow. For 10-20k inserts, probably won't be too slow. Anything more, might want to bulk insert
_.each tasks, (t) ->
#logger.debug "Made task: #{prettyjson.render t}"
promises.push new MongoOperations['models']['task'](t).saveQ()
promises
# This is for task discovery, to insert a minimal look at all existing tasks for the rules
TaskRules.getExistingTasks = () ->
MongoOperations['models']['task']
.find()
.where('state').ne(TaskStateEnum.CLOSED)
#.select('bid taskOp resourceOp owner')
.execQ()
# Make Mongoose Schema Tasks out of regular js objects
TaskRules.getTasks = (tasks) ->
_.map tasks, (t) ->
new MongoOperations['models']['task'](t)
TaskRules.saveTasks = (tasks) ->
deferred = Q.defer()
Q.all(@generateSaveTasksPromises(tasks))
.then(->
deferred.resolve()
, (err) ->
deferred.reject(err)
)
deferred.promise
#TaskRules.makeTaskFromRule = (t) ->
# _id: t['_id']
# bid: t['bid']
# score: t['score']
# timeout: t['score']
# sbrs: t['score']
# tags: t['score']
# owner: t['score']
# completed: t['score']
# type: t['score']
# taskOp: t['score']
# resourceOp: t['score']
# state: t['score']
# 'case': t['case']
# A simple update to Mongo with the case meta data. The input is the noop task which will have the very latest
# case data always
# TODO -- the decision here to make is should I always update all existing tasks prior to assert? I would think so
# This would simplify things and make the rules less error prone
TaskRules.updateTasksWithCaseMetadata = (t) ->
MongoOperations['models']['task'].where()
.setOptions({multi: true})
.update({'bid': c['CaseNumber']}, @taskFromCaseUpdateHash(t, t['case']))
.exec()
TaskRules.divineTasks = (cases) ->
deferred = Q.defer()
# Remove these attributes from SF and do other transforms
_.each cases, (c) ->
delete c['attributes']
c['SBR_Group__c'] = TaskRules.parseSfArray(c['SBR_Group__c'])
c['Tags__c'] = TaskRules.parseSfArray(c['Tags__c'])
# Output hash will contain the case and task
#outputHash = _.object(_.map(cases, (x) -> [x['CaseNumber'], {'case': x, 'task': undefined}]))
outputHash = _.object(_.map(cases, (x) ->
[x['CaseNumber'], {'case': x, 'task': undefined}]))
# grab a list of existing tasks in the db based on the fetched case numbers
caseNumbers = _.chain(cases).pluck('CaseNumber').value()
MongoOperations['models']['task']
.find()
.where('bid')
.in(caseNumbers)
#.select('bid')
.execQ()
.done((tasks) ->
# TODO -- Ultimately here I want to convert anything seen to NOOP tasks, this will be much simpler to handle, then
# just re-assert all existing un-closed tasks to the rules every time, This will give the ultimate flexibility I believe
# These represent existing tasks
existingCaseNumbers = _.chain(tasks).pluck('bid').unique().value()
# Find all new cases that are not tasks by rejecting all the cases that overlap with existing tasks
newCases = _.chain(cases).reject((c) ->
_.contains(existingCaseNumbers, c['CaseNumber'])).value()
# Make new tasks
newTasks = _.map(newCases, (c) ->
TaskRules.makeTaskFromCase(c))
logger.debug "Discovered #{newTasks.length} new tasks"
# Existing tasks
existingTasks = _.chain(tasks).filter((t) ->
_.contains(existingCaseNumbers, t['bid'])).value()
logger.debug "Discovered #{existingTasks.length} existing tasks"
# Update existing Tasks, the updates are only to case meta data at this point, nothing else
taskPromises = []
_.each existingTasks, (t) ->
c = outputHash[t['bid']]['case']
updateHash = TaskRules.taskFromCaseUpdateHash(t, c)
# Update the in-memory task
_.assign t, updateHash
# TODO -- should also specify task state != closed/completed/abandoned
# taskPromises.push MongoOperations['models']['task'].where().setOptions({multi: true}).update({'bid': c['CaseNumber']}, updateHash).exec()
# TODO -- make this change and haven't retested the rules yet
taskPromises.push TaskRules.updateTaskFromCase(t, c)
noopTasks = TaskRules.getTasks newTasks
#deferred.resolve noopTasks
# The below logic is for saving those new tasks, not sure this is really what we want -- let's just flatten out
# the existing tasks for now
# The taskPromises will be comprised of all task updates and task inserts
# taskPromises = _.chain([taskPromises, TaskRules.generateSaveTasksPromises(newTasks)]).flatten().value()
taskPromises = _.chain(taskPromises).value()
# In the chain the noopTasks are undefined, gotta figure out what's up
Q.allSettled(taskPromises)
.then(->
d = Q.defer()
logger.debug "Completed all task promises, re-fetching the tasks from mongo"
# Now re-fetch all the tasks from Mongo so we know we absolutely have the latest consistent versions
# TODO -- Attempt to promisify this again
MongoOperations['models']['task']
.where('bid')
.in(existingCaseNumbers)
.exec (err, results) ->
logger.debug "re-fetched #{results.length} results"
if err? then d.reject err else d.resolve results
d.promise
)
.then((results) ->
output = _.chain([results, noopTasks]).flatten().value()
logger.debug "Resolving the main deferred with a total of #{results.length} existing tasks, and #{noopTasks.length} noop tasks"
deferred.resolve output
)
.fail((err) ->
logger.error err.stack)
.done()
, (err) ->
deferred.reject err
)
deferred.promise
TaskRules.initFlow = () ->
@beginFire = 0
@endFire = 0
@flow = nools.compile @noolsDefs + @nools,
name: 'helloFlow'
scope:
logger: logger
TaskOpEnum: TaskOpEnum
EntityOpEnum: EntityOpEnum
saveRuleTask: TaskRules.saveRuleTask
saveRuleTaskCb: TaskRules.saveRuleTaskCb
#updateTasksWithCaseMetadata: TaskRules.updateTasksWithCaseMetadata
prettyjson: prettyjson
@assertCalls = 0
@fireCalls = 0
TaskRules.printSimple = (op, fact) ->
logger.debug "#{op}: " + prettyjson.render
bid: fact['bid']
taskOp: fact['taskOp']
resourceOp: fact['resourceOp']
TaskRules.initSession = (debug = false) ->
@session = @flow.getSession()
@session.on "assert", (fact) ->
TaskRules.assertCalls += 1
if debug is true then TaskRules.printSimple('assert', fact)
@session.on "retract", (fact) ->
if debug is true then TaskRules.printSimple('retract', fact)
@session.on "modify", (fact) ->
if debug is true then TaskRules.printSimple('modify', fact)
@session.on "fire", (name, rule) ->
if debug is true then logger.debug "fired: #{name}"
TaskRules.fireCalls += 1
TaskRules.executeTest = () ->
#Case = flow.getDefined("Case")
Task = @flow.getDefined("Task")
# TODO -- I do need to fetch != 'Closed' as well, for now though, testing with WoRH is sufficient
#soql = soql.replace /#andStatusCondition#/, " AND Status != 'Closed'"
soql = @soql.replace /#andStatusCondition#/, " AND Status = 'Waiting on Red Hat'"
Q.nfcall(salesforce.querySf, {'soql': soql})
.then((cases) ->
TaskRules.divineTasks(cases)
)
.then((tasks) ->
logger.debug "Completed persisting the tasks"
#_.each obj['cases'], (x) ->
# delete x['attributes']
# c = new Case(x)
# session.assert c
_.each tasks, (x) ->
t = new Task(x)
TaskRules.session.assert t
beginFire = +moment()
TaskRules.session.match().then(() ->
logger.info "Done, assert calls: #{TaskRules.assertCalls}, fire calls: #{TaskRules.fireCalls}"
endFire = +moment()
dur = ((endFire - beginFire) / 1000).toFixed(0)
logger.info "Completed firing rules in #{dur}s"
TaskRules.session.dispose()
process.exit(0)
, (err) ->
logger.error err.stack
)
)
.done()
module.exports = TaskRules
if require.main is module
MongoOperations.init({mongoDebug: true})
db = mongoose['connection']
db.on 'error', logger.error.bind(logger, 'connection error:')
db.once 'open', () ->
MongoOperations.defineCollections()
TaskRules.initFlow()
TaskRules.initSession(false)
MongoOperations.reset().done(->
TaskRules.executeTest()
, (err) ->
logger.error err.stack
)
#TaskRules.executeTest()
| true | nools = require 'nools'
logger = require('tracer').colorConsole()
prettyjson = require 'prettyjson'
salesforce = require '../db/salesforce'
Q = require 'q'
#DbOperations = require '../db/dbOperations'
MongoOperations = require '../db/MongoOperations'
TaskStateEnum = require './enums/TaskStateEnum'
TaskTypeEnum = require './enums/TaskTypeEnum'
TaskOpEnum = require './enums/TaskOpEnum'
EntityOpEnum = require './enums/ResourceOpEnum'
_ = require 'lodash'
moment = require 'moment'
mongoose = require 'mongoose'
mongooseQ = require('mongoose-q')(mongoose)
#MongoClient = require('mongodb').MongoClient
#Server = require('mongodb').Server
# TODO remove -- This is deprecated, leaving it a bit for reference
TaskRules = {}
#OwnerId != '{spam_queue_id}'
#AND SBR_Group__c includes ({sbr_groups})
# Spam id = 00GA0000000XxxNMAS
#AND SBR_Group__c includes ('Kernel')
TaskRules.soql = """
SELECT
AccountId,
Account_Number__c,
CaseNumber,
Collaboration_Score__c,
Comment_Count__c,
CreatedDate,
Created_By__c,
FTS_Role__c,
FTS__c,
Last_Breach__c,
PrivateCommentCount__c,
PublicCommentCount__c,
SBT__c,
SBR_Group__c,
Severity__c,
Status,
Internal_Status__c,
Strategic__c,
Tags__c
FROM
Case
WHERE
OwnerId != '00GA0000000XxxNMAS'
#andStatusCondition#
LIMIT 100
"""
#Status = 'Waiting on Red Hat'
#Status != 'Closed'
#define Case {
# AccountId : null,
# Account_Number__c : null,
# CaseNumber : null,
# Collaboration_Score__c : null,
# Comment_Count__c : null,
# CreatedDate : null,
# Created_By__c : null,
# FTS_Role__c : null,
# FTS__c : null,
# Last_Breach__c : null,
# PrivateCommentCount__c : null,
# PublicCommentCount__c : null,
# SBT__c : null,
# Severity__c : null,
# Status : null,
# Internal_status__c : null,
# Strategic__c : null,
# SBR_Group__c : null,
# Tags__c : null
#}
TaskRules.noolsDefs = """
// The ExistingTask represents a minimal fetch of all existing tasks, this allows rules such as 'If no prior NNO task,
// create one'
//define ExistingTask {
// bid: null,
// taskOp: null,
// resourceOp: null,
// owner: null
//}
define Task {
_id: null,
bid: null,
type: null,
score: 0,
locked: false,
timeout: -1,
sbrs: [],
tags: [],
owner: null,
closed: null,
type: null,
taskOp: null,
resourceOp: null,
state: 'new',
'case': {
AccountId : null,
Account_Number__c : null,
CaseNumber : null,
Collaboration_Score__c : null,
Comment_Count__c : null,
CreatedDate : null,
Created_By__c : null,
FTS_Role__c : null,
FTS__c : null,
Last_Breach__c : null,
PrivateCommentCount__c : null,
PublicCommentCount__c : null,
SBT__c : null,
Severity__c : null,
Status : null,
Internal_status__c : null,
Strategic__c : null,
SBR_Group__c : null,
Tags__c : null
},
owner: {
"fullName" : null,
"email" : null,
"sso" : null,
"gss" : null,
"superRegion" : null,
"timezone" : null,
"firstName" : null,
"PI:NAME:<NAME>END_PI" : null,
"aliasName" : null,
"kerberos" : null,
"salesforce" : null,
"isManager" : null,
"active" : null,
"created" : null,
"lastLogin" : null,
"lastModified" : null,
"outOfOffice" : null,
"id" : null
}
}
"""
# INFO -- do not use prettyjson.render inside of the nools, it silently gives issues
TaskRules.nools = """
// This is the most basic of all rules, it says if there is no real task associated with an Unassigned case, create
// One and set the appropriate states -- In this one situation there is no question there is a single resulting task
// Thus we can retract that task once created.
rule "noop task/unassigned case" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Unassigned';
// Make sure there is no prior existing task created already
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.case.Internal_Status__c == 'Unassigned');
}
then {
logger.warn('Found unmanaged task: ' + t.bid + ', setting the task to NNO.');
modify(t, function() {
this.taskOp = TaskOpEnum.OWN_TASK.name;
this.resourceOp = EntityOpEnum.OWN.name;
});
//logger.warn('Sending task to be saved: ' + t.bid);
retract(t);
return saveRuleTask(t);
}
}
// New Waiting on Collab case without any prior associated Task
rule "noop task/collab case" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Waiting on Collaboration';
// Make sure there is no prior task created for this Waiting on Collaboration task
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.resourceOp == EntityOpEnum.COLLAB.name);
}
then {
modify(t, function(){
this.taskOp = TaskOpEnum.OWN_TASK.name;
this.resourceOp = EntityOpEnum.COLLAB.name;
});
retract(t);
return saveRuleTask(t);
}
}
// New Waiting on Collab case with an associated Task
rule "noop task/collab case w/exiting task" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name && t.case.Internal_Status__c == 'Waiting on Collaboration';
// If there is an existing task that matches this noop task, retract both
et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid && et.resourceOp == EntityOpEnum.COLLAB.name;
}
then {
retract(t);
retract(et);
}
}
rule "noop task/default" {
when {
t : Task t.taskOp == TaskOpEnum.NOOP.name;
// Make sure there is no prior existing task created already
not(et: Task et.taskOp != TaskOpEnum.NOOP.name && et.bid == t.bid);
}
then {
//logger.warn('DEFAULT: Found unmanaged task: ' + t.bid + ', setting the task to NNO.');
//modify(t, function(){
// this.taskOp = TaskOpEnum.OWN_TASK.name;
// this.resourceOp = EntityOpEnum.OWN.name;
//});
retract(t);
return saveRuleTask(t);
}
}
""";
#TaskRules.saveRuleTaskCb = (t, cb) ->
# logger.debug "saveRuleTask: Creating task [cb]: " + t.bid
# model = new MongoOperations['models']['task'] t
# model.save (err) ->
# if err then cb(err) else cb(null)
# Build (but do not await) one Mongoose saveQ() promise per raw task object.
# Returns the array of promises; callers combine them with Q.all/Q.allSettled.
TaskRules.generateSaveTasksPromises = (tasks) ->
  promises = []
  # Potentially consider https://groups.google.com/forum/#!topic/mongoose-orm/IkPmvcd0kds if this ever proves to be
  # too slow. For 10-20k inserts, probably won't be too slow. Anything more, might want to bulk insert
  _.each tasks, (t) ->
    #logger.debug "Made task: #{prettyjson.render t}"
    promises.push new MongoOperations['models']['task'](t).saveQ()
  promises
# This is for task discovery, to insert a minimal look at all existing tasks for the rules
# Fetch every task document whose state is not CLOSED.
# Returns a Q promise (mongoose-q execQ) resolving to the matching documents.
TaskRules.getExistingTasks = () ->
  MongoOperations['models']['task']
    .find()
    .where('state').ne(TaskStateEnum.CLOSED)
    #.select('bid taskOp resourceOp owner')
    .execQ()
# Wrap each plain task object in a Mongoose Task model instance.
TaskRules.getTasks = (tasks) ->
  TaskModel = MongoOperations['models']['task']
  (new TaskModel(raw) for raw in tasks)
# Persist every task in parallel.
# Returns a promise that resolves (with no value, matching the old
# deferred.resolve()) once all saves succeed, or rejects with the first
# save error.
# Cleanup: the previous version used the deferred anti-pattern around a
# promise Q.all already provides; return the chain directly instead.
TaskRules.saveTasks = (tasks) ->
  Q.all(@generateSaveTasksPromises(tasks))
    .then -> return
#TaskRules.makeTaskFromRule = (t) ->
# _id: t['_id']
# bid: t['bid']
# score: t['score']
# timeout: t['score']
# sbrs: t['score']
# tags: t['score']
# owner: t['score']
# completed: t['score']
# type: t['score']
# taskOp: t['score']
# resourceOp: t['score']
# state: t['score']
# 'case': t['case']
# A simple update to Mongo with the case meta data. The input is the noop task which will have the very latest
# case data always
# TODO -- the decision here to make is should I always update all existing tasks prior to assert? I would think so
# This would simplify things and make the rules less error prone
# Bulk-update every task document sharing this task's bid with the latest
# case metadata (field set built by taskFromCaseUpdateHash).
# Bug fix: the filter previously referenced an undefined variable `c`,
# which raised a ReferenceError whenever this was invoked. A task's bid
# is its case's CaseNumber (see divineTasks, where outputHash is keyed by
# CaseNumber and looked up via t['bid']), so filter on t['bid'].
TaskRules.updateTasksWithCaseMetadata = (t) ->
  MongoOperations['models']['task'].where()
    .setOptions({multi: true})
    .update({'bid': t['bid']}, @taskFromCaseUpdateHash(t, t['case']))
    .exec()
# From freshly-fetched Salesforce cases, derive the full working set of
# tasks: update tasks that already exist in Mongo with the latest case
# metadata, build new NOOP tasks for cases with no prior task, then resolve
# with [re-fetched existing tasks..., new noop tasks...].
# Returns a Q promise.
TaskRules.divineTasks = (cases) ->
  deferred = Q.defer()
  # Remove these attributes from SF and do other transforms
  _.each cases, (c) ->
    delete c['attributes']
    c['SBR_Group__c'] = TaskRules.parseSfArray(c['SBR_Group__c'])
    c['Tags__c'] = TaskRules.parseSfArray(c['Tags__c'])
  # Output hash will contain the case and task
  #outputHash = _.object(_.map(cases, (x) -> [x['CaseNumber'], {'case': x, 'task': undefined}]))
  # Index the cases by CaseNumber so existing tasks can find their case below.
  outputHash = _.object(_.map(cases, (x) ->
    [x['CaseNumber'], {'case': x, 'task': undefined}]))
  # grab a list of existing tasks in the db based on the fetched case numbers
  caseNumbers = _.chain(cases).pluck('CaseNumber').value()
  MongoOperations['models']['task']
    .find()
    .where('bid')
    .in(caseNumbers)
    #.select('bid')
    .execQ()
    .done((tasks) ->
      # TODO -- Ultimately here I want to convert anything seen to NOOP tasks, this will be much simpler to handle, then
      # just re-assert all existing un-closed tasks to the rules every time, This will give the ultimate flexibility I believe
      # These represent existing tasks
      existingCaseNumbers = _.chain(tasks).pluck('bid').unique().value()
      # Find all new cases that are not tasks by rejecting all the cases that overlap with existing tasks
      newCases = _.chain(cases).reject((c) ->
        _.contains(existingCaseNumbers, c['CaseNumber'])).value()
      # Make new tasks
      newTasks = _.map(newCases, (c) ->
        TaskRules.makeTaskFromCase(c))
      logger.debug "Discovered #{newTasks.length} new tasks"
      # Existing tasks
      # NOTE(review): this filter is a tautology — every fetched task's bid is
      # in existingCaseNumbers by construction; kept as-is.
      existingTasks = _.chain(tasks).filter((t) ->
        _.contains(existingCaseNumbers, t['bid'])).value()
      logger.debug "Discovered #{existingTasks.length} existing tasks"
      # Update existing Tasks, the updates are only to case meta data at this point, nothing else
      taskPromises = []
      _.each existingTasks, (t) ->
        c = outputHash[t['bid']]['case']
        updateHash = TaskRules.taskFromCaseUpdateHash(t, c)
        # Update the in-memory task
        _.assign t, updateHash
        # TODO -- should also specify task state != closed/completed/abandoned
        # taskPromises.push MongoOperations['models']['task'].where().setOptions({multi: true}).update({'bid': c['CaseNumber']}, updateHash).exec()
        # TODO -- make this change and haven't retested the rules yet
        taskPromises.push TaskRules.updateTaskFromCase(t, c)
      noopTasks = TaskRules.getTasks newTasks
      #deferred.resolve noopTasks
      # The below logic is for saving those new tasks, not sure this is really what we want -- let's just flatten out
      # the existing tasks for now
      # The taskPromises will be comprised of all task updates and task inserts
      # taskPromises = _.chain([taskPromises, TaskRules.generateSaveTasksPromises(newTasks)]).flatten().value()
      taskPromises = _.chain(taskPromises).value()
      # In the chain the noopTasks are undefined, gotta figure out what's up
      # allSettled: wait for every update, even if some fail.
      Q.allSettled(taskPromises)
        .then(->
          d = Q.defer()
          logger.debug "Completed all task promises, re-fetching the tasks from mongo"
          # Now re-fetch all the tasks from Mongo so we know we absolutely have the latest consistent versions
          # TODO -- Attempt to promisify this again
          MongoOperations['models']['task']
            .where('bid')
            .in(existingCaseNumbers)
            .exec (err, results) ->
              logger.debug "re-fetched #{results.length} results"
              if err? then d.reject err else d.resolve results
          d.promise
        )
        .then((results) ->
          output = _.chain([results, noopTasks]).flatten().value()
          logger.debug "Resolving the main deferred with a total of #{results.length} existing tasks, and #{noopTasks.length} noop tasks"
          deferred.resolve output
        )
        .fail((err) ->
          logger.error err.stack)
        .done()
    , (err) ->
      deferred.reject err
    )
  deferred.promise
# Compile the nools flow once (fact definitions + rule bodies), exposing the
# helpers rule bodies call via `scope`, and reset the event counters that
# initSession/executeTest maintain and report.
TaskRules.initFlow = () ->
  @beginFire = 0
  @endFire = 0
  @flow = nools.compile @noolsDefs + @nools,
    name: 'helloFlow'
    scope:
      logger: logger
      TaskOpEnum: TaskOpEnum
      EntityOpEnum: EntityOpEnum
      saveRuleTask: TaskRules.saveRuleTask
      saveRuleTaskCb: TaskRules.saveRuleTaskCb
      #updateTasksWithCaseMetadata: TaskRules.updateTasksWithCaseMetadata
      prettyjson: prettyjson
  @assertCalls = 0
  @fireCalls = 0
# Debug-log a compact view (bid / taskOp / resourceOp) of a fact,
# prefixed with the session event name that produced it.
TaskRules.printSimple = (op, fact) ->
  summary =
    bid: fact['bid']
    taskOp: fact['taskOp']
    resourceOp: fact['resourceOp']
  logger.debug "#{op}: " + prettyjson.render(summary)
# Create a fresh nools session from the compiled flow and wire the
# assert/retract/modify/fire listeners (verbose output only when debug is
# true). Also maintains the assertCalls/fireCalls counters executeTest logs.
TaskRules.initSession = (debug = false) ->
  @session = @flow.getSession()
  @session.on "assert", (fact) ->
    TaskRules.assertCalls += 1
    if debug is true then TaskRules.printSimple('assert', fact)
  @session.on "retract", (fact) ->
    if debug is true then TaskRules.printSimple('retract', fact)
  @session.on "modify", (fact) ->
    if debug is true then TaskRules.printSimple('modify', fact)
  @session.on "fire", (name, rule) ->
    if debug is true then logger.debug "fired: #{name}"
    TaskRules.fireCalls += 1
# End-to-end smoke test: query Salesforce cases (restricted to
# 'Waiting on Red Hat'), derive tasks via divineTasks, assert each into the
# rules session, fire the rules, then dispose the session and EXIT the
# process (this is a standalone harness, not library code).
TaskRules.executeTest = () ->
  #Case = flow.getDefined("Case")
  Task = @flow.getDefined("Task")
  # TODO -- I do need to fetch != 'Closed' as well, for now though, testing with WoRH is sufficient
  #soql = soql.replace /#andStatusCondition#/, " AND Status != 'Closed'"
  soql = @soql.replace /#andStatusCondition#/, " AND Status = 'Waiting on Red Hat'"
  Q.nfcall(salesforce.querySf, {'soql': soql})
    .then((cases) ->
      TaskRules.divineTasks(cases)
    )
    .then((tasks) ->
      logger.debug "Completed persisting the tasks"
      #_.each obj['cases'], (x) ->
      # delete x['attributes']
      # c = new Case(x)
      # session.assert c
      _.each tasks, (x) ->
        t = new Task(x)
        TaskRules.session.assert t
      # Time the rule firing for the log line below.
      beginFire = +moment()
      TaskRules.session.match().then(() ->
        logger.info "Done, assert calls: #{TaskRules.assertCalls}, fire calls: #{TaskRules.fireCalls}"
        endFire = +moment()
        dur = ((endFire - beginFire) / 1000).toFixed(0)
        logger.info "Completed firing rules in #{dur}s"
        TaskRules.session.dispose()
        process.exit(0)
      , (err) ->
        logger.error err.stack
      )
    )
    .done()
module.exports = TaskRules
# Standalone harness entry point: connect Mongo, compile the rule flow,
# reset the collections, then run the end-to-end test (which exits the
# process when done).
if require.main is module
  MongoOperations.init({mongoDebug: true})
  db = mongoose['connection']
  db.on 'error', logger.error.bind(logger, 'connection error:')
  db.once 'open', () ->
    MongoOperations.defineCollections()
    TaskRules.initFlow()
    TaskRules.initSession(false)
    MongoOperations.reset().done(->
      TaskRules.executeTest()
    , (err) ->
      logger.error err.stack
    )
    #TaskRules.executeTest()
|
[
{
"context": "plating function\n\n# Simple JavaScript Templating\n# John Resig - http://ejohn.org/ - MIT Licensed\ntmplCache = {}",
"end": 1744,
"score": 0.9995851516723633,
"start": 1734,
"tag": "NAME",
"value": "John Resig"
}
] | src/utils.coffee | Pranshumehta/hem | 20 | path = require('path')
fs = require('fs-extra')
utils = {}
# check for windows :o(...
isWin = !!require('os').platform().match(/^win/)
# Recursively flatten nested arrays into one array, dropping falsy items.
# The accumulator is mutated and returned so recursive calls share it.
utils.flatten = flatten = (array, results = []) ->
  array.forEach (entry) ->
    if Array.isArray(entry)
      flatten(entry, results)
    else
      results.push(entry) if entry
  results
# Join an array of lines into one string with a trailing "\n" after each
# line; non-array values pass through unchanged.
utils.arrayToString = (value) ->
  return value unless Array.isArray(value)
  (line + "\n" for line in value).join('')
# Return a copy of `array` with duplicates removed; the first occurrence
# of each value wins and original order is preserved.
utils.removeDuplicateValues = (array) ->
  seen = []
  for value in array when value not in seen
    seen.push(value)
  seen
# Coerce a scalar (or missing) value into a one-element array; arrays pass
# through untouched.
utils.toArray = (value = []) ->
  return value if Array.isArray(value)
  [value]
# True when `str` begins with `value` (null-safe on str).
utils.startsWith = (str, value) ->
  value is str?.slice(0, value.length)
# True when `str` ends with `value` (null-safe on str).
# Bug fix: for an empty suffix, slice(-0) === slice(0) returned the WHOLE
# string, which compared unequal to "" and wrongly yielded false; the empty
# string is a suffix of everything, so handle it explicitly.
utils.endsWith = (str, value) ->
  return true if str? and value.length is 0
  str?.slice(-value.length) is value
# Deep-merge `b` into `a` IN PLACE: plain nested objects are recursed into,
# while arrays and scalars overwrite by assignment. Returns the mutated `a`.
# NOTE(review): `for x of b` also walks inherited enumerable keys —
# presumably inputs are plain literals; confirm if that ever matters.
utils.extend = extend = (a, b) ->
  for x of b
    if typeof b[x] is 'object' and not Array.isArray(b[x])
      a[x] or= {}
      extend(a[x], b[x])
    else
      a[x] = b[x]
  return a
# Require a bundled asset relative to the package's assets directory.
utils.loadAsset = (asset) ->
  require "../assets/#{asset}"
# Synchronously copy `from` to `to` through a fixed 64 KiB buffer,
# creating the target file (and parent directories, via fs-extra) first.
utils.copyFile = (from, to) ->
  # make sure target files exists
  fs.createFileSync(to)
  # constants
  BUF_LENGTH = 64 * 1024
  _buff = new Buffer(BUF_LENGTH)
  # perform copy
  fdr = fs.openSync(from, 'r')
  fdw = fs.openSync(to, 'w')
  bytesRead = 1
  pos = 0
  # read/write in chunks until a zero-length read signals EOF
  while bytesRead > 0
    bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos)
    fs.writeSync(fdw, _buff, 0, bytesRead)
    pos += bytesRead
  fs.closeSync(fdr)
  fs.closeSync(fdw)
# True when `dir` exists and is a directory (lstat: symlinks NOT followed).
# Any filesystem error (e.g. missing path) yields false.
utils.isDirectory = (dir) ->
  try
    fs.lstatSync(dir).isDirectory()
  catch error
    false
# ------ Simple templating function
# Simple JavaScript Templating
# John Resig - http://ejohn.org/ - MIT Licensed
# Cache of compiled template functions, keyed by template name.
tmplCache = {}
# John Resig-style micro-templating. `str` is either a template NAME
# (no whitespace or '%' characters) to load from the assets directory,
# or inline template source. Returns the compiled function, or — when
# `data` is given — the rendered string.
utils.tmpl = (str, data) ->
  # Figure out if we're getting a template, or if we need to
  # load the template - and be sure to cache the result.
  if not /[\t\r\n% ]/.test(str)
    if tmplCache[str]
      fn = tmplCache[str]
    else
      # load + compile the named asset, and actually store it in the cache
      # (bug fix: the cache was checked but never written, so every call
      # re-read and re-compiled the asset).
      template = utils.loadAsset("#{str}.tmpl")
      fn = tmplCache[str] = utils.tmpl(template)
  else
    # Convert the template into pure JavaScript
    str = str
      .split("'").join("\\'")
      .split("\n").join("\\n")
      .replace(/<%([\s\S]*?)%>/mg, (m, t) -> '<%' + t.split("\\'").join("'").split("\\n").join("\n") + '%>')
      .replace(/<%=(.+?)%>/g, "',$1,'")
      .split("<%").join("');")
      .split("%>").join("p.push('")
    # Generate a reusable function that will serve as a template
    fn = new Function("obj",
      """
      var p=[]
      var print = function(){ p.push.apply(p,arguments); };
      with(obj){
      p.push('#{str}');
      }
      return p.join('');
      """
    )
  # Provide some basic currying to the user
  return data and fn( data ) or fn
# ------ Formatting urls and folder paths
# Join the truthy entries of `values` with `sep`, collapsing runs of
# duplicate separators, optionally trimming a leading separator, and always
# trimming a trailing one.
# NOTE(review): `sep` is interpolated into a RegExp un-escaped; with a
# backslash separator (Windows path.sep) "#{sep}+" becomes "\\+", a literal
# plus — confirm whether duplicate-collapsing is expected to work there.
clean = (values, sep, trimStart = false) ->
  result = ""
  for value in values when value
    result = result + sep + value
  # clean duplicate sep
  regexp = new RegExp "#{sep}+","g"
  result = result.replace(regexp, sep)
  # trim the starting path sep if there is one
  if trimStart and utils.startsWith(result, sep)
    result = result.slice(sep.length)
  # make sure doesn't end in sep
  if utils.endsWith(result, sep)
    result = result.slice(0, -sep.length)
  result
# Join path fragments with the OS separator (via `clean`), then normalize
# any forward slashes inside fragments to path.sep.
# Cleanup: the old guard read `if isWin or true`, i.e. the replacement
# ALWAYS ran; on POSIX path.sep is '/', so replacing '/' with '/' is a
# no-op and dropping the dead condition preserves behavior everywhere.
utils.cleanPath = (paths...) ->
  result = clean(paths, path.sep, true)
  # deal with windows paths :o(...
  result = result.replace(/\//g, path.sep)
  result
# Join URL route fragments with "/" (duplicate separators collapsed by clean).
utils.cleanRoute = (routes...) -> clean(routes, "/")
module.exports = utils
| 76637 | path = require('path')
fs = require('fs-extra')
utils = {}
# check for windows :o(...
isWin = !!require('os').platform().match(/^win/)
utils.flatten = flatten = (array, results = []) ->
for item in array
if Array.isArray(item)
flatten(item, results)
else if item
results.push(item)
results
utils.arrayToString = (value) ->
if Array.isArray(value)
result = ""
for line in value
result += line + "\n"
result
else
value
utils.removeDuplicateValues = (array) ->
newArray = []
for value in array
if value not in newArray
newArray.push(value)
newArray
utils.toArray = (value = []) ->
if Array.isArray(value) then value else [value]
utils.startsWith = (str, value) ->
str?.slice(0, value.length) is value
utils.endsWith = (str, value) ->
str?.slice(-value.length) is value
utils.extend = extend = (a, b) ->
for x of b
if typeof b[x] is 'object' and not Array.isArray(b[x])
a[x] or= {}
extend(a[x], b[x])
else
a[x] = b[x]
return a
utils.loadAsset = (asset) ->
require("../assets/" + asset)
utils.copyFile = (from, to) ->
# make sure target files exists
fs.createFileSync(to)
# constants
BUF_LENGTH = 64 * 1024
_buff = new Buffer(BUF_LENGTH)
# perform copy
fdr = fs.openSync(from, 'r')
fdw = fs.openSync(to, 'w')
bytesRead = 1
pos = 0
while bytesRead > 0
bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos)
fs.writeSync(fdw, _buff, 0, bytesRead)
pos += bytesRead
fs.closeSync(fdr)
fs.closeSync(fdw)
utils.isDirectory = (dir) ->
try
stats = fs.lstatSync(dir)
stats.isDirectory()
catch e
false
# ------ Simple templating function
# Simple JavaScript Templating
# <NAME> - http://ejohn.org/ - MIT Licensed
tmplCache = {};
utils.tmpl = (str, data) ->
# Figure out if we're getting a template, or if we need to
# load the template - and be sure to cache the result.
if not /[\t\r\n% ]/.test(str)
if tmplCache[str]
fn = tmplCache[str]
else
# load file
template = utils.loadAsset("#{str}.tmpl")
fn = utils.tmpl(template)
else
# Convert the template into pure JavaScript
str = str
.split("'").join("\\'")
.split("\n").join("\\n")
.replace(/<%([\s\S]*?)%>/mg, (m, t) -> '<%' + t.split("\\'").join("'").split("\\n").join("\n") + '%>')
.replace(/<%=(.+?)%>/g, "',$1,'")
.split("<%").join("');")
.split("%>").join("p.push('")
# Generate a reusable function that will serve as a template
fn = new Function("obj",
"""
var p=[]
var print = function(){ p.push.apply(p,arguments); };
with(obj){
p.push('#{str}');
}
return p.join('');
"""
)
# Provide some basic currying to the user
return data and fn( data ) or fn;
# ------ Formatting urls and folder paths
clean = (values, sep, trimStart = false) ->
result = ""
for value in values when value
result = result + sep + value
# clean duplicate sep
regexp = new RegExp "#{sep}+","g"
result = result.replace(regexp, sep)
# trim the starting path sep if there is one
if trimStart and utils.startsWith(result, sep)
result = result.slice(sep.length)
# make sure doesn't end in sep
if utils.endsWith(result, sep)
result = result.slice(0, -sep.length)
result
utils.cleanPath = (paths...) ->
result = clean(paths, path.sep, true)
# deal with windows paths :o(...
if isWin or true
cleanPath = new RegExp /\//g
result = result.replace(cleanPath, path.sep)
result
utils.cleanRoute = (routes...) ->
clean(routes, "/")
module.exports = utils
| true | path = require('path')
fs = require('fs-extra')
utils = {}
# check for windows :o(...
isWin = !!require('os').platform().match(/^win/)
utils.flatten = flatten = (array, results = []) ->
for item in array
if Array.isArray(item)
flatten(item, results)
else if item
results.push(item)
results
utils.arrayToString = (value) ->
if Array.isArray(value)
result = ""
for line in value
result += line + "\n"
result
else
value
utils.removeDuplicateValues = (array) ->
newArray = []
for value in array
if value not in newArray
newArray.push(value)
newArray
utils.toArray = (value = []) ->
if Array.isArray(value) then value else [value]
utils.startsWith = (str, value) ->
str?.slice(0, value.length) is value
utils.endsWith = (str, value) ->
str?.slice(-value.length) is value
utils.extend = extend = (a, b) ->
for x of b
if typeof b[x] is 'object' and not Array.isArray(b[x])
a[x] or= {}
extend(a[x], b[x])
else
a[x] = b[x]
return a
utils.loadAsset = (asset) ->
require("../assets/" + asset)
utils.copyFile = (from, to) ->
# make sure target files exists
fs.createFileSync(to)
# constants
BUF_LENGTH = 64 * 1024
_buff = new Buffer(BUF_LENGTH)
# perform copy
fdr = fs.openSync(from, 'r')
fdw = fs.openSync(to, 'w')
bytesRead = 1
pos = 0
while bytesRead > 0
bytesRead = fs.readSync(fdr, _buff, 0, BUF_LENGTH, pos)
fs.writeSync(fdw, _buff, 0, bytesRead)
pos += bytesRead
fs.closeSync(fdr)
fs.closeSync(fdw)
utils.isDirectory = (dir) ->
try
stats = fs.lstatSync(dir)
stats.isDirectory()
catch e
false
# ------ Simple templating function
# Simple JavaScript Templating
# PI:NAME:<NAME>END_PI - http://ejohn.org/ - MIT Licensed
tmplCache = {};
utils.tmpl = (str, data) ->
# Figure out if we're getting a template, or if we need to
# load the template - and be sure to cache the result.
if not /[\t\r\n% ]/.test(str)
if tmplCache[str]
fn = tmplCache[str]
else
# load file
template = utils.loadAsset("#{str}.tmpl")
fn = utils.tmpl(template)
else
# Convert the template into pure JavaScript
str = str
.split("'").join("\\'")
.split("\n").join("\\n")
.replace(/<%([\s\S]*?)%>/mg, (m, t) -> '<%' + t.split("\\'").join("'").split("\\n").join("\n") + '%>')
.replace(/<%=(.+?)%>/g, "',$1,'")
.split("<%").join("');")
.split("%>").join("p.push('")
# Generate a reusable function that will serve as a template
fn = new Function("obj",
"""
var p=[]
var print = function(){ p.push.apply(p,arguments); };
with(obj){
p.push('#{str}');
}
return p.join('');
"""
)
# Provide some basic currying to the user
return data and fn( data ) or fn;
# ------ Formatting urls and folder paths
clean = (values, sep, trimStart = false) ->
result = ""
for value in values when value
result = result + sep + value
# clean duplicate sep
regexp = new RegExp "#{sep}+","g"
result = result.replace(regexp, sep)
# trim the starting path sep if there is one
if trimStart and utils.startsWith(result, sep)
result = result.slice(sep.length)
# make sure doesn't end in sep
if utils.endsWith(result, sep)
result = result.slice(0, -sep.length)
result
utils.cleanPath = (paths...) ->
result = clean(paths, path.sep, true)
# deal with windows paths :o(...
if isWin or true
cleanPath = new RegExp /\//g
result = result.replace(cleanPath, path.sep)
result
utils.cleanRoute = (routes...) ->
clean(routes, "/")
module.exports = utils
|
[
{
"context": "###\n# Copyright jtlebi.fr <admin@jtlebi.fr> and other contributors.",
"end": 17,
"score": 0.8892978429794312,
"start": 16,
"tag": "EMAIL",
"value": "j"
},
{
"context": "###\n# Copyright jtlebi.fr <admin@jtlebi.fr> and other contributors.\n#\n# Per",
"end": 25,
... | static/js/app/app.coffee | yadutaf/Weathermap-archive | 1 | ###
# Copyright jtlebi.fr <admin@jtlebi.fr> and other contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
###
WM = Em.Application.create()
baseurl = "/wm-api"
basedir = "/wm/js/app"
###
Utils
###
# Return the enumerable key names of an object (for..of walks own and
# inherited enumerable keys).
keys = (obj) ->
  key for key, value of obj
# Last element accessor, added to the NATIVE Array prototype.
# NOTE(review): extending built-in prototypes is globally visible; kept
# as-is since other code may rely on it.
Array.prototype.last = ->
  @[@length-1]
###
Models/Controllers
###
# Generic single-selection list controller.
# `value` is the active selection and must be a member of `options`;
# `candidate` parks a wished-for value until it becomes valid;
# `default` controls auto-selection: 'single' auto-picks only when exactly
# one option exists, 'first' always picks the first option.
ListController = Em.ArrayController.extend {
  default: 'single'#may be 'single' or 'first'
  value: '' #active value. *MUST* be valid (ie: in options array)
  candidate: '' #a value we will try to apply asa it becomes valid, if ever
  options: [] #list of valid options
  _timeout: null #auto-update timer
  # Request a selection: apply immediately if it is a valid option,
  # otherwise clear `value` and remember the request in `candidate`.
  wish: (value) ->
    if value in @get 'options'
      @set 'value', value
      @set 'candidate', ''
    else
      @set 'value', ''
      @set 'candidate', value
  # Base reload hook: cancel the auto-refresh timer, demote the current
  # value to a candidate, and (when the parent changed) clear the state.
  load: (parentChanged)->
    #clear auto-update timer
    clearTimeout @_timeout if @_timeout
    @_timeout = null
    #reset data before update
    value = @get 'value'
    if value
      @set 'candidate', value
    if parentChanged
      @set 'value', ''
      @set 'options', []
  # Re-evaluate the selection whenever options (or the default mode) change.
  _autoSelect: (() ->
    options = @get 'options'
    value = @get 'value'
    def = @get 'default'
    #if we have no options
    if options.length is 0
      if value.length
        @set 'candidate', value
        @set 'value', ''
      return
    #if we have a selected value
    if value in options
      return
    # a previously parked candidate just became valid: promote it
    if value.length is 0 and @get('candidate') in options
      @set 'value', @get 'candidate'
      @set 'candidate', ""
      return
    #if we have only one value in single mode, auto-select it
    if options.length is 1 and def is 'single'
      @set 'value', options[0]
      @set 'candidate', ""
    #if we have more than 1 value in 'first' mode, auto-select it
    else if options.length >= 1 and def is 'first'
      @set 'value', options[0]
      @set 'candidate', ''
    else#keep value in candidate until a manual selection is operated
      @set 'candidate', value
      @set 'value', ''
  ).observes('options', 'default')
}
# Factory for ListController instances backing the cascading menus.
# `name` is the REST resource name ("group", "map", ...); `parentName`
# (optional) chains this controller's URL and reloads to the parent's
# selected value; `init` holds per-controller overrides.
# Bug fix: the two-argument form `createListController(name, init)` tested
# `'Object' is typeof parentName`, which can never match because typeof
# yields lowercase 'object'; detect an object second argument correctly and
# also null out parentName so the child branch is not taken. Existing
# visible callers (1-arg and 3-arg forms) behave exactly as before.
createListController = (name, parentName, init) ->
  if parentName? and typeof parentName is 'object'
    init = parentName
    parentName = null
  init = init || {}
  controller = if parentName
    ListController.extend {
      parentValueBinding: Em.Binding.oneWay 'WM.'+parentName+'s.value'
      parentUrlBinding: Em.Binding.oneWay 'WM.'+parentName+'s.databaseurl'
      # Child URL: <parent database url>/<parent selected value>
      databaseurl: (->
        @get('parentUrl')+"/"+@get('parentValue')
      ).property('parentValue', 'parentUrl')
      # Fetch this resource list under the parent's selection; the caller
      # supplies `cb` to store/post-process the data.
      load: ((parentChanged, cb)->
        @_super(parentChanged)
        parentValue = @get 'parentValue'
        if parentValue
          $.getJSON @get('databaseurl')+"/"+name+"s", (data)->
            cb(data)
      )
    }
  else
    ListController.extend {
      databaseurl: baseurl
      # Root controller: fetch, store when changed, refresh hourly.
      load: ((parentChanged) ->
        @_super(parentChanged)
        $.getJSON @get('databaseurl')+"/"+name+"s", (data) =>
          if not data.compareArrays(@get 'options')
            @set 'options', data
          @_timeout = setTimeout (=> @load(false)), 60*60*1000#1 hour
      )
    }
  controller.create(init)
# Cascading controllers: groups -> maps -> dates -> times.
WM.groups = createListController "group"
# Maps reload whenever the selected group changes; refresh hourly.
WM.maps = createListController "map", "group", {
  load: ((parentChanged)->
    @_super true, (data) =>
      if not data.compareArrays(@get 'options')
        @set 'options', data
      @_timeout = setTimeout (=> @load(false)), 60*60*1000#1 hour
  )
  _load: (-> @load true).observes 'parentValue'
}
# Dates are shown newest-first and auto-select the first entry; 10 min refresh.
WM.dates = createListController "date", "map", {
  default: 'first'
  load: ((parentChanged)->
    @_super true, (data) =>
      data.sort()
      data.reverse()
      if not data.compareArrays(@get 'options')
        @set 'options', data
      @_timeout = setTimeout (=> @load(false)), 10*60*1000#10 min
  )
  _load: (-> @load true).observes 'parentValue'
}
# Times come back as a hash; keys (newest-first) become the options and the
# full hash is kept in `cache` so `selected` resolves the active entry.
WM.times = createListController "time", "date", {
  default: 'first'
  cache: {}
  selected: (->
    @get('cache')[@get 'value']
  ).property 'value'
  load: ((parentChanged)->
    @_super parentChanged, (data) =>
      k = keys data
      k.sort()
      k.reverse()
      if not k.compareArrays(@get 'options')
        @set 'options', k
      @set 'cache', data
      @_timeout = setTimeout (=> @load(false)), 60*1000#1 min
  )
  _load: (-> @load true).observes 'parentValue'
}
###
Views
###
# Main view. Nothing special here. Mostly a "hook"
WM.main = Em.View.create {
templateName: 'main'
}
MainMenuView = Em.View.extend {
defaultTitle: 'Dropdown'
templateName: 'menu-list'
active: false
title: (->
value = @get 'value'
if value then value else @get 'defaultTitle'
).property 'value'
select: (e) ->
$(e.target).parents('.open').removeClass('open')
@set 'value', e.context
return false
}
createMenu = (name, activeRule, ext) ->
menu = MainMenuView.extend {
valueBinding: 'WM.'+name+'s.value'
optionsBinding: Em.Binding.oneWay 'WM.'+name+'s.options'
}
if activeRule == true
ext.active = true
else if activeRule.length
ext.active = (->
return if @get(activeRule) and @get(activeRule).length then true else false
).property activeRule
menu.create ext
WM.GroupListView = createMenu 'group', true, {
defaultTitle: 'Group name'
}
WM.MapListView = createMenu 'map', 'group', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
defaultTitle: 'Map name'
}
WM.DateListView = createMenu 'date', 'map', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
mapBinding: Em.Binding.oneWay 'WM.maps.value'
defaultTitle: 'Date'
}
WM.TimeListView = createMenu 'time', 'date', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
mapBinding: Em.Binding.oneWay 'WM.maps.value'
dateBinding: Em.Binding.oneWay 'WM.dates.value'
defaultTitle: 'Time'
}
###
init
###
$ ->
#application re-start
$.ajaxSetup { headers: {'accept-version': "~0.2"}}
WM.groups.load()
#load views
WM.main.appendTo 'body'
WM.GroupListView.appendTo '#list-menu'
WM.MapListView.appendTo '#list-menu'
WM.DateListView.appendTo '#list-menu'
WM.TimeListView.appendTo '#list-menu'
| 39056 | ###
# Copyright <EMAIL>tlebi.fr <<EMAIL>> and other contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
###
WM = Em.Application.create()
baseurl = "/wm-api"
basedir = "/wm/js/app"
###
Utils
###
keys = (obj) ->
key for key, value of obj
Array.prototype.last = ->
@[@length-1]
###
Models/Controllers
###
ListController = Em.ArrayController.extend {
default: 'single'#may be 'single' or 'first'
value: '' #active value. *MUST* be valid (ie: in options array)
candidate: '' #a value we will try to apply asa it becomes valid, if ever
options: [] #list of valid options
_timeout: null #auto-update timer
wish: (value) ->
if value in @get 'options'
@set 'value', value
@set 'candidate', ''
else
@set 'value', ''
@set 'candidate', value
load: (parentChanged)->
#clear auto-update timer
clearTimeout @_timeout if @_timeout
@_timeout = null
#reset data before update
value = @get 'value'
if value
@set 'candidate', value
if parentChanged
@set 'value', ''
@set 'options', []
_autoSelect: (() ->
options = @get 'options'
value = @get 'value'
def = @get 'default'
#if we have no options
if options.length is 0
if value.length
@set 'candidate', value
@set 'value', ''
return
#if we have a selected value
if value in options
return
if value.length is 0 and @get('candidate') in options
@set 'value', @get 'candidate'
@set 'candidate', ""
return
#if we have only one value in single mode, auto-select it
if options.length is 1 and def is 'single'
@set 'value', options[0]
@set 'candidate', ""
#if we have more than 1 value in 'first' mode, auto-select it
else if options.length >= 1 and def is 'first'
@set 'value', options[0]
@set 'candidate', ''
else#keep value in candidate until a manual selection is operated
@set 'candidate', value
@set 'value', ''
).observes('options', 'default')
}
createListController = (name, parentName, init) ->
if 'Object' is typeof parentName
init = parentName
init = init || {}
controller = if parentName
ListController.extend {
parentValueBinding: Em.Binding.oneWay 'WM.'+parentName+'s.value'
parentUrlBinding: Em.Binding.oneWay 'WM.'+parentName+'s.databaseurl'
databaseurl: (->
@get('parentUrl')+"/"+@get('parentValue')
).property('parentValue', 'parentUrl')
load: ((parentChanged, cb)->
@_super(parentChanged)
parentValue = @get 'parentValue'
if parentValue
$.getJSON @get('databaseurl')+"/"+name+"s", (data)->
cb(data)
)
}
else
ListController.extend {
databaseurl: baseurl
load: ((parentChanged) ->
@_super(parentChanged)
$.getJSON @get('databaseurl')+"/"+name+"s", (data) =>
if not data.compareArrays(@get 'options')
@set 'options', data
@_timeout = setTimeout (=> @load(false)), 60*60*1000#1 hour
)
}
controller.create(init)
WM.groups = createListController "group"
WM.maps = createListController "map", "group", {
load: ((parentChanged)->
@_super true, (data) =>
if not data.compareArrays(@get 'options')
@set 'options', data
@_timeout = setTimeout (=> @load(false)), 60*60*1000#1 hour
)
_load: (-> @load true).observes 'parentValue'
}
WM.dates = createListController "date", "map", {
default: 'first'
load: ((parentChanged)->
@_super true, (data) =>
data.sort()
data.reverse()
if not data.compareArrays(@get 'options')
@set 'options', data
@_timeout = setTimeout (=> @load(false)), 10*60*1000#10 min
)
_load: (-> @load true).observes 'parentValue'
}
WM.times = createListController "time", "date", {
default: 'first'
cache: {}
selected: (->
@get('cache')[@get 'value']
).property 'value'
load: ((parentChanged)->
@_super parentChanged, (data) =>
k = keys data
k.sort()
k.reverse()
if not k.compareArrays(@get 'options')
@set 'options', k
@set 'cache', data
@_timeout = setTimeout (=> @load(false)), 60*1000#1 min
)
_load: (-> @load true).observes 'parentValue'
}
###
Views
###
# Main view. Nothing special here. Mostly a "hook"
WM.main = Em.View.create {
templateName: 'main'
}
MainMenuView = Em.View.extend {
defaultTitle: 'Dropdown'
templateName: 'menu-list'
active: false
title: (->
value = @get 'value'
if value then value else @get 'defaultTitle'
).property 'value'
select: (e) ->
$(e.target).parents('.open').removeClass('open')
@set 'value', e.context
return false
}
createMenu = (name, activeRule, ext) ->
menu = MainMenuView.extend {
valueBinding: 'WM.'+name+'s.value'
optionsBinding: Em.Binding.oneWay 'WM.'+name+'s.options'
}
if activeRule == true
ext.active = true
else if activeRule.length
ext.active = (->
return if @get(activeRule) and @get(activeRule).length then true else false
).property activeRule
menu.create ext
WM.GroupListView = createMenu 'group', true, {
defaultTitle: 'Group name'
}
WM.MapListView = createMenu 'map', 'group', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
defaultTitle: 'Map name'
}
WM.DateListView = createMenu 'date', 'map', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
mapBinding: Em.Binding.oneWay 'WM.maps.value'
defaultTitle: 'Date'
}
WM.TimeListView = createMenu 'time', 'date', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
mapBinding: Em.Binding.oneWay 'WM.maps.value'
dateBinding: Em.Binding.oneWay 'WM.dates.value'
defaultTitle: 'Time'
}
###
init
###
$ ->
#application re-start
$.ajaxSetup { headers: {'accept-version': "~0.2"}}
WM.groups.load()
#load views
WM.main.appendTo 'body'
WM.GroupListView.appendTo '#list-menu'
WM.MapListView.appendTo '#list-menu'
WM.DateListView.appendTo '#list-menu'
WM.TimeListView.appendTo '#list-menu'
| true | ###
# Copyright PI:EMAIL:<EMAIL>END_PItlebi.fr <PI:EMAIL:<EMAIL>END_PI> and other contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
###
WM = Em.Application.create()
baseurl = "/wm-api"
basedir = "/wm/js/app"
###
Utils
###
keys = (obj) ->
key for key, value of obj
Array.prototype.last = ->
@[@length-1]
###
Models/Controllers
###
ListController = Em.ArrayController.extend {
default: 'single'#may be 'single' or 'first'
value: '' #active value. *MUST* be valid (ie: in options array)
candidate: '' #a value we will try to apply asa it becomes valid, if ever
options: [] #list of valid options
_timeout: null #auto-update timer
wish: (value) ->
if value in @get 'options'
@set 'value', value
@set 'candidate', ''
else
@set 'value', ''
@set 'candidate', value
load: (parentChanged)->
#clear auto-update timer
clearTimeout @_timeout if @_timeout
@_timeout = null
#reset data before update
value = @get 'value'
if value
@set 'candidate', value
if parentChanged
@set 'value', ''
@set 'options', []
_autoSelect: (() ->
options = @get 'options'
value = @get 'value'
def = @get 'default'
#if we have no options
if options.length is 0
if value.length
@set 'candidate', value
@set 'value', ''
return
#if we have a selected value
if value in options
return
if value.length is 0 and @get('candidate') in options
@set 'value', @get 'candidate'
@set 'candidate', ""
return
#if we have only one value in single mode, auto-select it
if options.length is 1 and def is 'single'
@set 'value', options[0]
@set 'candidate', ""
#if we have more than 1 value in 'first' mode, auto-select it
else if options.length >= 1 and def is 'first'
@set 'value', options[0]
@set 'candidate', ''
else#keep value in candidate until a manual selection is operated
@set 'candidate', value
@set 'value', ''
).observes('options', 'default')
}
createListController = (name, parentName, init) ->
if 'Object' is typeof parentName
init = parentName
init = init || {}
controller = if parentName
ListController.extend {
parentValueBinding: Em.Binding.oneWay 'WM.'+parentName+'s.value'
parentUrlBinding: Em.Binding.oneWay 'WM.'+parentName+'s.databaseurl'
databaseurl: (->
@get('parentUrl')+"/"+@get('parentValue')
).property('parentValue', 'parentUrl')
load: ((parentChanged, cb)->
@_super(parentChanged)
parentValue = @get 'parentValue'
if parentValue
$.getJSON @get('databaseurl')+"/"+name+"s", (data)->
cb(data)
)
}
else
ListController.extend {
databaseurl: baseurl
load: ((parentChanged) ->
@_super(parentChanged)
$.getJSON @get('databaseurl')+"/"+name+"s", (data) =>
if not data.compareArrays(@get 'options')
@set 'options', data
@_timeout = setTimeout (=> @load(false)), 60*60*1000#1 hour
)
}
controller.create(init)
WM.groups = createListController "group"
WM.maps = createListController "map", "group", {
load: ((parentChanged)->
@_super true, (data) =>
if not data.compareArrays(@get 'options')
@set 'options', data
@_timeout = setTimeout (=> @load(false)), 60*60*1000#1 hour
)
_load: (-> @load true).observes 'parentValue'
}
WM.dates = createListController "date", "map", {
default: 'first'
load: ((parentChanged)->
@_super true, (data) =>
data.sort()
data.reverse()
if not data.compareArrays(@get 'options')
@set 'options', data
@_timeout = setTimeout (=> @load(false)), 10*60*1000#10 min
)
_load: (-> @load true).observes 'parentValue'
}
WM.times = createListController "time", "date", {
default: 'first'
cache: {}
selected: (->
@get('cache')[@get 'value']
).property 'value'
load: ((parentChanged)->
@_super parentChanged, (data) =>
k = keys data
k.sort()
k.reverse()
if not k.compareArrays(@get 'options')
@set 'options', k
@set 'cache', data
@_timeout = setTimeout (=> @load(false)), 60*1000#1 min
)
_load: (-> @load true).observes 'parentValue'
}
###
Views
###
# Main view. Nothing special here. Mostly a "hook"
WM.main = Em.View.create {
templateName: 'main'
}
MainMenuView = Em.View.extend {
defaultTitle: 'Dropdown'
templateName: 'menu-list'
active: false
title: (->
value = @get 'value'
if value then value else @get 'defaultTitle'
).property 'value'
select: (e) ->
$(e.target).parents('.open').removeClass('open')
@set 'value', e.context
return false
}
createMenu = (name, activeRule, ext) ->
menu = MainMenuView.extend {
valueBinding: 'WM.'+name+'s.value'
optionsBinding: Em.Binding.oneWay 'WM.'+name+'s.options'
}
if activeRule == true
ext.active = true
else if activeRule.length
ext.active = (->
return if @get(activeRule) and @get(activeRule).length then true else false
).property activeRule
menu.create ext
WM.GroupListView = createMenu 'group', true, {
defaultTitle: 'Group name'
}
WM.MapListView = createMenu 'map', 'group', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
defaultTitle: 'Map name'
}
WM.DateListView = createMenu 'date', 'map', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
mapBinding: Em.Binding.oneWay 'WM.maps.value'
defaultTitle: 'Date'
}
WM.TimeListView = createMenu 'time', 'date', {
groupBinding: Em.Binding.oneWay 'WM.groups.value'
mapBinding: Em.Binding.oneWay 'WM.maps.value'
dateBinding: Em.Binding.oneWay 'WM.dates.value'
defaultTitle: 'Time'
}
###
init
###
$ ->
#application re-start
$.ajaxSetup { headers: {'accept-version': "~0.2"}}
WM.groups.load()
#load views
WM.main.appendTo 'body'
WM.GroupListView.appendTo '#list-menu'
WM.MapListView.appendTo '#list-menu'
WM.DateListView.appendTo '#list-menu'
WM.TimeListView.appendTo '#list-menu'
|
[
{
"context": " context 'changing the name', ->\n name = 'xavier'\n beforeEach -> Simulate.change(getNameInput",
"end": 869,
"score": 0.8999570608139038,
"start": 863,
"tag": "NAME",
"value": "xavier"
},
{
"context": " props.is -> onSave: sinon.stub()\n name = 'xav... | test/react/components/saver_test.coffee | urmastalimaa/rx-visualize | 10 | require '../react_test_helper'
sinon = require 'sinon'
describe 'Saver', ->
props = memo().is -> {}
getOpenButton = modalCount = getNameInput = getDescriptionInput = getSaveButton = null
beforeEach ->
Saver = require assetPath + 'react/components/saver'
@saver = saver = render(React.createElement(Saver, props()))
getOpenButton = -> findByTag(saver, 'button')
modalCount = -> findModals().length
findModals = -> scryByClass(saver, 'modal-body')
getNameInput = -> findById(saver, 'saveNameInput')
getDescriptionInput = -> findById(saver, 'saveDescriptionInput')
getSaveButton = -> findById(saver, 'saveConfirm')
context 'clicking the button', ->
beforeEach ->
Simulate.click(getOpenButton())
it 'opens a modal', ->
expect(modalCount()).toEqual(1)
context 'changing the name', ->
name = 'xavier'
beforeEach -> Simulate.change(getNameInput(), target: {value: name})
it 'changes the name', ->
expect(getNameInput().getDOMNode().value).toEqual(name)
context 'changing the description', ->
description = 'good boy'
beforeEach -> Simulate.change(getDescriptionInput(), target: {value: description})
it 'changes the name', ->
expect(getDescriptionInput().getDOMNode().value).toEqual(description)
context 'changing name and description and clicking save', ->
props.is -> onSave: sinon.stub()
name = 'xavier'; description = 'good boy'
beforeEach ->
Simulate.change(getNameInput(), target: {value: name})
Simulate.change(getDescriptionInput(), target: {value: description})
Simulate.click(getSaveButton().getDOMNode())
it 'calls onSave', ->
sinon.assert.calledWith(props().onSave, name: name, description: description)
| 152152 | require '../react_test_helper'
sinon = require 'sinon'
describe 'Saver', ->
props = memo().is -> {}
getOpenButton = modalCount = getNameInput = getDescriptionInput = getSaveButton = null
beforeEach ->
Saver = require assetPath + 'react/components/saver'
@saver = saver = render(React.createElement(Saver, props()))
getOpenButton = -> findByTag(saver, 'button')
modalCount = -> findModals().length
findModals = -> scryByClass(saver, 'modal-body')
getNameInput = -> findById(saver, 'saveNameInput')
getDescriptionInput = -> findById(saver, 'saveDescriptionInput')
getSaveButton = -> findById(saver, 'saveConfirm')
context 'clicking the button', ->
beforeEach ->
Simulate.click(getOpenButton())
it 'opens a modal', ->
expect(modalCount()).toEqual(1)
context 'changing the name', ->
name = '<NAME>'
beforeEach -> Simulate.change(getNameInput(), target: {value: name})
it 'changes the name', ->
expect(getNameInput().getDOMNode().value).toEqual(name)
context 'changing the description', ->
description = 'good boy'
beforeEach -> Simulate.change(getDescriptionInput(), target: {value: description})
it 'changes the name', ->
expect(getDescriptionInput().getDOMNode().value).toEqual(description)
context 'changing name and description and clicking save', ->
props.is -> onSave: sinon.stub()
name = '<NAME>'; description = 'good boy'
beforeEach ->
Simulate.change(getNameInput(), target: {value: name})
Simulate.change(getDescriptionInput(), target: {value: description})
Simulate.click(getSaveButton().getDOMNode())
it 'calls onSave', ->
sinon.assert.calledWith(props().onSave, name: name, description: description)
| true | require '../react_test_helper'
sinon = require 'sinon'
describe 'Saver', ->
props = memo().is -> {}
getOpenButton = modalCount = getNameInput = getDescriptionInput = getSaveButton = null
beforeEach ->
Saver = require assetPath + 'react/components/saver'
@saver = saver = render(React.createElement(Saver, props()))
getOpenButton = -> findByTag(saver, 'button')
modalCount = -> findModals().length
findModals = -> scryByClass(saver, 'modal-body')
getNameInput = -> findById(saver, 'saveNameInput')
getDescriptionInput = -> findById(saver, 'saveDescriptionInput')
getSaveButton = -> findById(saver, 'saveConfirm')
context 'clicking the button', ->
beforeEach ->
Simulate.click(getOpenButton())
it 'opens a modal', ->
expect(modalCount()).toEqual(1)
context 'changing the name', ->
name = 'PI:NAME:<NAME>END_PI'
beforeEach -> Simulate.change(getNameInput(), target: {value: name})
it 'changes the name', ->
expect(getNameInput().getDOMNode().value).toEqual(name)
context 'changing the description', ->
description = 'good boy'
beforeEach -> Simulate.change(getDescriptionInput(), target: {value: description})
it 'changes the name', ->
expect(getDescriptionInput().getDOMNode().value).toEqual(description)
context 'changing name and description and clicking save', ->
props.is -> onSave: sinon.stub()
name = 'PI:NAME:<NAME>END_PI'; description = 'good boy'
beforeEach ->
Simulate.change(getNameInput(), target: {value: name})
Simulate.change(getDescriptionInput(), target: {value: description})
Simulate.click(getSaveButton().getDOMNode())
it 'calls onSave', ->
sinon.assert.calledWith(props().onSave, name: name, description: description)
|
[
{
"context": ")\n\n $scope.getStripeToken = () ->\n DEV_TOKEN=\"pk_096woK2npZnBc1cPETWmMsNmjod7e\"\n PROD_TOKEN=\"pk_096wf6Q6pH1974ZLUF8lZXTUh3Ceg",
"end": 500,
"score": 0.9824973344802856,
"start": 468,
"tag": "KEY",
"value": "pk_096woK2npZnBc1cPETWmMsNmjod7e"
},
{
"context":... | plugin-assets/custom/pricing.coffee | wp-plugins/anybackup | 0 | app.controller "PricingController", ($scope, $http, accountFactory) ->
$scope.loadPlans = ->
request = $http {
url: ajaxurl,
method: "GET",
params: {
action: "bits_backup_get_plans"
}
}
request.success (planData, status, headers, config) =>
accountFactory.getStatus (data) ->
$scope.plans = planData.plans
$scope.status = data
$scope.loadPlans()
$scope.getStripeToken = () ->
DEV_TOKEN="pk_096woK2npZnBc1cPETWmMsNmjod7e"
PROD_TOKEN="pk_096wf6Q6pH1974ZLUF8lZXTUh3Ceg"
if(window.location.href.match(/localhost/))
DEV_TOKEN
else
PROD_TOKEN
$scope.getPlan = (name) ->
(plan for plan in $scope.plans when plan.name is name)[0]
$scope.cancelAccount = ()->
plan_id = $scope.getPlan('free').id
if(confirm("Are you sure you want to cancel your account? This may stop backup of your sites."))
$scope.updateAccount(plan_id)
$scope.updateAccount = (plan_id, token_id=null)->
params = {
action: "bits_backup_update_account",
plan_id: plan_id,
token: token_id
}
request = $http {
url: ajaxurl,
method: "POST",
params:params
}
request.success (data, status, headers, config) =>
accountFactory.getStatus (data) ->
$scope.status = data
request.error (data, status, headers, config) =>
$scope.status = 500
$scope.openCheckout = (plan) ->
handler = StripeCheckout.configure
key: $scope.getStripeToken(),
image: 'https://anybackup.io/images/logo-512x512.png',
email: status.email,
token: (token) ->
$scope.updateAccount(plan.id, token.id)
handler.open
name: 'AnyBackup ' + plan.name,
description: plan.price+'/m',
amount: plan.price_in_cents
| 140192 | app.controller "PricingController", ($scope, $http, accountFactory) ->
$scope.loadPlans = ->
request = $http {
url: ajaxurl,
method: "GET",
params: {
action: "bits_backup_get_plans"
}
}
request.success (planData, status, headers, config) =>
accountFactory.getStatus (data) ->
$scope.plans = planData.plans
$scope.status = data
$scope.loadPlans()
$scope.getStripeToken = () ->
DEV_TOKEN="<KEY>"
PROD_TOKEN="<KEY>"
if(window.location.href.match(/localhost/))
DEV_TOKEN
else
PROD_TOKEN
$scope.getPlan = (name) ->
(plan for plan in $scope.plans when plan.name is name)[0]
$scope.cancelAccount = ()->
plan_id = $scope.getPlan('free').id
if(confirm("Are you sure you want to cancel your account? This may stop backup of your sites."))
$scope.updateAccount(plan_id)
$scope.updateAccount = (plan_id, token_id=null)->
params = {
action: "bits_backup_update_account",
plan_id: plan_id,
token: token_id
}
request = $http {
url: ajaxurl,
method: "POST",
params:params
}
request.success (data, status, headers, config) =>
accountFactory.getStatus (data) ->
$scope.status = data
request.error (data, status, headers, config) =>
$scope.status = 500
$scope.openCheckout = (plan) ->
handler = StripeCheckout.configure
key: $scope.getStripeToken(),
image: 'https://anybackup.io/images/logo-512x512.png',
email: status.email,
token: (token) ->
$scope.updateAccount(plan.id, token.id)
handler.open
name: 'AnyBackup ' + plan.name,
description: plan.price+'/m',
amount: plan.price_in_cents
| true | app.controller "PricingController", ($scope, $http, accountFactory) ->
$scope.loadPlans = ->
request = $http {
url: ajaxurl,
method: "GET",
params: {
action: "bits_backup_get_plans"
}
}
request.success (planData, status, headers, config) =>
accountFactory.getStatus (data) ->
$scope.plans = planData.plans
$scope.status = data
$scope.loadPlans()
$scope.getStripeToken = () ->
DEV_TOKEN="PI:KEY:<KEY>END_PI"
PROD_TOKEN="PI:KEY:<KEY>END_PI"
if(window.location.href.match(/localhost/))
DEV_TOKEN
else
PROD_TOKEN
$scope.getPlan = (name) ->
(plan for plan in $scope.plans when plan.name is name)[0]
$scope.cancelAccount = ()->
plan_id = $scope.getPlan('free').id
if(confirm("Are you sure you want to cancel your account? This may stop backup of your sites."))
$scope.updateAccount(plan_id)
$scope.updateAccount = (plan_id, token_id=null)->
params = {
action: "bits_backup_update_account",
plan_id: plan_id,
token: token_id
}
request = $http {
url: ajaxurl,
method: "POST",
params:params
}
request.success (data, status, headers, config) =>
accountFactory.getStatus (data) ->
$scope.status = data
request.error (data, status, headers, config) =>
$scope.status = 500
$scope.openCheckout = (plan) ->
handler = StripeCheckout.configure
key: $scope.getStripeToken(),
image: 'https://anybackup.io/images/logo-512x512.png',
email: status.email,
token: (token) ->
$scope.updateAccount(plan.id, token.id)
handler.open
name: 'AnyBackup ' + plan.name,
description: plan.price+'/m',
amount: plan.price_in_cents
|
[
{
"context": "host\"\r\nusername = process.env.AMQP_USERNAME ? \"guest\"\r\npassword = process.env.AMQP_PASSWORD ? \"gues",
"end": 521,
"score": 0.6066981554031372,
"start": 516,
"tag": "USERNAME",
"value": "guest"
},
{
"context": "uest\"\r\npassword = process.env.AMQP_PASSW... | test/index.coffee | HoS0/HoSApi | 0 | http = require 'http'
Promise = require 'bluebird'
bodyParser = require 'body-parser'
crypto = require 'crypto'
HosCom = require 'hos-com'
HoSAuth = require 'hos-auth'
generalContract = require './serviceContract'
request = require('supertest')
hosApi = require '../index'
express = require('express')
cors = require 'cors'
amqpurl = process.env.AMQP_URL ? "localhost"
username = process.env.AMQP_USERNAME ? "guest"
password = process.env.AMQP_PASSWORD ? "guest"
@serviceCon = JSON.parse(JSON.stringify(generalContract))
@serviceCon.serviceDoc.basePath = "/serviceTest#{crypto.randomBytes(4).toString('hex')}"
@serviceDist = new HosCom @serviceCon, amqpurl, username, password
@hosAuth = new HoSAuth(amqpurl, username, password)
promises = []
promises.push @hosAuth.connect()
promises.push @serviceDist.connect()
Promise.all(promises).then ()=>
@hosAuth.on 'message', (msg)=>
msg.accept()
hosApi.init(true, 'localhost:8091').then ()=>
@serviceDist.on '/users.post', (msg)=>
msg.reply(msg.content)
app = express()
app.set 'port', 8091
app.use cors()
app.use bodyParser.json()
app.use hosApi.swaggerMetadata
app.use hosApi.swaggerValidator
app.use hosApi.swaggerUi
app.use hosApi.middleware
http.createServer(app).listen app.get('port'), () ->
console.log 'Express server listening on port ' + app.get 'port'
| 113116 | http = require 'http'
Promise = require 'bluebird'
bodyParser = require 'body-parser'
crypto = require 'crypto'
HosCom = require 'hos-com'
HoSAuth = require 'hos-auth'
generalContract = require './serviceContract'
request = require('supertest')
hosApi = require '../index'
express = require('express')
cors = require 'cors'
amqpurl = process.env.AMQP_URL ? "localhost"
username = process.env.AMQP_USERNAME ? "guest"
password = process.env.AMQP_PASSWORD ? "<PASSWORD>"
@serviceCon = JSON.parse(JSON.stringify(generalContract))
@serviceCon.serviceDoc.basePath = "/serviceTest#{crypto.randomBytes(4).toString('hex')}"
@serviceDist = new HosCom @serviceCon, amqpurl, username, password
@hosAuth = new HoSAuth(amqpurl, username, password)
promises = []
promises.push @hosAuth.connect()
promises.push @serviceDist.connect()
Promise.all(promises).then ()=>
@hosAuth.on 'message', (msg)=>
msg.accept()
hosApi.init(true, 'localhost:8091').then ()=>
@serviceDist.on '/users.post', (msg)=>
msg.reply(msg.content)
app = express()
app.set 'port', 8091
app.use cors()
app.use bodyParser.json()
app.use hosApi.swaggerMetadata
app.use hosApi.swaggerValidator
app.use hosApi.swaggerUi
app.use hosApi.middleware
http.createServer(app).listen app.get('port'), () ->
console.log 'Express server listening on port ' + app.get 'port'
| true | http = require 'http'
Promise = require 'bluebird'
bodyParser = require 'body-parser'
crypto = require 'crypto'
HosCom = require 'hos-com'
HoSAuth = require 'hos-auth'
generalContract = require './serviceContract'
request = require('supertest')
hosApi = require '../index'
express = require('express')
cors = require 'cors'
amqpurl = process.env.AMQP_URL ? "localhost"
username = process.env.AMQP_USERNAME ? "guest"
password = process.env.AMQP_PASSWORD ? "PI:PASSWORD:<PASSWORD>END_PI"
@serviceCon = JSON.parse(JSON.stringify(generalContract))
@serviceCon.serviceDoc.basePath = "/serviceTest#{crypto.randomBytes(4).toString('hex')}"
@serviceDist = new HosCom @serviceCon, amqpurl, username, password
@hosAuth = new HoSAuth(amqpurl, username, password)
promises = []
promises.push @hosAuth.connect()
promises.push @serviceDist.connect()
Promise.all(promises).then ()=>
@hosAuth.on 'message', (msg)=>
msg.accept()
hosApi.init(true, 'localhost:8091').then ()=>
@serviceDist.on '/users.post', (msg)=>
msg.reply(msg.content)
app = express()
app.set 'port', 8091
app.use cors()
app.use bodyParser.json()
app.use hosApi.swaggerMetadata
app.use hosApi.swaggerValidator
app.use hosApi.swaggerUi
app.use hosApi.middleware
http.createServer(app).listen app.get('port'), () ->
console.log 'Express server listening on port ' + app.get 'port'
|
[
{
"context": "\n elements:\n 'form': 'form'\n '.password': 'password'\n\n events:\n 'submit form': 'submit'\n 'chan",
"end": 494,
"score": 0.9987154006958008,
"start": 486,
"tag": "PASSWORD",
"value": "password"
},
{
"context": " @pusher = pubnub = PUBNUB(\n subscr... | app/controllers/user_edit.coffee | burgalon/boorgle-spine | 0 | Spine = require('spine')
BasePanel = require('./base_panel')
Authorization = require('authorization')
# Model
MyUser = require('models/my_user')
# Models below needed for Pusher events
FoundFriend = require('models/found_friend')
Friend = require('models/friend')
# Controllers
UsersShow = require('controllers/users_show')
Settings = require('controllers/settings')
class Login extends BasePanel
title: 'Log In'
tab: 'account'
elements:
'form': 'form'
'.password': 'password'
events:
'submit form': 'submit'
'change input': 'checkValidity'
'keyup input': 'checkValidity'
'tap .forgot-password': 'forgotPassword'
className: 'users editView login'
constructor: ->
super
@addButton('Cancel', -> @navigate '/please_login', trans: 'left')
@doneButton = @addButton('Log In', @submit).addClass('right blue')
@render()
render: =>
@html require('views/users/login')
@checkValidity()
submit: (e) ->
# return @log 'UserEditForm.submit - invalid form' if @doneButton.attr('disabled')
e.preventDefault()
return @alert 'Please fill form' unless @form[0].checkValidity()
basic_auth = @form.serializeArray().map((el) -> el.value)
basic_auth = basic_auth.join ':'
basic_auth = Base64.encode basic_auth
Authorization.ajax(
data: "commit=true&client_id=#{Config.clientId}&response_type=token&redirect_uri=#{Config.oauthRedirectUri}",
headers: {"Authorization": "Basic #{basic_auth}"}
url: Config.oauthEndpoint + 'authorize.json',
type: 'POST',
contentType: 'application/x-www-form-urlencoded; charset=UTF-8'
).done( (data) =>
Authorization.saveToken(data.access_token)
Spine.trigger 'login'
# When signing up, cancel button is available to go back to the /please_login
# but once email is entered we are in 'steps' mode that cannot be canceled
@cancelButton.remove() if @cancelButton
).fail ( (xhr) =>
@log 'login fail', arguments
if xhr.status is 401
msg = "Could not find user/password"
else
msg = "Network Error: #{xhr.statusText} (#{xhr.status}). #{xhr.responseText}"
@alert msg
)
alert: (msg) ->
@hideKeyboard()
Spine.trigger 'notify', msg: msg, class: 'warn'
checkValidity: ->
if @form[0].checkValidity()
@doneButton.removeClass 'disabled'
else
@doneButton.addClass 'disabled'
forgotPassword: ->
window.open Config.host.replace('/api/v1', '') + '/accounts/password/new'
class UserEditForm extends BasePanel
title: 'Info'
tab: 'account'
elements:
'form': 'form'
events:
'submit form': 'submit'
'change input': 'checkValidity'
'keyup input': 'checkValidity'
'focus .input-phone': 'onFocusPhone'
'blur .input-phone': 'onBlurPhone'
'change .input-phone': 'onChangePhone'
'keyup .input-phone': 'onChangePhone'
className: 'users editView'
isSteps: true
constructor: ->
super
@item = new MyUser()
MyUser.bind('refresh change', @change)
@doneButton = @addButton('Done', @submit).addClass('right blue')
@cancelButton = @addButton('Cancel', @back) unless Authorization.is_loggedin()
# When activating tab, render the view in order to revert any canceled former editing
@active => @render()
@render()
render: =>
return unless @isActive()
if !@item.email
@html require('views/users/form_email')(@item)
else if !@item.first_name || !@item.last_name
@html require('views/users/form_name')(@item)
else if !@item.phones.length
@html require('views/users/form_phone')(@item)
else if !@item.zipcode
@html require('views/users/form_address')(@item)
else
Spine.trigger 'login' if @isSteps
@isSteps = false
@html require('views/users/form')(@item)
if @isSteps
@log 'Focusing', $($('input', @form))
$($('input', @form)[0]).focus()
@doneButton.text('Next')
@checkValidity()
submit: (e) ->
e.preventDefault()
unless @form[0].checkValidity()
el = $($('input:invalid', @form)[0])
el.focus()
@alert el.attr('placeholder')
return false
Authorization.ajax(
data: @form.serialize() + "&client_id=#{Config.clientId}&response_type=token&redirect_uri=#{Config.oauthRedirectUri}",
url: MyUser.url(),
type: if @item.isNew() then 'POST' else 'PUT',
contentType: 'application/x-www-form-urlencoded; charset=UTF-8'
).done( (data) =>
$('body').removeClass('loggedout') unless @item.validate()
# Implies signup
if data.access_token
Authorization.saveToken(data.access_token)
@navigate(if @isSteps then '/user/edit' else '/found_friends')
else
@navigate(if @isSteps then '/user/edit' else '/user/edit/show')
MyUser.refresh(if data.user then data.user else data)
).fail ( (data) =>
@log 'Failed submitting form'
)
alert: (msg) ->
@hideKeyboard()
Spine.trigger 'notify', msg: msg, class: 'warn'
checkValidity: ->
if @form[0].checkValidity()
@doneButton.removeClass 'disabled'
else
@doneButton.addClass 'disabled'
onFocusPhone: (e) ->
input = $(e.currentTarget)
unless input.val()
input.val('+1')
onBlurPhone: (e) ->
input = $(e.currentTarget)
if input.val().length<3
input.val('')
onChangePhone: (e) ->
input = $(e.currentTarget)
console.log 'onChangePhone'
val = input.val()
unless val[0]=='+'
Spine.trigger 'notify', msg: 'Add country code e.g: +1-212-....', class: 'warn'
input.val('+') unless val.length
back: ->
if @item.id
@navigate('/user/edit/show', trans: 'left')
else
@navigate('/please_login', trans: 'left')
change: =>
@item = MyUser.first()
setTimeout(@setupPusher, 3000)
@render()
if @item.validate()
@cancelButton.remove() if @cancelButton
else
@cancelButton = @addButton('Cancel', @back)
setupPusher: =>
return if @pusher
@startDate = new Date()
Spine.bind 'appResumed', ->
@startDate = new Date()
# Disable pusher on dev mode for now
# return if Config.env=='development'
@pusher = pubnub = PUBNUB(
subscribe_key: 'sub-c84a4505-ef49-11e1-b9bf-9fa4c00b78db',
ssl: false,
origin: 'pubsub.pubnub.com'
)
pubnub.subscribe(
restore: true,
channel: "user_"+@item.id,
callback : (message) ->
console.log("pusher message: "+ message.cmd)
return if (new Date(message.created_at)) < @startDate
console.log("executing message")
eval(message.cmd)
disconnect : ->
console.log("Connection Lost")
)
class MyUserShow extends UsersShow
title: 'Info'
tab: 'account'
@configure MyUser
add_buttons: ->
@addButton('Edit', @back)
@addButton('Settings', -> @navigate '/settings', trans: 'left').addClass('right')
back: ->
@navigate '/user/edit', trans: 'right'
change: (params) =>
super(id: 'my-user')
class UserEdit extends Spine.Controller
constructor: ->
super
@form = new UserEditForm
@show = new MyUserShow
@login = new Login
@settings = new Settings
@routes
'/user/edit/show': (params) -> @show.active(params)
'/user/edit': (params) -> @form.active(params)
'/user/login': (params) -> @login.active(params)
'/settings': (params) -> @settings.active(params)
module.exports = UserEdit | 53345 | Spine = require('spine')
BasePanel = require('./base_panel')
Authorization = require('authorization')
# Model
MyUser = require('models/my_user')
# Models below needed for Pusher events
FoundFriend = require('models/found_friend')
Friend = require('models/friend')
# Controllers
UsersShow = require('controllers/users_show')
Settings = require('controllers/settings')
class Login extends BasePanel
title: 'Log In'
tab: 'account'
elements:
'form': 'form'
'.password': '<PASSWORD>'
events:
'submit form': 'submit'
'change input': 'checkValidity'
'keyup input': 'checkValidity'
'tap .forgot-password': 'forgotPassword'
className: 'users editView login'
constructor: ->
super
@addButton('Cancel', -> @navigate '/please_login', trans: 'left')
@doneButton = @addButton('Log In', @submit).addClass('right blue')
@render()
render: =>
@html require('views/users/login')
@checkValidity()
submit: (e) ->
# return @log 'UserEditForm.submit - invalid form' if @doneButton.attr('disabled')
e.preventDefault()
return @alert 'Please fill form' unless @form[0].checkValidity()
basic_auth = @form.serializeArray().map((el) -> el.value)
basic_auth = basic_auth.join ':'
basic_auth = Base64.encode basic_auth
Authorization.ajax(
data: "commit=true&client_id=#{Config.clientId}&response_type=token&redirect_uri=#{Config.oauthRedirectUri}",
headers: {"Authorization": "Basic #{basic_auth}"}
url: Config.oauthEndpoint + 'authorize.json',
type: 'POST',
contentType: 'application/x-www-form-urlencoded; charset=UTF-8'
).done( (data) =>
Authorization.saveToken(data.access_token)
Spine.trigger 'login'
# When signing up, cancel button is available to go back to the /please_login
# but once email is entered we are in 'steps' mode that cannot be canceled
@cancelButton.remove() if @cancelButton
).fail ( (xhr) =>
@log 'login fail', arguments
if xhr.status is 401
msg = "Could not find user/password"
else
msg = "Network Error: #{xhr.statusText} (#{xhr.status}). #{xhr.responseText}"
@alert msg
)
alert: (msg) ->
@hideKeyboard()
Spine.trigger 'notify', msg: msg, class: 'warn'
checkValidity: ->
if @form[0].checkValidity()
@doneButton.removeClass 'disabled'
else
@doneButton.addClass 'disabled'
forgotPassword: ->
window.open Config.host.replace('/api/v1', '') + '/accounts/password/new'
class UserEditForm extends BasePanel
title: 'Info'
tab: 'account'
elements:
'form': 'form'
events:
'submit form': 'submit'
'change input': 'checkValidity'
'keyup input': 'checkValidity'
'focus .input-phone': 'onFocusPhone'
'blur .input-phone': 'onBlurPhone'
'change .input-phone': 'onChangePhone'
'keyup .input-phone': 'onChangePhone'
className: 'users editView'
isSteps: true
constructor: ->
super
@item = new MyUser()
MyUser.bind('refresh change', @change)
@doneButton = @addButton('Done', @submit).addClass('right blue')
@cancelButton = @addButton('Cancel', @back) unless Authorization.is_loggedin()
# When activating tab, render the view in order to revert any canceled former editing
@active => @render()
@render()
render: =>
return unless @isActive()
if !@item.email
@html require('views/users/form_email')(@item)
else if !@item.first_name || !@item.last_name
@html require('views/users/form_name')(@item)
else if !@item.phones.length
@html require('views/users/form_phone')(@item)
else if !@item.zipcode
@html require('views/users/form_address')(@item)
else
Spine.trigger 'login' if @isSteps
@isSteps = false
@html require('views/users/form')(@item)
if @isSteps
@log 'Focusing', $($('input', @form))
$($('input', @form)[0]).focus()
@doneButton.text('Next')
@checkValidity()
submit: (e) ->
e.preventDefault()
unless @form[0].checkValidity()
el = $($('input:invalid', @form)[0])
el.focus()
@alert el.attr('placeholder')
return false
Authorization.ajax(
data: @form.serialize() + "&client_id=#{Config.clientId}&response_type=token&redirect_uri=#{Config.oauthRedirectUri}",
url: MyUser.url(),
type: if @item.isNew() then 'POST' else 'PUT',
contentType: 'application/x-www-form-urlencoded; charset=UTF-8'
).done( (data) =>
$('body').removeClass('loggedout') unless @item.validate()
# Implies signup
if data.access_token
Authorization.saveToken(data.access_token)
@navigate(if @isSteps then '/user/edit' else '/found_friends')
else
@navigate(if @isSteps then '/user/edit' else '/user/edit/show')
MyUser.refresh(if data.user then data.user else data)
).fail ( (data) =>
@log 'Failed submitting form'
)
alert: (msg) ->
@hideKeyboard()
Spine.trigger 'notify', msg: msg, class: 'warn'
checkValidity: ->
if @form[0].checkValidity()
@doneButton.removeClass 'disabled'
else
@doneButton.addClass 'disabled'
onFocusPhone: (e) ->
input = $(e.currentTarget)
unless input.val()
input.val('+1')
onBlurPhone: (e) ->
input = $(e.currentTarget)
if input.val().length<3
input.val('')
onChangePhone: (e) ->
input = $(e.currentTarget)
console.log 'onChangePhone'
val = input.val()
unless val[0]=='+'
Spine.trigger 'notify', msg: 'Add country code e.g: +1-212-....', class: 'warn'
input.val('+') unless val.length
back: ->
if @item.id
@navigate('/user/edit/show', trans: 'left')
else
@navigate('/please_login', trans: 'left')
change: =>
@item = MyUser.first()
setTimeout(@setupPusher, 3000)
@render()
if @item.validate()
@cancelButton.remove() if @cancelButton
else
@cancelButton = @addButton('Cancel', @back)
setupPusher: =>
return if @pusher
@startDate = new Date()
Spine.bind 'appResumed', ->
@startDate = new Date()
# Disable pusher on dev mode for now
# return if Config.env=='development'
@pusher = pubnub = PUBNUB(
subscribe_key: '<KEY>',
ssl: false,
origin: 'pubsub.pubnub.com'
)
pubnub.subscribe(
restore: true,
channel: "user_"+@item.id,
callback : (message) ->
console.log("pusher message: "+ message.cmd)
return if (new Date(message.created_at)) < @startDate
console.log("executing message")
eval(message.cmd)
disconnect : ->
console.log("Connection Lost")
)
class MyUserShow extends UsersShow
title: 'Info'
tab: 'account'
@configure MyUser
add_buttons: ->
@addButton('Edit', @back)
@addButton('Settings', -> @navigate '/settings', trans: 'left').addClass('right')
back: ->
@navigate '/user/edit', trans: 'right'
change: (params) =>
super(id: 'my-user')
class UserEdit extends Spine.Controller
constructor: ->
super
@form = new UserEditForm
@show = new MyUserShow
@login = new Login
@settings = new Settings
@routes
'/user/edit/show': (params) -> @show.active(params)
'/user/edit': (params) -> @form.active(params)
'/user/login': (params) -> @login.active(params)
'/settings': (params) -> @settings.active(params)
module.exports = UserEdit | true | Spine = require('spine')
BasePanel = require('./base_panel')
Authorization = require('authorization')
# Model
MyUser = require('models/my_user')
# Models below needed for Pusher events
FoundFriend = require('models/found_friend')
Friend = require('models/friend')
# Controllers
UsersShow = require('controllers/users_show')
Settings = require('controllers/settings')
class Login extends BasePanel
title: 'Log In'
tab: 'account'
elements:
'form': 'form'
'.password': 'PI:PASSWORD:<PASSWORD>END_PI'
events:
'submit form': 'submit'
'change input': 'checkValidity'
'keyup input': 'checkValidity'
'tap .forgot-password': 'forgotPassword'
className: 'users editView login'
constructor: ->
super
@addButton('Cancel', -> @navigate '/please_login', trans: 'left')
@doneButton = @addButton('Log In', @submit).addClass('right blue')
@render()
render: =>
@html require('views/users/login')
@checkValidity()
submit: (e) ->
# return @log 'UserEditForm.submit - invalid form' if @doneButton.attr('disabled')
e.preventDefault()
return @alert 'Please fill form' unless @form[0].checkValidity()
basic_auth = @form.serializeArray().map((el) -> el.value)
basic_auth = basic_auth.join ':'
basic_auth = Base64.encode basic_auth
Authorization.ajax(
data: "commit=true&client_id=#{Config.clientId}&response_type=token&redirect_uri=#{Config.oauthRedirectUri}",
headers: {"Authorization": "Basic #{basic_auth}"}
url: Config.oauthEndpoint + 'authorize.json',
type: 'POST',
contentType: 'application/x-www-form-urlencoded; charset=UTF-8'
).done( (data) =>
Authorization.saveToken(data.access_token)
Spine.trigger 'login'
# When signing up, cancel button is available to go back to the /please_login
# but once email is entered we are in 'steps' mode that cannot be canceled
@cancelButton.remove() if @cancelButton
).fail ( (xhr) =>
@log 'login fail', arguments
if xhr.status is 401
msg = "Could not find user/password"
else
msg = "Network Error: #{xhr.statusText} (#{xhr.status}). #{xhr.responseText}"
@alert msg
)
alert: (msg) ->
@hideKeyboard()
Spine.trigger 'notify', msg: msg, class: 'warn'
checkValidity: ->
if @form[0].checkValidity()
@doneButton.removeClass 'disabled'
else
@doneButton.addClass 'disabled'
forgotPassword: ->
window.open Config.host.replace('/api/v1', '') + '/accounts/password/new'
class UserEditForm extends BasePanel
title: 'Info'
tab: 'account'
elements:
'form': 'form'
events:
'submit form': 'submit'
'change input': 'checkValidity'
'keyup input': 'checkValidity'
'focus .input-phone': 'onFocusPhone'
'blur .input-phone': 'onBlurPhone'
'change .input-phone': 'onChangePhone'
'keyup .input-phone': 'onChangePhone'
className: 'users editView'
isSteps: true
constructor: ->
super
@item = new MyUser()
MyUser.bind('refresh change', @change)
@doneButton = @addButton('Done', @submit).addClass('right blue')
@cancelButton = @addButton('Cancel', @back) unless Authorization.is_loggedin()
# When activating tab, render the view in order to revert any canceled former editing
@active => @render()
@render()
render: =>
return unless @isActive()
if !@item.email
@html require('views/users/form_email')(@item)
else if !@item.first_name || !@item.last_name
@html require('views/users/form_name')(@item)
else if !@item.phones.length
@html require('views/users/form_phone')(@item)
else if !@item.zipcode
@html require('views/users/form_address')(@item)
else
Spine.trigger 'login' if @isSteps
@isSteps = false
@html require('views/users/form')(@item)
if @isSteps
@log 'Focusing', $($('input', @form))
$($('input', @form)[0]).focus()
@doneButton.text('Next')
@checkValidity()
submit: (e) ->
e.preventDefault()
unless @form[0].checkValidity()
el = $($('input:invalid', @form)[0])
el.focus()
@alert el.attr('placeholder')
return false
Authorization.ajax(
data: @form.serialize() + "&client_id=#{Config.clientId}&response_type=token&redirect_uri=#{Config.oauthRedirectUri}",
url: MyUser.url(),
type: if @item.isNew() then 'POST' else 'PUT',
contentType: 'application/x-www-form-urlencoded; charset=UTF-8'
).done( (data) =>
$('body').removeClass('loggedout') unless @item.validate()
# Implies signup
if data.access_token
Authorization.saveToken(data.access_token)
@navigate(if @isSteps then '/user/edit' else '/found_friends')
else
@navigate(if @isSteps then '/user/edit' else '/user/edit/show')
MyUser.refresh(if data.user then data.user else data)
).fail ( (data) =>
@log 'Failed submitting form'
)
alert: (msg) ->
@hideKeyboard()
Spine.trigger 'notify', msg: msg, class: 'warn'
checkValidity: ->
if @form[0].checkValidity()
@doneButton.removeClass 'disabled'
else
@doneButton.addClass 'disabled'
onFocusPhone: (e) ->
input = $(e.currentTarget)
unless input.val()
input.val('+1')
onBlurPhone: (e) ->
input = $(e.currentTarget)
if input.val().length<3
input.val('')
onChangePhone: (e) ->
input = $(e.currentTarget)
console.log 'onChangePhone'
val = input.val()
unless val[0]=='+'
Spine.trigger 'notify', msg: 'Add country code e.g: +1-212-....', class: 'warn'
input.val('+') unless val.length
back: ->
if @item.id
@navigate('/user/edit/show', trans: 'left')
else
@navigate('/please_login', trans: 'left')
change: =>
@item = MyUser.first()
setTimeout(@setupPusher, 3000)
@render()
if @item.validate()
@cancelButton.remove() if @cancelButton
else
@cancelButton = @addButton('Cancel', @back)
setupPusher: =>
return if @pusher
@startDate = new Date()
Spine.bind 'appResumed', ->
@startDate = new Date()
# Disable pusher on dev mode for now
# return if Config.env=='development'
@pusher = pubnub = PUBNUB(
subscribe_key: 'PI:KEY:<KEY>END_PI',
ssl: false,
origin: 'pubsub.pubnub.com'
)
pubnub.subscribe(
restore: true,
channel: "user_"+@item.id,
callback : (message) ->
console.log("pusher message: "+ message.cmd)
return if (new Date(message.created_at)) < @startDate
console.log("executing message")
eval(message.cmd)
disconnect : ->
console.log("Connection Lost")
)
class MyUserShow extends UsersShow
title: 'Info'
tab: 'account'
@configure MyUser
add_buttons: ->
@addButton('Edit', @back)
@addButton('Settings', -> @navigate '/settings', trans: 'left').addClass('right')
back: ->
@navigate '/user/edit', trans: 'right'
change: (params) =>
super(id: 'my-user')
class UserEdit extends Spine.Controller
constructor: ->
super
@form = new UserEditForm
@show = new MyUserShow
@login = new Login
@settings = new Settings
@routes
'/user/edit/show': (params) -> @show.active(params)
'/user/edit': (params) -> @form.active(params)
'/user/login': (params) -> @login.active(params)
'/settings': (params) -> @settings.active(params)
module.exports = UserEdit |
[
{
"context": "\n obj = new Jumper(@pos.x, @pos.y, {name: 'Jumper'})\n me.game.add(obj, Z_LEVEL_JUMP)\n ",
"end": 108,
"score": 0.9800478219985962,
"start": 102,
"tag": "NAME",
"value": "Jumper"
}
] | src/places/jumperGenerator.coffee | commandojs/CommandoJS | 41 | JumperGenerator = Generator.extend(
generate: ->
obj = new Jumper(@pos.x, @pos.y, {name: 'Jumper'})
me.game.add(obj, Z_LEVEL_JUMP)
Utils.sortEntities()
true
)
| 94617 | JumperGenerator = Generator.extend(
generate: ->
obj = new Jumper(@pos.x, @pos.y, {name: '<NAME>'})
me.game.add(obj, Z_LEVEL_JUMP)
Utils.sortEntities()
true
)
| true | JumperGenerator = Generator.extend(
generate: ->
obj = new Jumper(@pos.x, @pos.y, {name: 'PI:NAME:<NAME>END_PI'})
me.game.add(obj, Z_LEVEL_JUMP)
Utils.sortEntities()
true
)
|
[
{
"context": " (Darwin)\nComment: GPGTools - http://gpgtools.org\n\nmI0EU1RicwEEAKldegqFSs6QnotGAD3pg5rjv1ftzFINTEbf+JkdVPhWT8NkiNne\nNOWUxAtS1Pez9NpL+LUpk1AkImzFCtrgLrT+445hX9kKNN17JZeUNiR9lgujB+El\nBL0h2WUYiE3Q99BuHiTRoZZzRagy0/VylwHOb2cW2IUeTN5uK+MgjHk1ABEBAAG0\nIExvcmQgQnlyb24gPGxvcmQuYnlyb25Ab3guYWMudWs+iL... | test/files/secret_subkeys.iced | thinq4yourself/kbpgp | 1 | {KeyManager} = require '../../lib/keymanager'
{bufferify,ASP} = require '../../lib/util'
{make_esc} = require 'iced-error'
util = require 'util'
{box} = require '../../lib/keybase/encode'
{Encryptor} = require 'triplesec'
{base91} = require '../../lib/basex'
{burn} = require '../../lib/openpgp/burner'
{do_message} = require '../../lib/openpgp/processor'
#---------------------------------------------
pub = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - http://gpgtools.org
mI0EU1RicwEEAKldegqFSs6QnotGAD3pg5rjv1ftzFINTEbf+JkdVPhWT8NkiNne
NOWUxAtS1Pez9NpL+LUpk1AkImzFCtrgLrT+445hX9kKNN17JZeUNiR9lgujB+El
BL0h2WUYiE3Q99BuHiTRoZZzRagy0/VylwHOb2cW2IUeTN5uK+MgjHk1ABEBAAG0
IExvcmQgQnlyb24gPGxvcmQuYnlyb25Ab3guYWMudWs+iL4EEwECACgFAlNUYnMC
GwMFCRLMAwAGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEFFfVrvfaDdkvOwD
/iyPjFVN4g4WepZ7lYpb6qnOakUmKDBbcNYa3hxgdkjOm/jEpQcrGEDpGTGbenqP
+f6qkOd+lJIfsYn2IVcxJkFZXXGwfdzlE9d3J6hvPu83oymkKAiqNyKd1SiYPr2u
lEnzFpohJvt4O/Re8eGY37dWGUBtjno/oir3/LCT/jrjuI0EU1RicwEEAJ04fU18
TSn7P8ikQf7BiQOUYbV2oFXDE3fznIwzM5qtrrAtkxPS/B+ePf5dGogMcD1dsrvz
2NYo82p6NqKJaIXR1VQgmuAET+8oIvOqYIPx7M/LqvMcc2LAAgpRdlUfF0eYOIEg
PKBaDQ1zy0jeBlB6Ra8MzYm/ZJccgXFtGcrDABEBAAGIpQQYAQIADwUCU1RicwIb
DAUJEswDAAAKCRBRX1a732g3ZC7PA/9r0AiqjZ4/GpvE0x1W7AunmyInqUJ65I3x
twHxL77b3YvJIr1NGhJ9DeZ5bGEaeOnTX4Re7dmflbWH4Vk8VDgofWQaka/OwV0U
j2Wn+ky5ADTpBl7IpYA42en+pbQmANQ6gBZHjJUEVvfXGOGx5CE3fkLkRP6Cubfd
00NXPp5/QbiNBFNUYogBBAC8fMFFbX7dRvmfKROkvsvNs+VxKFeEjumUTtSZZAEF
EHiA90yHUH3raRvi0IFQryZGyACLO1V+G6Dw2hpFTfdFRDe9cyABgi+/CJFSIDcE
OP5MatLDfgKXRRebn/yq2KumaH3mBCVvLgP8+j32JVoXv4XxazWHA5DTGxzVNYY6
IQARAQABiQFDBBgBAgAPBQJTVGKIAhsCBQkSzAMAAKgJEFFfVrvfaDdknSAEGQEC
AAYFAlNUYogACgkQI7FF74NRBFSG4wP+IQch2GhpG2+x+QKntDm8BIb7LUbW2qUM
ffa8laIyV0mYmDVsnqpKWbPHyrp+oRoxivILdqt7z/w4gRjdilHeL0IThTGyApgb
ijrsL/4D2jsrJpNSU1xwZ/5FxN68htuQ8glqRES6P7o3+Dcx5l9mJbr05RHHRjMn
1BHR3TZzLYAcZQP/e4+4H2G6qYUe7hox1G6dpabaHLVVhpWlXace3BAWjUsllF6z
qSGrD7biTyYJq2RqrjUCt0y+UyNy+rpWvGqcvkzdax5flksQ0rOldj7S4tdIjwQv
pZjpPs9s/v/mzM8zsP3L5KdlMApDGdQq8GAyCJ41MytvG7Yp3gIMIpkECkE=
=HaQc
-----END PGP PUBLIC KEY BLOCK-----
"""
#------------
priv = """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - http://gpgtools.org
lQCVBFNUYnMBBACpXXoKhUrOkJ6LRgA96YOa479X7cxSDUxG3/iZHVT4Vk/DZIjZ
3jTllMQLUtT3s/TaS/i1KZNQJCJsxQra4C60/uOOYV/ZCjTdeyWXlDYkfZYLowfh
JQS9IdllGIhN0PfQbh4k0aGWc0WoMtP1cpcBzm9nFtiFHkzebivjIIx5NQARAQAB
/gNlAkdOVQG0IExvcmQgQnlyb24gPGxvcmQuYnlyb25Ab3guYWMudWs+iL4EEwEC
ACgFAlNUYnMCGwMFCRLMAwAGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEFFf
VrvfaDdkvOwD/iyPjFVN4g4WepZ7lYpb6qnOakUmKDBbcNYa3hxgdkjOm/jEpQcr
GEDpGTGbenqP+f6qkOd+lJIfsYn2IVcxJkFZXXGwfdzlE9d3J6hvPu83oymkKAiq
NyKd1SiYPr2ulEnzFpohJvt4O/Re8eGY37dWGUBtjno/oir3/LCT/jrjnQH9BFNU
YnMBBACdOH1NfE0p+z/IpEH+wYkDlGG1dqBVwxN385yMMzOara6wLZMT0vwfnj3+
XRqIDHA9XbK789jWKPNqejaiiWiF0dVUIJrgBE/vKCLzqmCD8ezPy6rzHHNiwAIK
UXZVHxdHmDiBIDygWg0Nc8tI3gZQekWvDM2Jv2SXHIFxbRnKwwARAQAB/gMDAv6f
tm0RF6VTz3gqn50ZFSEL90D5a8C8FIP0CYQNPay5hi6CxcSe70TOuCDAVe7wKZYL
uTtD4qWHdpzsAWcL3dHcrK/+mYFBVAY3GwCITsFeJ/gjXiwyk1ziSBI/t5iJ1HaH
AFJMef3KjvvnpxY4mTkzXwVF0cNnHGZkTd8sCY4vq9joTbCSg58xlJJ0O9cfVRQk
GV7A8sBQEivkS3qLWpr46KxXBSW2zB/8y3LV3eqmME+q+WhKbjAO6qKWcuV8tpQS
CcYIXhfeXANWhVZG6yPUJSMcTfpBC+81KVHFu7y8ZLGeKgremL1x8zC2uY0lSWaN
UmqGg3EIT3ktAnz+p30t+YfwOVrdx3181H/49iVna0Q4gIGrxrPNb4Sgr0q2W1Wg
KwNUWbJsIZnojgctrbvLSWMNwV1GreLiO2aE1z3rzuMsq8KPrquuPkggecodl7f/
CyK+n3fE9ihzcD4AURjn1jyevTuCQH3+bzqIpQQYAQIADwUCU1RicwIbDAUJEswD
AAAKCRBRX1a732g3ZC7PA/9r0AiqjZ4/GpvE0x1W7AunmyInqUJ65I3xtwHxL77b
3YvJIr1NGhJ9DeZ5bGEaeOnTX4Re7dmflbWH4Vk8VDgofWQaka/OwV0Uj2Wn+ky5
ADTpBl7IpYA42en+pbQmANQ6gBZHjJUEVvfXGOGx5CE3fkLkRP6Cubfd00NXPp5/
QZ0B/gRTVGKIAQQAvHzBRW1+3Ub5nykTpL7LzbPlcShXhI7plE7UmWQBBRB4gPdM
h1B962kb4tCBUK8mRsgAiztVfhug8NoaRU33RUQ3vXMgAYIvvwiRUiA3BDj+TGrS
w34Cl0UXm5/8qtirpmh95gQlby4D/Po99iVaF7+F8Ws1hwOQ0xsc1TWGOiEAEQEA
Af4DAwIWqQTlFB1mXM/1zpA3lpI5E9FXQrWx+15UnXc5B/+x0NMYnDig88e5LtcW
I6LdaeX22gU70TW1j2tYOnqqphvVW+y9DJ/99JFu0zKyslp8wQTON6+QNHLwJRqm
KFtP8rYvBQVKBgztxOOuWzElPHpgRAqvdYwirgIlzrKz3CJvLueTQaOwib9TzaNa
tfK7GhfDoW3H7myVxsCrG0KAO0YEE8Cg1K0nn2L38LzLXMjLyZVXbbHyX53khoXj
gYSrMV44pZqpIROI7sT37/X55nmciNyUwgMLEHDEpJNAJNQ+XlaDAinSRrfdTKma
OkM6+0m78gn0uW5sGe2RKVaxVYw5g5yWcthij9rOeiWYDxYkkM+02l6JkCI7epcy
EzXTLM5g7ZJcXwc4d57bzpQJalIrmn0d1kUaM5GOVWXHgBS7Eb6nh4K+h5LzM/SL
9aRRw15Dzfi56B97QNmSg+S7BiDyroRlVTpP3L+zzj1mqokBQwQYAQIADwUCU1Ri
iAIbAgUJEswDAACoCRBRX1a732g3ZJ0gBBkBAgAGBQJTVGKIAAoJECOxRe+DUQRU
huMD/iEHIdhoaRtvsfkCp7Q5vASG+y1G1tqlDH32vJWiMldJmJg1bJ6qSlmzx8q6
fqEaMYryC3are8/8OIEY3YpR3i9CE4UxsgKYG4o67C/+A9o7KyaTUlNccGf+RcTe
vIbbkPIJakREuj+6N/g3MeZfZiW69OURx0YzJ9QR0d02cy2AHGUD/3uPuB9huqmF
Hu4aMdRunaWm2hy1VYaVpV2nHtwQFo1LJZRes6khqw+24k8mCatkaq41ArdMvlMj
cvq6VrxqnL5M3WseX5ZLENKzpXY+0uLXSI8EL6WY6T7PbP7/5szPM7D9y+SnZTAK
QxnUKvBgMgieNTMrbxu2Kd4CDCKZBApB
=5Yn3
-----END PGP PRIVATE KEY BLOCK-----
"""
#------------
canto_I = """
I want a hero: an uncommon want,
When every year and month sends forth a new one,
Till, after cloying the gazettes with cant,
The age discovers he is not the true one;
Of such as these I should not care to vaunt,
I'll therefore take our ancient friend Don Juan—
We all have seen him, in the pantomime,
Sent to the devil somewhat ere his time.
"""
#------------
passphrase = "adonais"
km = null
km_priv = null
#------------
exports.load_pub = (T,cb) ->
await KeyManager.import_from_armored_pgp { raw : pub }, defer err, tmp, warnings
km = tmp
T.no_error err
T.assert km?, "got a key manager back"
cb()
#------------
exports.load_priv = (T,cb) ->
await KeyManager.import_from_armored_pgp { raw : priv }, defer err, tmp, warnings
km_priv = tmp
T.no_error err
throw err if err?
T.assert km_priv, "got a private key manager back"
cb()
#------------
exports.unlock_priv = (T,cb) ->
await km_priv.unlock_pgp { passphrase }, defer err
T.no_error err
cb()
#------------
exports.merge = (T,cb) ->
await km.merge_pgp_private { raw : priv }, defer err
T.no_error err
cb()
#------------
exports.unlock_merged = (T,cb) ->
await km.unlock_pgp { passphrase }, defer err
T.no_error err
cb()
#------------
armored_sig = null
armored_ctext = null
exports.sign = (T,cb) ->
sk = km.find_signing_pgp_key()
await burn { msg : canto_I, signing_key : sk }, defer err, tmp
armored_sig = tmp
T.no_error err
cb()
#------------
exports.verify = (T,cb) ->
await do_message { armored : armored_sig, keyfetch : km }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of Don Juan came back"
T.assert literals[0].get_data_signer()?, "was signed"
cb()
#------------
exports.encrypt_and_sign = (T,cb) ->
sk = km.find_signing_pgp_key()
ek = km.find_crypt_pgp_key()
await burn { msg : canto_I, signing_key : sk, encryption_key : ek }, defer err, tmp
armored_ctext = tmp
T.no_error err
cb()
#------------
exports.decrypt_and_verify = (T,cb) ->
await do_message { armored : armored_ctext, keyfetch : km }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of Don Juan came back"
T.assert literals[0].get_data_signer()?, "was signed"
cb()
#------------
tsenc = null
p3skb = null
exports.encrypt_private_to_server = (T,cb) ->
tsenc = new Encryptor { key : (new Buffer 'A heart whose love is innocent', 'utf8')}
await km.sign {}, defer err
T.no_error err, "signing worked"
await km.export_private_to_server { tsenc }, defer err, tmp
p3skb = tmp
T.no_error err
T.assert p3skb?, "a plausible answer came back from the server"
cb()
#------------
exports.decrypt_private_from_sever = (T,cb) ->
await KeyManager.import_from_p3skb { raw : p3skb }, defer err, tmp
T.no_error err, "import from p3skb worked"
km2 = tmp
T.assert km2?, "km came back"
T.assert km2.has_p3skb_private(), "has a private part"
T.assert km2.is_p3skb_locked(), "is locked"
await km2.unlock_p3skb { tsenc }, defer err
T.waypoint "unlocked"
T.no_error err
T.assert not(km2.is_p3skb_locked()), "no longer locked"
await do_message { armored : armored_ctext, keyfetch : km2 }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of Don Juan came back"
T.assert literals[0].get_data_signer()?, "was signed"
T.waypoint "decryption still worked"
sk = km2.find_signing_pgp_key()
T.assert sk?, "still has a signing key"
cb()
#------------
| 131176 | {KeyManager} = require '../../lib/keymanager'
{bufferify,ASP} = require '../../lib/util'
{make_esc} = require 'iced-error'
util = require 'util'
{box} = require '../../lib/keybase/encode'
{Encryptor} = require 'triplesec'
{base91} = require '../../lib/basex'
{burn} = require '../../lib/openpgp/burner'
{do_message} = require '../../lib/openpgp/processor'
#---------------------------------------------
pub = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - http://gpgtools.org
<KEY>
-----END PGP PUBLIC KEY BLOCK-----
"""
#------------
priv = """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - http://gpgtools.org
<KEY>y+SnZTAK
QxnUKvBgMgieNTMrbxu2Kd4CDCKZBApB
=5Yn3
-----END PGP PRIVATE KEY BLOCK-----
"""
#------------
canto_I = """
I want a hero: an uncommon want,
When every year and month sends forth a new one,
Till, after cloying the gazettes with cant,
The age discovers he is not the true one;
Of such as these I should not care to vaunt,
I'll therefore take our ancient friend <NAME>—
We all have seen him, in the pantomime,
Sent to the devil somewhat ere his time.
"""
#------------
passphrase = "<PASSWORD>"
km = null
km_priv = null
#------------
exports.load_pub = (T,cb) ->
await KeyManager.import_from_armored_pgp { raw : pub }, defer err, tmp, warnings
km = tmp
T.no_error err
T.assert km?, "got a key manager back"
cb()
#------------
exports.load_priv = (T,cb) ->
await KeyManager.import_from_armored_pgp { raw : priv }, defer err, tmp, warnings
km_priv = tmp
T.no_error err
throw err if err?
T.assert km_priv, "got a private key manager back"
cb()
#------------
exports.unlock_priv = (T,cb) ->
await km_priv.unlock_pgp { passphrase }, defer err
T.no_error err
cb()
#------------
exports.merge = (T,cb) ->
await km.merge_pgp_private { raw : priv }, defer err
T.no_error err
cb()
#------------
exports.unlock_merged = (T,cb) ->
await km.unlock_pgp { passphrase }, defer err
T.no_error err
cb()
#------------
armored_sig = null
armored_ctext = null
exports.sign = (T,cb) ->
sk = km.find_signing_pgp_key()
await burn { msg : canto_I, signing_key : sk }, defer err, tmp
armored_sig = tmp
T.no_error err
cb()
#------------
exports.verify = (T,cb) ->
await do_message { armored : armored_sig, keyfetch : km }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of <NAME> came back"
T.assert literals[0].get_data_signer()?, "was signed"
cb()
#------------
exports.encrypt_and_sign = (T,cb) ->
sk = km.find_signing_pgp_key()
ek = km.find_crypt_pgp_key()
await burn { msg : canto_I, signing_key : sk, encryption_key : ek }, defer err, tmp
armored_ctext = tmp
T.no_error err
cb()
#------------
exports.decrypt_and_verify = (T,cb) ->
await do_message { armored : armored_ctext, keyfetch : km }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of <NAME> came back"
T.assert literals[0].get_data_signer()?, "was signed"
cb()
#------------
tsenc = null
p3skb = null
exports.encrypt_private_to_server = (T,cb) ->
tsenc = new Encryptor { key : (new Buffer 'A heart whose love is innocent', 'utf8')}
await km.sign {}, defer err
T.no_error err, "signing worked"
await km.export_private_to_server { tsenc }, defer err, tmp
p3skb = tmp
T.no_error err
T.assert p3skb?, "a plausible answer came back from the server"
cb()
#------------
exports.decrypt_private_from_sever = (T,cb) ->
await KeyManager.import_from_p3skb { raw : p3skb }, defer err, tmp
T.no_error err, "import from p3skb worked"
km2 = tmp
T.assert km2?, "km came back"
T.assert km2.has_p3skb_private(), "has a private part"
T.assert km2.is_p3skb_locked(), "is locked"
await km2.unlock_p3skb { tsenc }, defer err
T.waypoint "unlocked"
T.no_error err
T.assert not(km2.is_p3skb_locked()), "no longer locked"
await do_message { armored : armored_ctext, keyfetch : km2 }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of <NAME> came back"
T.assert literals[0].get_data_signer()?, "was signed"
T.waypoint "decryption still worked"
sk = km2.find_signing_pgp_key()
T.assert sk?, "still has a signing key"
cb()
#------------
| true | {KeyManager} = require '../../lib/keymanager'
{bufferify,ASP} = require '../../lib/util'
{make_esc} = require 'iced-error'
util = require 'util'
{box} = require '../../lib/keybase/encode'
{Encryptor} = require 'triplesec'
{base91} = require '../../lib/basex'
{burn} = require '../../lib/openpgp/burner'
{do_message} = require '../../lib/openpgp/processor'
#---------------------------------------------
pub = """
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - http://gpgtools.org
PI:KEY:<KEY>END_PI
-----END PGP PUBLIC KEY BLOCK-----
"""
#------------
priv = """
-----BEGIN PGP PRIVATE KEY BLOCK-----
Version: GnuPG/MacGPG2 v2.0.22 (Darwin)
Comment: GPGTools - http://gpgtools.org
PI:KEY:<KEY>END_PIy+SnZTAK
QxnUKvBgMgieNTMrbxu2Kd4CDCKZBApB
=5Yn3
-----END PGP PRIVATE KEY BLOCK-----
"""
#------------
canto_I = """
I want a hero: an uncommon want,
When every year and month sends forth a new one,
Till, after cloying the gazettes with cant,
The age discovers he is not the true one;
Of such as these I should not care to vaunt,
I'll therefore take our ancient friend PI:NAME:<NAME>END_PI—
We all have seen him, in the pantomime,
Sent to the devil somewhat ere his time.
"""
#------------
passphrase = "PI:PASSWORD:<PASSWORD>END_PI"
km = null
km_priv = null
#------------
exports.load_pub = (T,cb) ->
await KeyManager.import_from_armored_pgp { raw : pub }, defer err, tmp, warnings
km = tmp
T.no_error err
T.assert km?, "got a key manager back"
cb()
#------------
exports.load_priv = (T,cb) ->
await KeyManager.import_from_armored_pgp { raw : priv }, defer err, tmp, warnings
km_priv = tmp
T.no_error err
throw err if err?
T.assert km_priv, "got a private key manager back"
cb()
#------------
exports.unlock_priv = (T,cb) ->
await km_priv.unlock_pgp { passphrase }, defer err
T.no_error err
cb()
#------------
exports.merge = (T,cb) ->
await km.merge_pgp_private { raw : priv }, defer err
T.no_error err
cb()
#------------
exports.unlock_merged = (T,cb) ->
await km.unlock_pgp { passphrase }, defer err
T.no_error err
cb()
#------------
armored_sig = null
armored_ctext = null
exports.sign = (T,cb) ->
sk = km.find_signing_pgp_key()
await burn { msg : canto_I, signing_key : sk }, defer err, tmp
armored_sig = tmp
T.no_error err
cb()
#------------
exports.verify = (T,cb) ->
await do_message { armored : armored_sig, keyfetch : km }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of PI:NAME:<NAME>END_PI came back"
T.assert literals[0].get_data_signer()?, "was signed"
cb()
#------------
exports.encrypt_and_sign = (T,cb) ->
sk = km.find_signing_pgp_key()
ek = km.find_crypt_pgp_key()
await burn { msg : canto_I, signing_key : sk, encryption_key : ek }, defer err, tmp
armored_ctext = tmp
T.no_error err
cb()
#------------
exports.decrypt_and_verify = (T,cb) ->
await do_message { armored : armored_ctext, keyfetch : km }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of PI:NAME:<NAME>END_PI came back"
T.assert literals[0].get_data_signer()?, "was signed"
cb()
#------------
tsenc = null
p3skb = null
exports.encrypt_private_to_server = (T,cb) ->
tsenc = new Encryptor { key : (new Buffer 'A heart whose love is innocent', 'utf8')}
await km.sign {}, defer err
T.no_error err, "signing worked"
await km.export_private_to_server { tsenc }, defer err, tmp
p3skb = tmp
T.no_error err
T.assert p3skb?, "a plausible answer came back from the server"
cb()
#------------
exports.decrypt_private_from_sever = (T,cb) ->
await KeyManager.import_from_p3skb { raw : p3skb }, defer err, tmp
T.no_error err, "import from p3skb worked"
km2 = tmp
T.assert km2?, "km came back"
T.assert km2.has_p3skb_private(), "has a private part"
T.assert km2.is_p3skb_locked(), "is locked"
await km2.unlock_p3skb { tsenc }, defer err
T.waypoint "unlocked"
T.no_error err
T.assert not(km2.is_p3skb_locked()), "no longer locked"
await do_message { armored : armored_ctext, keyfetch : km2 }, defer err, literals
T.no_error err
T.equal literals[0].toString(), canto_I, "canto I of PI:NAME:<NAME>END_PI came back"
T.assert literals[0].get_data_signer()?, "was signed"
T.waypoint "decryption still worked"
sk = km2.find_signing_pgp_key()
T.assert sk?, "still has a signing key"
cb()
#------------
|
[
{
"context": "d TAPi18n.prototype,\n _loaded_lang_session_key: \"TAPi18n::loaded_lang\"\n\n _enable: (conf) ->\n # tap:i18n gets enable",
"end": 1612,
"score": 0.997180163860321,
"start": 1592,
"tag": "KEY",
"value": "TAPi18n::loaded_lang"
}
] | lib/tap_i18n/tap_i18n-common.coffee | chandonnet/tap-i18n | 10 | fallback_language = globals.fallback_language
TAPi18n = ->
EventEmitter.call @
@_fallback_language = fallback_language
@_language_changed_tracker = new Tracker.Dependency
@_loaded_languages = [fallback_language] # stores the loaded languages, the fallback language is loaded automatically
@conf = null # If conf isn't null we assume that tap:i18n is enabled for the project.
# We assume conf is valid, we sterilize and validate it during the build process.
@packages = {} # Stores the packages' package-tap.i18n jsons
@languages_names = {} # Stores languages that we've found languages files for in the project dir.
# format:
# {
# lang_tag: [lang_name_in_english, lang_name_in_local_language]
# }
@translations = {} # Stores the packages/project translations - Server side only
# fallback_language translations are not stored here
if Meteor.isClient
Session.set @_loaded_lang_session_key, null
@_languageSpecificTranslators = {}
@_languageSpecificTranslatorsTrackers = {}
if Meteor.isServer
@server_translators = {}
Meteor.startup =>
# If tap-i18n is enabled for that project
if @_enabled()
@_registerHTTPMethod()
@__ = @_getPackageI18nextProxy(globals.project_translations_domain)
TAPi18next.setLng fallback_language
return @
Util.inherits TAPi18n, EventEmitter
_.extend TAPi18n.prototype,
_loaded_lang_session_key: "TAPi18n::loaded_lang"
_enable: (conf) ->
# tap:i18n gets enabled for a project once a conf file is set for it.
# It can be either a conf object that was set by project-tap.i18n file or
# a default conf, which is being added if the project has lang files
# (*.i18n.json) but not project-tap.i18n
@conf = conf
@._onceEnabled()
_onceEnabled: () ->
# The arch specific code can use this for procedures that should be performed once
# tap:i18n gets enabled (project conf file is being set)
return
_enabled: ->
# read the comment of @conf
@conf?
_getPackageDomain: (package_name) ->
package_name.replace(/:/g, "-")
addResourceBundle: (lang_tag, package_name, translations) ->
TAPi18next.addResourceBundle(lang_tag, @_getPackageDomain(package_name), translations)
_getSpecificLangTranslator: (lang) ->
current_lang = TAPi18next.lng()
translator = null
TAPi18next.setLng lang, {fixLng: true}, (lang_translator) =>
translator = lang_translator
# Restore i18next lang that had been changed in the process of generating
# lang specific translator
TAPi18next.setLng current_lang
return translator
_getProjectLanguages: () ->
# Return an array of languages available for the current project
if @._enabled()
if _.isArray @.conf.supported_languages
return _.union([@._fallback_language], @.conf.supported_languages)
else
# If supported_languages is null, all the languages we found
# translations files to in the project level are considered supported.
# We use the @.languages_names array to tell which languages we found
# since for every i18n.json file we found in the project level we add
# an entry for its language to @.languages_names in the build process.
#
# We also know for certain that when tap-i18n is enabled the fallback
# lang is in @.languages_names
return _.keys @.languages_names
else
return [@._fallback_language]
getLanguages: ->
if not @._enabled()
return null
languages = {}
for lang_tag in @._getProjectLanguages()
languages[lang_tag] =
name: @.languages_names[lang_tag][1]
en: @.languages_names[lang_tag][0]
languages
_loadLangFileObject: (language_tag, data) ->
for package_name, package_keys of data
# Translations that are added by loadTranslations() have higher priority
package_keys = _.extend({}, package_keys, @_loadTranslations_cache[language_tag]?[package_name] or {})
@addResourceBundle(language_tag, package_name, package_keys)
_loadTranslations_cache: {}
loadTranslations: (translations, namespace) ->
project_languages = @_getProjectLanguages()
for language_tag, translation_keys of translations
if not @_loadTranslations_cache[language_tag]?
@_loadTranslations_cache[language_tag] = {}
if not @_loadTranslations_cache[language_tag][namespace]?
@_loadTranslations_cache[language_tag][namespace] = {}
_.extend(@_loadTranslations_cache[language_tag][namespace], translation_keys)
@addResourceBundle(language_tag, namespace, translation_keys)
if Meteor.isClient and @getLanguage() == language_tag
# Retranslate if session language updated
@_language_changed_tracker.changed()
| 8125 | fallback_language = globals.fallback_language
TAPi18n = ->
EventEmitter.call @
@_fallback_language = fallback_language
@_language_changed_tracker = new Tracker.Dependency
@_loaded_languages = [fallback_language] # stores the loaded languages, the fallback language is loaded automatically
@conf = null # If conf isn't null we assume that tap:i18n is enabled for the project.
# We assume conf is valid, we sterilize and validate it during the build process.
@packages = {} # Stores the packages' package-tap.i18n jsons
@languages_names = {} # Stores languages that we've found languages files for in the project dir.
# format:
# {
# lang_tag: [lang_name_in_english, lang_name_in_local_language]
# }
@translations = {} # Stores the packages/project translations - Server side only
# fallback_language translations are not stored here
if Meteor.isClient
Session.set @_loaded_lang_session_key, null
@_languageSpecificTranslators = {}
@_languageSpecificTranslatorsTrackers = {}
if Meteor.isServer
@server_translators = {}
Meteor.startup =>
# If tap-i18n is enabled for that project
if @_enabled()
@_registerHTTPMethod()
@__ = @_getPackageI18nextProxy(globals.project_translations_domain)
TAPi18next.setLng fallback_language
return @
Util.inherits TAPi18n, EventEmitter
_.extend TAPi18n.prototype,
_loaded_lang_session_key: "<KEY>"
_enable: (conf) ->
# tap:i18n gets enabled for a project once a conf file is set for it.
# It can be either a conf object that was set by project-tap.i18n file or
# a default conf, which is being added if the project has lang files
# (*.i18n.json) but not project-tap.i18n
@conf = conf
@._onceEnabled()
_onceEnabled: () ->
# The arch specific code can use this for procedures that should be performed once
# tap:i18n gets enabled (project conf file is being set)
return
_enabled: ->
# read the comment of @conf
@conf?
_getPackageDomain: (package_name) ->
package_name.replace(/:/g, "-")
addResourceBundle: (lang_tag, package_name, translations) ->
TAPi18next.addResourceBundle(lang_tag, @_getPackageDomain(package_name), translations)
_getSpecificLangTranslator: (lang) ->
current_lang = TAPi18next.lng()
translator = null
TAPi18next.setLng lang, {fixLng: true}, (lang_translator) =>
translator = lang_translator
# Restore i18next lang that had been changed in the process of generating
# lang specific translator
TAPi18next.setLng current_lang
return translator
_getProjectLanguages: () ->
# Return an array of languages available for the current project
if @._enabled()
if _.isArray @.conf.supported_languages
return _.union([@._fallback_language], @.conf.supported_languages)
else
# If supported_languages is null, all the languages we found
# translations files to in the project level are considered supported.
# We use the @.languages_names array to tell which languages we found
# since for every i18n.json file we found in the project level we add
# an entry for its language to @.languages_names in the build process.
#
# We also know for certain that when tap-i18n is enabled the fallback
# lang is in @.languages_names
return _.keys @.languages_names
else
return [@._fallback_language]
getLanguages: ->
if not @._enabled()
return null
languages = {}
for lang_tag in @._getProjectLanguages()
languages[lang_tag] =
name: @.languages_names[lang_tag][1]
en: @.languages_names[lang_tag][0]
languages
_loadLangFileObject: (language_tag, data) ->
for package_name, package_keys of data
# Translations that are added by loadTranslations() have higher priority
package_keys = _.extend({}, package_keys, @_loadTranslations_cache[language_tag]?[package_name] or {})
@addResourceBundle(language_tag, package_name, package_keys)
_loadTranslations_cache: {}
loadTranslations: (translations, namespace) ->
project_languages = @_getProjectLanguages()
for language_tag, translation_keys of translations
if not @_loadTranslations_cache[language_tag]?
@_loadTranslations_cache[language_tag] = {}
if not @_loadTranslations_cache[language_tag][namespace]?
@_loadTranslations_cache[language_tag][namespace] = {}
_.extend(@_loadTranslations_cache[language_tag][namespace], translation_keys)
@addResourceBundle(language_tag, namespace, translation_keys)
if Meteor.isClient and @getLanguage() == language_tag
# Retranslate if session language updated
@_language_changed_tracker.changed()
| true | fallback_language = globals.fallback_language
TAPi18n = ->
EventEmitter.call @
@_fallback_language = fallback_language
@_language_changed_tracker = new Tracker.Dependency
@_loaded_languages = [fallback_language] # stores the loaded languages, the fallback language is loaded automatically
@conf = null # If conf isn't null we assume that tap:i18n is enabled for the project.
# We assume conf is valid, we sterilize and validate it during the build process.
@packages = {} # Stores the packages' package-tap.i18n jsons
@languages_names = {} # Stores languages that we've found languages files for in the project dir.
# format:
# {
# lang_tag: [lang_name_in_english, lang_name_in_local_language]
# }
@translations = {} # Stores the packages/project translations - Server side only
# fallback_language translations are not stored here
if Meteor.isClient
Session.set @_loaded_lang_session_key, null
@_languageSpecificTranslators = {}
@_languageSpecificTranslatorsTrackers = {}
if Meteor.isServer
@server_translators = {}
Meteor.startup =>
# If tap-i18n is enabled for that project
if @_enabled()
@_registerHTTPMethod()
@__ = @_getPackageI18nextProxy(globals.project_translations_domain)
TAPi18next.setLng fallback_language
return @
Util.inherits TAPi18n, EventEmitter
_.extend TAPi18n.prototype,
_loaded_lang_session_key: "PI:KEY:<KEY>END_PI"
_enable: (conf) ->
# tap:i18n gets enabled for a project once a conf file is set for it.
# It can be either a conf object that was set by project-tap.i18n file or
# a default conf, which is being added if the project has lang files
# (*.i18n.json) but not project-tap.i18n
@conf = conf
@._onceEnabled()
_onceEnabled: () ->
# The arch specific code can use this for procedures that should be performed once
# tap:i18n gets enabled (project conf file is being set)
return
_enabled: ->
# read the comment of @conf
@conf?
_getPackageDomain: (package_name) ->
package_name.replace(/:/g, "-")
addResourceBundle: (lang_tag, package_name, translations) ->
TAPi18next.addResourceBundle(lang_tag, @_getPackageDomain(package_name), translations)
_getSpecificLangTranslator: (lang) ->
current_lang = TAPi18next.lng()
translator = null
TAPi18next.setLng lang, {fixLng: true}, (lang_translator) =>
translator = lang_translator
# Restore i18next lang that had been changed in the process of generating
# lang specific translator
TAPi18next.setLng current_lang
return translator
_getProjectLanguages: () ->
# Return an array of languages available for the current project
if @._enabled()
if _.isArray @.conf.supported_languages
return _.union([@._fallback_language], @.conf.supported_languages)
else
# If supported_languages is null, all the languages we found
# translations files to in the project level are considered supported.
# We use the @.languages_names array to tell which languages we found
# since for every i18n.json file we found in the project level we add
# an entry for its language to @.languages_names in the build process.
#
# We also know for certain that when tap-i18n is enabled the fallback
# lang is in @.languages_names
return _.keys @.languages_names
else
return [@._fallback_language]
getLanguages: ->
if not @._enabled()
return null
languages = {}
for lang_tag in @._getProjectLanguages()
languages[lang_tag] =
name: @.languages_names[lang_tag][1]
en: @.languages_names[lang_tag][0]
languages
_loadLangFileObject: (language_tag, data) ->
for package_name, package_keys of data
# Translations that are added by loadTranslations() have higher priority
package_keys = _.extend({}, package_keys, @_loadTranslations_cache[language_tag]?[package_name] or {})
@addResourceBundle(language_tag, package_name, package_keys)
_loadTranslations_cache: {}
loadTranslations: (translations, namespace) ->
project_languages = @_getProjectLanguages()
for language_tag, translation_keys of translations
if not @_loadTranslations_cache[language_tag]?
@_loadTranslations_cache[language_tag] = {}
if not @_loadTranslations_cache[language_tag][namespace]?
@_loadTranslations_cache[language_tag][namespace] = {}
_.extend(@_loadTranslations_cache[language_tag][namespace], translation_keys)
@addResourceBundle(language_tag, namespace, translation_keys)
if Meteor.isClient and @getLanguage() == language_tag
# Retranslate if session language updated
@_language_changed_tracker.changed()
|
[
{
"context": "dd 'service/oab/bug',\n post: () ->\n whoto = ['help@openaccessbutton.org']\n try\n if this.request.body?.form is 'wr",
"end": 14302,
"score": 0.9999222755432129,
"start": 14277,
"tag": "EMAIL",
"value": "help@openaccessbutton.org"
},
{
"context": "request.b... | noddy/service/oabutton/api.coffee | jibe-b/website | 0 |
import moment from 'moment'
# these are global so can be accessed on other oabutton files
@oab_support = new API.collection {index:"oab",type:"support"}
@oab_availability = new API.collection {index:"oab",type:"availability"}
@oab_request = new API.collection {index:"oab",type:"request",history:true}
# the normal declaration of API.service.oab is in admin.coffee, because it gets loaded before this api.coffee file
API.add 'service/oab',
get: () ->
return {data: 'The Open Access Button API.'}
post:
roleRequired:'openaccessbutton.user'
action: () ->
return {data: 'You are authenticated'}
_avail =
authOptional: true
action: () ->
opts = if not _.isEmpty(this.request.body) then this.request.body else this.queryParams
opts.refresh ?= this.queryParams.refresh
opts.from ?= this.queryParams.from
opts.plugin ?= this.queryParams.plugin
opts.all ?= this.queryParams.all
opts.titles ?= this.queryParams.titles
ident = opts.doi
ident ?= opts.url
ident ?= 'pmid' + opts.pmid if opts.pmid
ident ?= 'pmc' + opts.pmc.toLowerCase().replace('pmc','') if opts.pmc
ident ?= 'TITLE:' + opts.title if opts.title
ident ?= 'CITATION:' + opts.citation if opts.citation
opts.url = ident
# should maybe put auth on the ability to pass in library and libraries...
opts.libraries = opts.libraries.split(',') if opts.libraries
opts.sources = opts.sources.split(',') if opts.sources
if this.user?
opts.uid = this.userId
opts.username = this.user.username
opts.email = this.user.emails[0].address
return if not opts.test and API.service.oab.blacklist(opts.url) then 400 else {data:API.service.oab.find(opts)}
API.add 'service/oab/find', get:_avail, post:_avail
API.add 'service/oab/availability', get:_avail, post:_avail # exists for legacy reasons
API.add 'service/oab/resolve',
get: () ->
return API.service.oab.resolve this.queryParams,undefined,this.queryParams.sources?.split(','),this.queryParams.all,this.queryParams.titles,this.queryParams.journal
API.add 'service/oab/ill/:library',
post: () ->
opts = this.request.body;
opts.library = this.urlParams.library;
return API.service.oab.ill opts
API.add 'service/oab/request',
get:
roleRequired:'openaccessbutton.user'
action: () ->
return {data: 'You have access :)'}
post:
authOptional: true
action: () ->
req = this.request.body
req.doi ?= this.queryParams.doi if this.queryParams.doi?
req.url ?= this.queryParams.url if this.queryParams.url?
req.test = if this.request.headers.host is 'dev.api.cottagelabs.com' then true else false
return {data: API.service.oab.request(req,this.user,this.queryParams.fast)}
API.add 'service/oab/request/:rid',
get:
authOptional: true
action: () ->
if r = oab_request.get this.urlParams.rid
r.supports = API.service.oab.supports(this.urlParams.rid,this.userId) if this.userId
others = oab_request.search({url:r.url})
if others?
for o in others.hits.hits
r.other = o._source._id if o._source._id isnt r._id and o._source.type isnt r.type
return {data: r}
else
return 404
post:
roleRequired:'openaccessbutton.user',
action: () ->
if r = oab_request.get this.urlParams.rid
n = {}
if not r.user? and not r.story? and this.request.body.story
n.story = this.request.body.story
n.user = id: this.user._id, email: this.user.emails[0].address, username: (this.user.profile?.firstname ? this.user.username ? this.user.emails[0].address)
n.user.firstname = this.user.profile?.firstname
n.user.lastname = this.user.profile?.lastname
n.user.affiliation = this.user.service?.openaccessbutton?.profile?.affiliation
n.user.profession = this.user.service?.openaccessbutton?.profile?.profession
n.count = 1 if not r.count? or r.count is 0
if API.accounts.auth 'openaccessbutton.admin', this.user
n.test ?= this.request.body.test if this.request.body.test? and this.request.body.test isnt r.test
n.status ?= this.request.body.status if this.request.body.status? and this.request.body.status isnt r.status
n.rating ?= this.request.body.rating if this.request.body.rating? and this.request.body.rating isnt r.rating
n.name ?= this.request.body.name if this.request.body.name? and this.request.body.name isnt r.name
n.email ?= this.request.body.email if this.request.body.email? and this.request.body.email isnt r.email
n.author_affiliation ?= this.request.body.author_affiliation if this.request.body.author_affiliation? and this.request.body.author_affiliation isnt r.author_affiliation
n.story ?= this.request.body.story if this.request.body.story? and this.request.body.story isnt r.story
n.journal ?= this.request.body.journal if this.request.body.journal? and this.request.body.journal isnt r.journal
n.notes = this.request.body.notes if this.request.body.notes? and this.request.body.notes isnt r.notes
n.email = this.request.body.email if this.request.body.email? and ( API.accounts.auth('openaccessbutton.admin',this.user) || not r.status? || r.status is 'help' || r.status is 'moderate' || r.status is 'refused' )
n.story = this.request.body.story if r.user? and this.userId is r.user.id and this.request.body.story? and this.request.body.story isnt r.story
n.url ?= this.request.body.url if this.request.body.url? and this.request.body.url isnt r.url
n.title ?= this.request.body.title if this.request.body.title? and this.request.body.title isnt r.title
n.doi ?= this.request.body.doi if this.request.body.doi? and this.request.body.doi isnt r.doi
if n.story
res = oab_request.search 'rating:1 AND story.exact:"' + n.story + '"'
if res.hits.total
nres = oab_request.search 'rating:0 AND story.exact:"' + n.story + '"'
n.rating = 1 if nres.hits.total is 0
if not n.status?
if (not r.title and not n.title) || (not r.email and not n.email) || (not r.story and not n.story)
n.status = 'help' if r.status isnt 'help'
else if r.status is 'help' and ( (r.title or n.title) and (r.email or n.email) and (r.story or n.story) )
n.status = 'moderate'
if n.title? and typeof n.title is 'string'
try n.title = n.title.charAt(0).toUpperCase() + n.title.slice(1)
if n.journal? and typeof n.journal is 'string'
try n.journal = n.journal.charAt(0).toUpperCase() + n.journal.slice(1)
if not n.doi? and not r.doi? and r.url? and r.url.indexOf('10.') isnt -1 and r.url.split('10.')[1].indexOf('/') isnt -1
n.doi = '10.' + r.url.split('10.')[1]
r.doi = n.doi
if r.doi and not r.title and not n.title
try
cr = API.use.crossref.works.doi r.doi
n.title = cr.title[0]
n.author ?= cr.author if not r.author?
n.journal ?= cr['container-title'][0] if cr['container-title']? and not r.journal?
n.issn ?= cr.ISSN[0] if cr.ISSN? and not r.issn?
n.subject ?= cr.subject if not r.subject?
n.publisher ?= cr.publisher if not r.publisher?
n.year = cr['published-print']['date-parts'][0][0] if not r.year? and cr['published-print']?['date-parts']? and cr['published-print']['date-parts'].length > 0 and cr['published-print']['date-parts'][0].length > 0
n.crossref_type = cr.type if not r.crossref_type?
n.year ?= cr.created['date-time'].split('-')[0] if not r.year? and cr.created?['date-time']?
r.author_affiliation = n.author_affiliation if n.author_affiliation?
if n.crossref_type? and ['journal-article', 'proceedings-article'].indexOf(n.crossref_type) is -1
n.status = 'closed'
n.closed_on_update = true
n.closed_on_update_reason = 'notarticle'
if (not r.email and not n.email) and r.author and r.author.length and (r.author[0].affiliation? or r.author_affiliation)
try
email = API.use.hunter.email {company: (r.author_affiliation ? r.author[0].affiliation[0].name), first_name: r.author[0].family, last_name: r.author[0].given}, API.settings.service.openaccessbutton.hunter.api_key
if email?.email?
n.email = email.email
oab_request.update(r._id,n) if JSON.stringify(n) isnt '{}'
return oab_request.get r._id
else
return 404
delete:
roleRequired:'openaccessbutton.user'
action: () ->
r = oab_request.get this.urlParams.rid
oab_request.remove(this.urlParams.rid) if API.accounts.auth('openaccessbutton.admin',this.user) or this.userId is r.user.id
return {}
API.add 'service/oab/request/:rid/admin/:action',
get:
roleRequired:'openaccessbutton.admin'
action: () ->
API.service.oab.admin this.urlParams.rid,this.urlParams.action
return {}
API.add 'service/oab/support/:rid',
get:
authOptional: true
action: () ->
return API.service.oab.support this.urlParams.rid, this.queryParams.story, this.user
post:
authOptional: true
action: () ->
return API.service.oab.support this.urlParams.rid, this.request.body.story, this.user
API.add 'service/oab/supports/:rid',
get:
roleRequired:'openaccessbutton.user'
action: () ->
return API.service.oab.supports this.urlParams.rid, this.user
API.add 'service/oab/supports',
get: () -> return oab_support.search this.queryParams
post: () -> return oab_support.search this.bodyParams
API.add 'service/oab/availabilities',
get: () -> return oab_availability.search this.queryParams
post: () -> return oab_availability.search this.bodyParams
API.add 'service/oab/requests',
get: () -> return oab_request.search this.queryParams
post: () -> return oab_request.search this.bodyParams
API.add 'service/oab/requests.csv', { get: (() -> API.convert.json2csv2response(this, oab_request.search(this.queryParams ? this.bodyParams))), post: (() -> API.convert.json2csv2response(this, oab_request.search(this.queryParams ? this.bodyParams))) }
API.add 'service/oab/history',
get: () -> return oab_request.history this.queryParams
post: () -> return oab_request.history this.bodyParams
API.add 'service/oab/users',
get:
roleRequired:'openaccessbutton.admin'
action: () -> return Users.search this.queryParams, {restrict:[{exists:{field:'roles.openaccessbutton'}}]}
post:
roleRequired:'openaccessbutton.admin'
action: () -> return Users.search this.bodyParams, {restrict:[{exists:{field:'roles.openaccessbutton'}}]}
API.add 'service/oab/scrape',
get:
#roleRequired:'openaccessbutton.user'
action: () -> return {data:API.service.oab.scrape(this.queryParams.url,this.queryParams.content,this.queryParams.doi)}
API.add 'service/oab/redirect',
get: () -> return API.service.oab.redirect this.queryParams.url
API.add 'service/oab/blacklist',
get: () -> return {data:API.service.oab.blacklist(undefined,undefined,this.queryParams.stale)}
API.add 'service/oab/templates',
get: () -> return API.service.oab.template(this.queryParams.template,this.queryParams.refresh)
API.add 'service/oab/substitute',
post: () -> return API.service.oab.substitute this.request.body.content,this.request.body.vars,this.request.body.markdown
API.add 'service/oab/mail',
post:
roleRequired:'openaccessbutton.admin'
action: () -> return API.service.oab.mail this.request.body
API.add 'service/oab/receive/:rid',
get: () -> return if r = oab_request.find({receiver:this.urlParams.rid}) then r else 404
post:
authOptional: true
action: () ->
if r = oab_request.find {receiver:this.urlParams.rid}
admin = this.bodyParams.admin and this.userId and API.accounts.auth('openaccessbutton.admin',this.user)
return API.service.oab.receive this.urlParams.rid, this.request.files, this.bodyParams.url, this.bodyParams.title, this.bodyParams.description, this.bodyParams.firstname, this.bodyParams.lastname, undefined, admin
else
return 404
API.add 'service/oab/redeposit/:rid',
post:
roleRequired: 'openaccessbutton.admin'
action: () -> return API.service.oab.redeposit this.urlParams.rid
API.add 'service/oab/receive/:rid/:holdrefuse',
get: () ->
if r = oab_request.find {receiver:this.urlParams.rid}
if this.urlParams.holdrefuse is 'refuse'
API.service.oab.refuse r._id, this.queryParams.reason
else
if isNaN(parseInt(this.urlParams.holdrefuse))
return 400
else
API.service.oab.hold r._id, parseInt(this.urlParams.holdrefuse)
return true
else
return 404
API.add 'service/oab/dnr',
get:
authOptional: true
action: () ->
return API.service.oab.dnr() if not this.queryParams.email? and this.user and API.accounts.auth 'openaccessbutton.admin', this.user
d = {}
d.dnr = API.service.oab.dnr this.queryParams.email
if not d.dnr and this.queryParams.user
u = API.accounts.retrieve this.queryParams.user
d.dnr = 'user' if u.emails[0].address is this.queryParams.email
if not d.dnr and this.queryParams.request
r = oab_request.get this.queryParams.request
d.dnr = 'creator' if r.user.email is this.queryParams.email
if not d.dnr
d.dnr = 'supporter' if oab_support.find {rid:this.queryParams.request, email:this.queryParams.email}
if not d.dnr and this.queryParams.validate
d.validation = API.mail.validate this.queryParams.email, API.settings.service?.openaccessbutton?.mail?.pubkey
d.dnr = 'invalid' if not d.validation.is_valid
return d
post: () ->
e = this.queryParams.email ? this.request.body.email
refuse = if this.queryParams.refuse in ['false',false] then false else true
return if e then API.service.oab.dnr(e,true,refuse) else 400
delete:
authRequired: 'openaccessbutton.admin'
action: () ->
oab_dnr.remove({email:this.queryParams.email}) if this.queryParams.email
return {}
API.add 'service/oab/bug',
post: () ->
whoto = ['help@openaccessbutton.org']
try
if this.request.body?.form is 'wrong'
whoto.push 'requests@openaccessbutton.org'
API.mail.send {
service: 'openaccessbutton',
from: 'help@openaccessbutton.org',
to: whoto,
subject: 'Feedback form submission',
text: JSON.stringify(this.request.body,undefined,2)
}
return {
statusCode: 302,
headers: {
'Content-Type': 'text/plain',
'Location': (if API.settings.dev then 'https://dev.openaccessbutton.org' else 'https://openaccessbutton.org') + '/bug#defaultthanks'
},
body: 'Location: ' + (if API.settings.dev then 'https://dev.openaccessbutton.org' else 'https://openaccessbutton.org') + '/bug#defaultthanks'
}
API.add 'service/oab/import',
post:
roleRequired: 'openaccessbutton.admin', # later could be opened to other oab users, with some sort of quota / limit
action: () ->
try
records = this.request.body
resp = {found:0,updated:0,missing:[]}
for p in this.request.body
if p._id
rq = oab_request.get p._id
if rq
resp.found += 1
updates = []
update = []
for up of p
p[up] = undefined if p[up] is 'DELETE' # this is not used yet
if (not p[up]? or p[up]) and p[up] not in ['createdAt','created_date','plugin','from','embedded','names']
if up.indexOf('refused.') is 0 and ( not rq.refused? or rq.refused[up.split('.')[1]] isnt p[up] )
rq.refused ?= {}
rq.refused[up.split('.')[1]] = p[up]
update.refused ?= {}
update.refused[up.split('.')[1]] = p[up]
else if up.indexOf('received.') is 0 and ( not rq.received? or rq.received[up.split('.')[1]] isnt p[up] )
rq.received ?= {}
rq.received[up.split('.')[1]] = p[up]
update.received = rq.received
else if up.indexOf('followup.') is 0
if up isnt 'followup.date' and p['followup.count'] isnt rq.followup?.count
rq.followup ?= {}
rq.followup.count = p['followup.count']
rq.followup.date ?= []
rq.followup.date.push moment(Date.now(), "x").format "YYYYMMDD"
update.followup = rq.followup
else if up is 'sherpa.color' and ( not rq.sherpa? or rq.sherpa.color isnt p[up] )
rq.sherpa = {color:p[up]}
update.sherpa = rq.sherpa
else if rq[up] isnt p[up]
rq[up] = p[up]
update[up] = rq[up]
try
rq._bulk_import ?= {}
rq._bulk_import[Date.now()] = JSON.stringify update
# TODO should update collection update to take lists of records that it updates in bulk
rq.updatedAt = Date.now()
rq.updated_date = moment(rq.updatedAt, "x").format "YYYY-MM-DD HHmm.ss"
updates.push rq
resp.updated += 1
else
resp.missing.push p._id
if updates.length
resp.imports = oab_request.import(updates)
return resp
catch err
return {status:'error'}
#match = {must:[{term:{'signature.exact':proc.signature}}], must_not:[{exists:{field:'_raw_result.error'}}]}
#try
# if typeof job.refresh is 'number' and job.refresh isnt 0
# d = new Date()
# match.must.push {range:{createdAt:{gt:d.setDate(d.getDate() - job.refresh)}}}
# Admin CSV export of dnr/mail/request records or of request change history.
# Optional from/to query params (ms timestamps) bound createdAt; the `to`
# bound is extended by a day so the whole end date is included.
API.add 'service/oab/export/:what',
  get:
    roleRequired: 'openaccessbutton.admin',
    action: () ->
      results = []
      # 'request' exports a fixed column set; other types build columns
      # dynamically from whatever keys the fetched records contain
      fields = if this.urlParams.what is 'changes' then ['_id','createdAt','created_date','action'] else if this.urlParams.what is 'request' then ['_id','created_date','type','count','status','title','url','doi','journal','publisher','sherpa.color','name','names','email','author_affiliation','user.username','user.email','user.firstname','user.lastname','story','rating','receiver','followup.count','followup.date','refused.email','refused.date','received.date','received.from','received.description','received.url','received.admin','received.cron','received.notfromauthor','notes','plugin','from','embedded'] else []
      match = {}
      match.range = {createdAt: {}} if this.queryParams.from or this.queryParams.to
      match.range.createdAt.gte = this.queryParams.from if this.queryParams.from
      match.range.createdAt.lte = parseInt(this.queryParams.to) + 86400000 if this.queryParams.to
      if this.urlParams.what is 'dnr' or this.urlParams.what is 'mail' or this.urlParams.what is 'request'
        results = if this.urlParams.what is 'dnr' then oab_dnr.fetch(match, true) else if this.urlParams.what is 'request' then oab_request.fetch(match, true) else mail_progress.fetch match, true
        for r of results
          if this.urlParams.what isnt 'request'
            for f of results[r]
              fields.push(f) if fields.indexOf(f) is -1
          else
            # flatten author objects into displayable "given family" names
            results[r].names = []
            if results[r].author?
              for a in results[r].author
                if a.family
                  results[r].names.push a.given + ' ' + a.family
      else if this.urlParams.what is 'changes'
        # one row per history entry, carrying the changed fields of its action
        res = oab_request.fetch_history match, true
        for r in res
          m = {
            action: r.action,
            _id: r.document,
            createdAt: r.createdAt,
            created_date: r.created_date
          }
          if r.action
            for mr of r[r.action]
              fields.push(mr) if fields.indexOf(mr) is -1
              m[mr] = r[r.action][mr]
          if r.string
            fields.push('string') if fields.indexOf('string') is -1
            m.string = r.string
          results.push m
      csv = API.convert.json2csv {fields:fields}, undefined, results
      name = 'export_' + this.urlParams.what
      this.response.writeHead(200, {
        'Content-disposition': "attachment; filename="+name+".csv",
        'Content-type': 'text/csv; charset=UTF-8',
        'Content-Encoding': 'UTF-8'
      })
      this.response.end(csv)
      this.done() # added this now that underlying lib has been rewritten - should work without crash. Below note left for posterity.
      # NOTE: this should really return to stop restivus throwing an error, and should really include
      # the file length in the above head call, but this causes an intermittent write afer end error
      # which crashes the whole system. So pick the lesser of two fuck ups.
# Thin proxies onto the ES aggregation helpers for the oab index: distinct
# terms, min/max, numeric range, and mapping keys for a given type/field.
API.add 'service/oab/terms/:type/:key', get: () -> return API.es.terms 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/min/:type/:key', get: () -> return API.es.min 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/max/:type/:key', get: () -> return API.es.max 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/range/:type/:key', get: () -> return API.es.range 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/keys/:type', get: () -> return API.es.keys 'oab', this.urlParams.type
# List recent openaccessbutton jobs (GET) or create a new availability-check
# job from a list of processes (POST, authenticated user required).
API.add 'service/oab/job',
  get:
    action: () ->
      jobs = job_job.search({service:'openaccessbutton'},{size:1000,newest:true}).hits.hits
      for j of jobs
        jobs[j] = jobs[j]._source
        # decorate each job with the owner's email and a process count
        ju = API.accounts.retrieve jobs[j].user
        jobs[j].email = ju?.emails[0].address
        jobs[j].processes = if jobs[j].processes? then jobs[j].processes.length else 0
      return jobs
  post:
    roleRequired: 'openaccessbutton.user'
    action: () ->
      # body may be {processes: [...], ...options} or a bare list of processes
      processes = this.request.body.processes ? this.request.body
      for p in processes
        # push job-level options down onto every individual process
        p.plugin = this.request.body.plugin ? 'bulk'
        p.libraries = this.request.body.libraries if this.request.body.libraries?
        p.sources = this.request.body.sources if this.request.body.sources?
        p.all = this.request.body.all ?= false
        p.refresh = 0 if this.request.body.refresh
        p.titles = this.request.body.titles ?= true
      job = API.job.create {refresh:this.request.body.refresh, complete:'API.service.oab.job_complete', user:this.userId, service:'openaccessbutton', function:'API.service.oab.find', name:(this.request.body.name ? "oab_availability"), processes:processes}
      API.service.oab.job_started job
      return job
# Generate an availability job covering every not-yet-received request created
# between :start and :end (both DDMMYYYY; end is inclusive to end of day).
# Admin only. Returns the new job id and how many requests it covers.
API.add 'service/oab/job/generate/:start/:end',
  post:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      start = moment(this.urlParams.start, "DDMMYYYY").valueOf()
      end = moment(this.urlParams.end, "DDMMYYYY").endOf('day').valueOf()
      processes = oab_request.find 'NOT status.exact:received AND createdAt:>' + start + ' AND createdAt:<' + end
      if processes.length
        procs = []
        for p in processes
          # each process only needs the url, plus optional job-level settings
          pro = {url:p.url}
          pro.libraries = this.request.body.libraries if this.request.body.libraries?
          pro.sources = this.request.body.sources if this.request.body.sources?
          procs.push(pro)
        name = 'sys_requests_' + this.urlParams.start + '_' + this.urlParams.end
        jid = API.job.create {complete:'API.service.oab.job_complete', user:this.userId, service:'openaccessbutton', function:'API.service.oab.find', name:name, processes:procs}
        return {job:jid, count:processes.length}
      else
        return {count:0}
# Per-job utility endpoints: progress is public; reload/remove are admin only.
API.add 'service/oab/job/:jid/progress', get: () -> return API.job.progress this.urlParams.jid
API.add 'service/oab/job/:jid/reload',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      return API.job.reload this.urlParams.jid
API.add 'service/oab/job/:jid/remove',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      return API.job.remove this.urlParams.jid
# Turn every job result that found no OA copy and has no existing request into
# a new request on behalf of the current admin; returns the created ids.
API.add 'service/oab/job/:jid/request',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      results = API.job.results this.urlParams.jid
      identifiers = []
      for r in results
        if r.availability.length is 0 and r.requests.length is 0
          rq = {}
          if r.match
            # match strings carry TITLE:/CITATION: prefixes when not a URL/DOI;
            # citations alone are not enough to raise a request
            if r.match.indexOf('TITLE:') is 0
              rq.title = r.match.replace('TITLE:','')
            else if r.match.indexOf('CITATION:') isnt 0
              rq.url = r.match
          if r.meta and r.meta.article
            if r.meta.article.doi
              rq.doi = r.meta.article.doi
              rq.url ?= 'https://doi.org/' + r.meta.article.doi
            rq.title ?= r.meta.article.title
          # a request needs at least a url to be created
          if rq.url
            rq.story = this.queryParams.story ? ''
            created = API.service.oab.request rq, this.userId
            identifiers.push(created) if created
      return identifiers
# Raw job results as JSON (two aliases of the same endpoint for convenience).
API.add 'service/oab/job/:jid/results', get: () -> return API.job.results this.urlParams.jid
API.add 'service/oab/job/:jid/results.json', get: () -> return API.job.results this.urlParams.jid
# Job results as a CSV download. Builds the header from the job args (extra
# input columns, library and source columns), then emits one row per result.
API.add 'service/oab/job/:jid/results.csv',
  get: () ->
    res = API.job.results this.urlParams.jid, true
    inputs = []
    csv = '"MATCH",'
    csv += '"BING","REVERSED",' if API.settings.dev
    csv += '"AVAILABLE","SOURCE","REQUEST","TITLE","DOI"'
    liborder = []
    sources = []
    extras = []
    # the first result's args carry the job-wide libraries/sources settings
    if res.length and res[0].args?
      jargs = JSON.parse res[0].args
      if jargs.libraries?
        for l in jargs.libraries
          liborder.push l
          csv += ',"' + l.toUpperCase() + '"'
      if jargs.sources
        sources = jargs.sources
        for s in sources
          csv += ',"' + s.toUpperCase() + '"'
    # any non-option keys in per-result args become leading passthrough columns
    for er in res
      if er.args?
        erargs = JSON.parse er.args
        for k of erargs
          extras.push(k) if k.toLowerCase() not in ['refresh','library','libraries','sources','plugin','all','titles'] and k not in extras
    if extras.length
      exhd = ''
      exhd += '"' + ex + '",' for ex in extras
      csv = exhd + csv
    for r in res
      row = if r.string then JSON.parse(r.string) else r._raw_result['API.service.oab.find']
      csv += '\n'
      if r.args?
        ea = JSON.parse r.args
        for extra in extras
          csv += '"' + (if ea[extra]? then ea[extra] else '') + '",'
      csv += '"' + (if row.match then row.match.replace('TITLE:','').replace(/"/g,'') + '","' else '","')
      if API.settings.dev
        csv += (if row.meta?.article?.bing then 'Yes' else 'No') + '","'
        csv += (if row.meta?.article?.reversed then 'Yes' else 'No') + '","'
      # AVAILABLE column: url of the first article-type availability, else "No"
      av = 'No'
      if row.availability?
        for a in row.availability
          av = a.url.replace(/"/g,'') if a.type is 'article'
      csv += av + '","'
      csv += row.meta.article.source if av isnt 'No' and row.meta?.article?.source
      csv += '","'
      rq = ''
      if row.requests
        for re in row.requests
          if re.type is 'article'
            rq = 'https://' + (if API.settings.dev then 'dev.' else '') + 'openaccessbutton.org/request/' + re._id
      csv += rq + '","'
      # strip quotes and non-ASCII from titles so the CSV stays well-formed
      csv += row.meta.article.title.replace(/"/g,'').replace(/[^\x00-\x7F]/g, "") if row.meta?.article?.title?
      csv += '","'
      csv += row.meta.article.doi if row.meta?.article?.doi
      csv += '"'
      if row.libraries
        # NOTE(review): `lib` is a library NAME string from liborder, so the
        # lib?.journal?.library / lib?.repository / lib?.local checks below can
        # never be truthy and every row prints "Not available" - presumably
        # this was meant to look the name up in row.libraries. Verify intended
        # data shape before changing.
        for lib in liborder
          csv += ',"'
          js = false
          if lib?.journal?.library
            js = true
            csv += 'Journal subscribed'
          rp = false
          if lib?.repository
            rp = true
            csv += '; ' if js
            csv += 'In repository'
          ll = false
          if lib?.local?.length
            ll = true
            csv += '; ' if js or rp
            csv += 'In library'
          csv += 'Not available' if not js and not rp and not ll
          csv += '"'
      for src in sources
        csv += ',"'
        csv += row.meta.article.found[src] if row.meta?.article?.found?[src]?
        csv += '"'
    job = job_job.get this.urlParams.jid
    name = if job.name then job.name.split('.')[0].replace(/ /g,'_') + '_results' else 'results'
    this.response.writeHead 200,
      'Content-disposition': "attachment; filename="+name+".csv"
      'Content-type': 'text/csv; charset=UTF-8'
      'Content-Encoding': 'UTF-8'
    this.response.end csv
    this.done()
# Service status summary.
API.add 'service/oab/status', get: () -> return API.service.oab.status()
# Embeddable inline-styled HTML widget showing a request, links back to the
# request page, and its supporter count.
API.add 'service/oab/embed/:rid', # is this still needed?
  get: () ->
    rid = this.urlParams.rid
    b = oab_request.get rid
    if b
      # fall back to the url when the request has no title yet
      title = b.title ? b.url
      template = '<div style="width:800px;padding:0;margin:0;"> \
        <div style="padding:0;margin:0;float:left;width:150px;height:200px;background-color:white;border:2px solid #398bc5;;"> \
          <img src="//openaccessbutton.org/static/icon_OAB.png" style="height:100%;width:100%;"> \
        </div> \
        <div style="padding:0;margin:0;float:left;width:400px;height:200px;background-color:#398bc5;;"> \
          <div style="height:166px;"> \
            <p style="margin:2px;color:white;font-size:30px;text-align:center;"> \
              <a target="_blank" href="https://openaccessbutton.org/request/' + rid + '" style="color:white;font-family:Sans-Serif;"> \
                Open Access Button \
              </a> \
            </p> \
            <p style="margin:2px;color:white;font-size:16px;text-align:center;font-family:Sans-Serif;"> \
              Request for content related to the article <br> \
              <a target="_blank" id="oab_article" href="https://openaccessbutton.org/request/' + rid + '" style="font-style:italic;color:white;font-family:Sans-Serif;"> \
                ' + title + '</a> \
            </p> \
          </div> \
          <div style="height:30px;background-color:#f04717;"> \
            <p style="text-align:center;font-size:16px;margin-right:2px;padding-top:1px;"> \
              <a target="_blank" style="color:white;font-family:Sans-Serif;" href="https://openaccessbutton.org/request/' + rid + '"> \
                ADD YOUR SUPPORT \
              </a> \
            </p> \
          </div> \
        </div> \
        <div style="padding:0;margin:0;float:left;width:200px;height:200px;background-color:#212f3f;"> \
          <h1 style="text-align:center;font-size:50px;color:#f04717;font-family:Sans-Serif;" id="oab_counter"> \
            ' + b.count + '</h1> \
          <p style="text-align:center;color:white;font-size:14px;font-family:Sans-Serif;"> \
            people have been unable to access this content, and support this request \
          </p> \
        </div> \
        <div style="width:100%;clear:both;"></div> \
      </div>';
      return {statusCode: 200, body: {status: 'success', data: template}}
    else
      return {statusCode: 404, body: {status: 'error', data:'404 not found'}}
# (stray extraction artifact "| 157656" removed - it was not valid CoffeeScript)
import moment from 'moment'
# these are global so can be accessed on other oabutton files
@oab_support = new API.collection {index:"oab",type:"support"}
@oab_availability = new API.collection {index:"oab",type:"availability"}
@oab_request = new API.collection {index:"oab",type:"request",history:true}
# the normal declaration of API.service.oab is in admin.coffee, because it gets loaded before this api.coffee file
# Root endpoint: public GET banner, plus an authenticated POST access check.
API.add 'service/oab',
  get: () -> {data: 'The Open Access Button API.'}
  post:
    roleRequired:'openaccessbutton.user'
    action: () -> {data: 'You are authenticated'}
# Shared handler for /find and /availability: normalise the incoming metadata
# into a single identifier placed on opts.url, attach user details when signed
# in, then run the availability lookup (unless the url is blacklisted).
_avail =
  authOptional: true
  action: () ->
    opts = if not _.isEmpty(this.request.body) then this.request.body else this.queryParams
    # allow common options on the query string even when a body was posted
    opts.refresh ?= this.queryParams.refresh
    opts.from ?= this.queryParams.from
    opts.plugin ?= this.queryParams.plugin
    opts.all ?= this.queryParams.all
    opts.titles ?= this.queryParams.titles
    # pick the best single identifier, in priority order: doi, url, pmid, pmc,
    # title, citation - the last two are prefixed so find() can tell them apart
    ident = opts.doi
    ident ?= opts.url
    ident ?= 'pmid' + opts.pmid if opts.pmid
    ident ?= 'pmc' + opts.pmc.toLowerCase().replace('pmc','') if opts.pmc
    ident ?= 'TITLE:' + opts.title if opts.title
    ident ?= 'CITATION:' + opts.citation if opts.citation
    opts.url = ident
    # should maybe put auth on the ability to pass in library and libraries...
    opts.libraries = opts.libraries.split(',') if opts.libraries
    opts.sources = opts.sources.split(',') if opts.sources
    if this.user?
      opts.uid = this.userId
      opts.username = this.user.username
      opts.email = this.user.emails[0].address
    return if not opts.test and API.service.oab.blacklist(opts.url) then 400 else {data:API.service.oab.find(opts)}
API.add 'service/oab/find', get:_avail, post:_avail
API.add 'service/oab/availability', get:_avail, post:_avail # exists for legacy reasons
# Resolve an identifier to its best OA location without the full find wrapper.
API.add 'service/oab/resolve',
  get: () ->
    return API.service.oab.resolve this.queryParams,undefined,this.queryParams.sources?.split(','),this.queryParams.all,this.queryParams.titles,this.queryParams.journal
# Inter-library loan: forward the posted details to the named library's ILL handler.
API.add 'service/oab/ill/:library',
  post: () ->
    details = this.request.body
    details.library = this.urlParams.library
    API.service.oab.ill details
# Create a new request (POST, works anonymously) or just confirm access (GET).
API.add 'service/oab/request',
  get:
    roleRequired:'openaccessbutton.user'
    action: () ->
      return {data: 'You have access :)'}
  post:
    authOptional: true
    action: () ->
      req = this.request.body
      # doi/url may also arrive on the query string
      req.doi ?= this.queryParams.doi if this.queryParams.doi?
      req.url ?= this.queryParams.url if this.queryParams.url?
      # anything posted via the dev host is flagged as a test request
      req.test = if this.request.headers.host is 'dev.api.cottagelabs.com' then true else false
      return {data: API.service.oab.request(req,this.user,this.queryParams.fast)}
# Read (GET), update (POST) or delete (DELETE) a single request by its _id.
# The POST handler accumulates only the changed fields into `n`, derives
# status/metadata, and writes once at the end.
API.add 'service/oab/request/:rid',
  get:
    authOptional: true
    action: () ->
      if r = oab_request.get this.urlParams.rid
        r.supports = API.service.oab.supports(this.urlParams.rid,this.userId) if this.userId
        # point at any sibling request of a different type for the same url
        others = oab_request.search({url:r.url})
        if others?
          for o in others.hits.hits
            r.other = o._source._id if o._source._id isnt r._id and o._source.type isnt r.type
        return {data: r}
      else
        return 404
  post:
    roleRequired:'openaccessbutton.user',
    action: () ->
      if r = oab_request.get this.urlParams.rid
        n = {}
        # first supporter with a story becomes the request's named user
        if not r.user? and not r.story? and this.request.body.story
          n.story = this.request.body.story
          n.user = id: this.user._id, email: this.user.emails[0].address, username: (this.user.profile?.firstname ? this.user.username ? this.user.emails[0].address)
          n.user.firstname = this.user.profile?.firstname
          n.user.lastname = this.user.profile?.lastname
          n.user.affiliation = this.user.service?.openaccessbutton?.profile?.affiliation
          n.user.profession = this.user.service?.openaccessbutton?.profile?.profession
          n.count = 1 if not r.count? or r.count is 0
        # admins may change the moderation/metadata fields directly
        if API.accounts.auth 'openaccessbutton.admin', this.user
          n.test ?= this.request.body.test if this.request.body.test? and this.request.body.test isnt r.test
          n.status ?= this.request.body.status if this.request.body.status? and this.request.body.status isnt r.status
          n.rating ?= this.request.body.rating if this.request.body.rating? and this.request.body.rating isnt r.rating
          n.name ?= this.request.body.name if this.request.body.name? and this.request.body.name isnt r.name
          n.email ?= this.request.body.email if this.request.body.email? and this.request.body.email isnt r.email
          n.author_affiliation ?= this.request.body.author_affiliation if this.request.body.author_affiliation? and this.request.body.author_affiliation isnt r.author_affiliation
          n.story ?= this.request.body.story if this.request.body.story? and this.request.body.story isnt r.story
          n.journal ?= this.request.body.journal if this.request.body.journal? and this.request.body.journal isnt r.journal
          n.notes = this.request.body.notes if this.request.body.notes? and this.request.body.notes isnt r.notes
        # non-admins may still set the email while the request is unresolved
        n.email = this.request.body.email if this.request.body.email? and ( API.accounts.auth('openaccessbutton.admin',this.user) || not r.status? || r.status is 'help' || r.status is 'moderate' || r.status is 'refused' )
        # the request's own creator may edit their story
        n.story = this.request.body.story if r.user? and this.userId is r.user.id and this.request.body.story? and this.request.body.story isnt r.story
        n.url ?= this.request.body.url if this.request.body.url? and this.request.body.url isnt r.url
        n.title ?= this.request.body.title if this.request.body.title? and this.request.body.title isnt r.title
        n.doi ?= this.request.body.doi if this.request.body.doi? and this.request.body.doi isnt r.doi
        # auto-approve a story already rated good (1) elsewhere and never rated bad (0)
        if n.story
          res = oab_request.search 'rating:1 AND story.exact:"' + n.story + '"'
          if res.hits.total
            nres = oab_request.search 'rating:0 AND story.exact:"' + n.story + '"'
            n.rating = 1 if nres.hits.total is 0
        # derive status: incomplete requests need 'help'; completed ones move to moderation
        if not n.status?
          if (not r.title and not n.title) || (not r.email and not n.email) || (not r.story and not n.story)
            n.status = 'help' if r.status isnt 'help'
          else if r.status is 'help' and ( (r.title or n.title) and (r.email or n.email) and (r.story or n.story) )
            n.status = 'moderate'
        if n.title? and typeof n.title is 'string'
          try n.title = n.title.charAt(0).toUpperCase() + n.title.slice(1)
        if n.journal? and typeof n.journal is 'string'
          try n.journal = n.journal.charAt(0).toUpperCase() + n.journal.slice(1)
        # extract a DOI embedded in the url if none was provided
        if not n.doi? and not r.doi? and r.url? and r.url.indexOf('10.') isnt -1 and r.url.split('10.')[1].indexOf('/') isnt -1
          n.doi = '10.' + r.url.split('10.')[1]
          r.doi = n.doi
        # enrich missing metadata from Crossref when we have a DOI but no title
        if r.doi and not r.title and not n.title
          try
            cr = API.use.crossref.works.doi r.doi
            n.title = cr.title[0]
            n.author ?= cr.author if not r.author?
            n.journal ?= cr['container-title'][0] if cr['container-title']? and not r.journal?
            n.issn ?= cr.ISSN[0] if cr.ISSN? and not r.issn?
            n.subject ?= cr.subject if not r.subject?
            n.publisher ?= cr.publisher if not r.publisher?
            n.year = cr['published-print']['date-parts'][0][0] if not r.year? and cr['published-print']?['date-parts']? and cr['published-print']['date-parts'].length > 0 and cr['published-print']['date-parts'][0].length > 0
            n.crossref_type = cr.type if not r.crossref_type?
            n.year ?= cr.created['date-time'].split('-')[0] if not r.year? and cr.created?['date-time']?
        r.author_affiliation = n.author_affiliation if n.author_affiliation?
        # close anything Crossref says is not an article
        if n.crossref_type? and ['journal-article', 'proceedings-article'].indexOf(n.crossref_type) is -1
          n.status = 'closed'
          n.closed_on_update = true
          n.closed_on_update_reason = 'notarticle'
        # last resort: guess the author's email via Hunter from their affiliation
        if (not r.email and not n.email) and r.author and r.author.length and (r.author[0].affiliation? or r.author_affiliation)
          try
            # Crossref author objects use `given` = first name and `family` =
            # surname; these two were previously passed the wrong way round
            email = API.use.hunter.email {company: (r.author_affiliation ? r.author[0].affiliation[0].name), first_name: r.author[0].given, last_name: r.author[0].family}, API.settings.service.openaccessbutton.hunter.api_key
            if email?.email?
              n.email = email.email
        oab_request.update(r._id,n) if JSON.stringify(n) isnt '{}'
        return oab_request.get r._id
      else
        return 404
  delete:
    roleRequired:'openaccessbutton.user'
    action: () ->
      # NOTE(review): throws if the request is missing or has no user - confirm
      # callers always pass a valid rid owned by a user
      r = oab_request.get this.urlParams.rid
      oab_request.remove(this.urlParams.rid) if API.accounts.auth('openaccessbutton.admin',this.user) or this.userId is r.user.id
      return {}
# Run a named admin action against a request. Admin only; always returns {}.
API.add 'service/oab/request/:rid/admin/:action',
  get:
    roleRequired:'openaccessbutton.admin'
    action: () ->
      API.service.oab.admin this.urlParams.rid,this.urlParams.action
      return {}
# Add support to a request (GET takes the story from the query string, POST
# from the body), and list a user's supports for a request.
API.add 'service/oab/support/:rid',
  get:
    authOptional: true
    action: () ->
      return API.service.oab.support this.urlParams.rid, this.queryParams.story, this.user
  post:
    authOptional: true
    action: () ->
      return API.service.oab.support this.urlParams.rid, this.request.body.story, this.user
API.add 'service/oab/supports/:rid',
  get:
    roleRequired:'openaccessbutton.user'
    action: () ->
      return API.service.oab.supports this.urlParams.rid, this.user
# Generic search endpoints over the supports / availabilities / requests
# collections, plus request history and a CSV dump of request search results.
API.add 'service/oab/supports',
  get: () -> return oab_support.search this.queryParams
  post: () -> return oab_support.search this.bodyParams
API.add 'service/oab/availabilities',
  get: () -> return oab_availability.search this.queryParams
  post: () -> return oab_availability.search this.bodyParams
API.add 'service/oab/requests',
  get: () -> return oab_request.search this.queryParams
  post: () -> return oab_request.search this.bodyParams
API.add 'service/oab/requests.csv', { get: (() -> API.convert.json2csv2response(this, oab_request.search(this.queryParams ? this.bodyParams))), post: (() -> API.convert.json2csv2response(this, oab_request.search(this.queryParams ? this.bodyParams))) }
API.add 'service/oab/history',
  get: () -> return oab_request.history this.queryParams
  post: () -> return oab_request.history this.bodyParams
# Admin-only search over user accounts holding any openaccessbutton role.
API.add 'service/oab/users',
  get:
    roleRequired:'openaccessbutton.admin'
    action: () -> return Users.search this.queryParams, {restrict:[{exists:{field:'roles.openaccessbutton'}}]}
  post:
    roleRequired:'openaccessbutton.admin'
    action: () -> return Users.search this.bodyParams, {restrict:[{exists:{field:'roles.openaccessbutton'}}]}
# Assorted helpers: page scrape, redirect resolution, blacklist check, mail
# template retrieval and variable substitution, and admin mail sending.
API.add 'service/oab/scrape',
  get:
    #roleRequired:'openaccessbutton.user'
    action: () -> return {data:API.service.oab.scrape(this.queryParams.url,this.queryParams.content,this.queryParams.doi)}
API.add 'service/oab/redirect',
  get: () -> return API.service.oab.redirect this.queryParams.url
API.add 'service/oab/blacklist',
  get: () -> return {data:API.service.oab.blacklist(undefined,undefined,this.queryParams.stale)}
API.add 'service/oab/templates',
  get: () -> return API.service.oab.template(this.queryParams.template,this.queryParams.refresh)
API.add 'service/oab/substitute',
  post: () -> return API.service.oab.substitute this.request.body.content,this.request.body.vars,this.request.body.markdown
API.add 'service/oab/mail',
  post:
    roleRequired:'openaccessbutton.admin'
    action: () -> return API.service.oab.mail this.request.body
# Receive a fulfilled request: :rid here is the request's receiver token, not
# its _id. GET looks it up; POST attaches the provided copy/details.
API.add 'service/oab/receive/:rid',
  get: () -> return if r = oab_request.find({receiver:this.urlParams.rid}) then r else 404
  post:
    authOptional: true
    action: () ->
      if r = oab_request.find {receiver:this.urlParams.rid}
        # admin flag only honoured for authenticated admins
        admin = this.bodyParams.admin and this.userId and API.accounts.auth('openaccessbutton.admin',this.user)
        return API.service.oab.receive this.urlParams.rid, this.request.files, this.bodyParams.url, this.bodyParams.title, this.bodyParams.description, this.bodyParams.firstname, this.bodyParams.lastname, undefined, admin
      else
        return 404
# Re-run the deposit step for a request (admin only).
API.add 'service/oab/redeposit/:rid',
  post:
    roleRequired: 'openaccessbutton.admin'
    action: () -> return API.service.oab.redeposit this.urlParams.rid
# Author response link: :rid is the receiver token, :holdrefuse is either the
# literal 'refuse' or an integer number of days to put the request on hold.
API.add 'service/oab/receive/:rid/:holdrefuse',
  get: () ->
    if r = oab_request.find {receiver:this.urlParams.rid}
      if this.urlParams.holdrefuse is 'refuse'
        API.service.oab.refuse r._id, this.queryParams.reason
      else
        if isNaN(parseInt(this.urlParams.holdrefuse))
          return 400
        else
          API.service.oab.hold r._id, parseInt(this.urlParams.holdrefuse)
      return true
    else
      return 404
# Do-not-request (DNR) checks and management. GET without an email lists all
# DNR entries (admin); with an email it classifies why that address should not
# be mailed ('user', 'creator', 'supporter', 'invalid', or a DNR record).
API.add 'service/oab/dnr',
  get:
    authOptional: true
    action: () ->
      return API.service.oab.dnr() if not this.queryParams.email? and this.user and API.accounts.auth 'openaccessbutton.admin', this.user
      d = {}
      d.dnr = API.service.oab.dnr this.queryParams.email
      if not d.dnr and this.queryParams.user
        # assumes the given user id resolves to an account with an email - TODO confirm
        u = API.accounts.retrieve this.queryParams.user
        d.dnr = 'user' if u.emails[0].address is this.queryParams.email
      if not d.dnr and this.queryParams.request
        r = oab_request.get this.queryParams.request
        d.dnr = 'creator' if r.user.email is this.queryParams.email
      if not d.dnr
        d.dnr = 'supporter' if oab_support.find {rid:this.queryParams.request, email:this.queryParams.email}
      if not d.dnr and this.queryParams.validate
        # optional deliverability check via the mail service
        d.validation = API.mail.validate this.queryParams.email, API.settings.service?.openaccessbutton?.mail?.pubkey
        d.dnr = 'invalid' if not d.validation.is_valid
      return d
  post: () ->
    e = this.queryParams.email ? this.request.body.email
    # refuse defaults to true unless explicitly passed as false
    refuse = if this.queryParams.refuse in ['false',false] then false else true
    return if e then API.service.oab.dnr(e,true,refuse) else 400
  delete:
    authRequired: 'openaccessbutton.admin'
    action: () ->
      oab_dnr.remove({email:this.queryParams.email}) if this.queryParams.email
      return {}
# Feedback/bug form handler: forwards the submission by email then redirects
# the browser back to the site's thanks anchor.
API.add 'service/oab/bug',
  post: () ->
    whoto = ['<EMAIL>']
    try
      # 'wrong' form submissions get an extra recipient
      if this.request.body?.form is 'wrong'
        whoto.push '<EMAIL>'
    API.mail.send {
      service: 'openaccessbutton',
      from: '<EMAIL>',
      to: whoto,
      subject: 'Feedback form submission',
      text: JSON.stringify(this.request.body,undefined,2)
    }
    return {
      statusCode: 302,
      headers: {
        'Content-Type': 'text/plain',
        'Location': (if API.settings.dev then 'https://dev.openaccessbutton.org' else 'https://openaccessbutton.org') + '/bug#defaultthanks'
      },
      body: 'Location: ' + (if API.settings.dev then 'https://dev.openaccessbutton.org' else 'https://openaccessbutton.org') + '/bug#defaultthanks'
    }
# Bulk import of partial updates to existing oab_request records (admin only).
# Body is a JSON list; each entry must carry an _id plus the (possibly dotted,
# e.g. "refused.date") fields to change. Responds with found/updated counts
# and the ids that could not be matched.
API.add 'service/oab/import',
  post:
    roleRequired: 'openaccessbutton.admin', # later could be opened to other oab users, with some sort of quota / limit
    action: () ->
      try
        resp = {found:0,updated:0,missing:[]}
        # Collect every changed record across ALL rows and import once at the
        # end. (Previously re-initialised inside the loop, so only the last
        # matched record was imported - and it was undefined when none matched.)
        updates = []
        for p in this.request.body
          if p._id
            rq = oab_request.get p._id
            if rq
              resp.found += 1
              # Changed-fields accumulator for the audit trail below. Must be a
              # plain object: JSON.stringify of a list with named properties
              # serialises to "[]" and loses the audit data.
              update = {}
              for up of p
                p[up] = undefined if p[up] is 'DELETE' # this is not used yet
                # skip system/derived KEYS (the old check compared the VALUE
                # p[up] against these names, which never matched anything)
                if (not p[up]? or p[up]) and up not in ['createdAt','created_date','plugin','from','embedded','names']
                  if up.indexOf('refused.') is 0 and ( not rq.refused? or rq.refused[up.split('.')[1]] isnt p[up] )
                    rq.refused ?= {}
                    rq.refused[up.split('.')[1]] = p[up]
                    update.refused ?= {}
                    update.refused[up.split('.')[1]] = p[up]
                  else if up.indexOf('received.') is 0 and ( not rq.received? or rq.received[up.split('.')[1]] isnt p[up] )
                    rq.received ?= {}
                    rq.received[up.split('.')[1]] = p[up]
                    update.received = rq.received
                  else if up.indexOf('followup.') is 0
                    # only a changed followup.count triggers an update; the date
                    # list just records when each followup was registered
                    if up isnt 'followup.date' and p['followup.count'] isnt rq.followup?.count
                      rq.followup ?= {}
                      rq.followup.count = p['followup.count']
                      rq.followup.date ?= []
                      rq.followup.date.push moment(Date.now(), "x").format "YYYYMMDD"
                      update.followup = rq.followup
                  else if up is 'sherpa.color' and ( not rq.sherpa? or rq.sherpa.color isnt p[up] )
                    rq.sherpa = {color:p[up]}
                    update.sherpa = rq.sherpa
                  else if rq[up] isnt p[up]
                    rq[up] = p[up]
                    update[up] = rq[up]
              # keep a per-import audit of exactly what changed, keyed by timestamp
              try
                rq._bulk_import ?= {}
                rq._bulk_import[Date.now()] = JSON.stringify update
              # TODO should update collection update to take lists of records that it updates in bulk
              rq.updatedAt = Date.now()
              rq.updated_date = moment(rq.updatedAt, "x").format "YYYY-MM-DD HHmm.ss"
              updates.push rq
              resp.updated += 1
            else
              resp.missing.push p._id
        if updates.length
          resp.imports = oab_request.import(updates)
        return resp
      catch err
        return {status:'error'}
#match = {must:[{term:{'signature.exact':proc.signature}}], must_not:[{exists:{field:'_raw_result.error'}}]}
#try
# if typeof job.refresh is 'number' and job.refresh isnt 0
# d = new Date()
# match.must.push {range:{createdAt:{gt:d.setDate(d.getDate() - job.refresh)}}}
# Admin-only CSV export. :what selects the dataset: 'request' (fixed column
# set), 'dnr' / 'mail' (columns discovered from the records), or 'changes'
# (the request history log). ?from= / ?to= (epoch ms) bound createdAt.
API.add 'service/oab/export/:what',
  get:
    roleRequired: 'openaccessbutton.admin',
    action: () ->
      results = []
      fields = if this.urlParams.what is 'changes' then ['_id','createdAt','created_date','action'] else if this.urlParams.what is 'request' then ['_id','created_date','type','count','status','title','url','doi','journal','publisher','sherpa.color','name','names','email','author_affiliation','user.username','user.email','user.firstname','user.lastname','story','rating','receiver','followup.count','followup.date','refused.email','refused.date','received.date','received.from','received.description','received.url','received.admin','received.cron','received.notfromauthor','notes','plugin','from','embedded'] else []
      match = {}
      match.range = {createdAt: {}} if this.queryParams.from or this.queryParams.to
      match.range.createdAt.gte = this.queryParams.from if this.queryParams.from
      # 86400000ms = 1 day, so the whole 'to' day is included
      match.range.createdAt.lte = parseInt(this.queryParams.to) + 86400000 if this.queryParams.to
      if this.urlParams.what is 'dnr' or this.urlParams.what is 'mail' or this.urlParams.what is 'request'
        results = if this.urlParams.what is 'dnr' then oab_dnr.fetch(match, true) else if this.urlParams.what is 'request' then oab_request.fetch(match, true) else mail_progress.fetch match, true
        for r of results
          if this.urlParams.what isnt 'request'
            # collect every key seen in any record as a CSV column
            for f of results[r]
              fields.push(f) if fields.indexOf(f) is -1
          else
            # flatten author objects into a simple list of display names
            results[r].names = []
            if results[r].author?
              for a in results[r].author
                if a.family
                  results[r].names.push a.given + ' ' + a.family
      else if this.urlParams.what is 'changes'
        res = oab_request.fetch_history match, true
        for r in res
          m = {
            action: r.action,
            _id: r.document,
            createdAt: r.createdAt,
            created_date: r.created_date
          }
          if r.action
            # copy the fields changed by this history action into the row
            for mr of r[r.action]
              fields.push(mr) if fields.indexOf(mr) is -1
              m[mr] = r[r.action][mr]
          if r.string
            fields.push('string') if fields.indexOf('string') is -1
            m.string = r.string
          results.push m
      csv = API.convert.json2csv {fields:fields}, undefined, results
      name = 'export_' + this.urlParams.what
      this.response.writeHead(200, {
        'Content-disposition': "attachment; filename="+name+".csv",
        'Content-type': 'text/csv; charset=UTF-8',
        'Content-Encoding': 'UTF-8'
      })
      this.response.end(csv)
      this.done() # added this now that underlying lib has been rewritten - should work without crash. Below note left for posterity.
      # NOTE: this should really return to stop restivus throwing an error, and should really include
      # the file length in the above head call, but this causes an intermittent write afer end error
      # which crashes the whole system. So pick the lesser of two fuck ups.
# Thin proxies onto the shared elasticsearch helpers for the 'oab' index.
API.add 'service/oab/terms/:type/:key', get: () -> return API.es.terms 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/min/:type/:key', get: () -> return API.es.min 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/max/:type/:key', get: () -> return API.es.max 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/range/:type/:key', get: () -> return API.es.range 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/keys/:type', get: () -> return API.es.keys 'oab', this.urlParams.type
# List recent OAB jobs (GET, newest 1000) or create an availability job (POST).
API.add 'service/oab/job',
  get:
    action: () ->
      jobs = job_job.search({service:'openaccessbutton'},{size:1000,newest:true}).hits.hits
      for j of jobs
        jobs[j] = jobs[j]._source
        ju = API.accounts.retrieve jobs[j].user
        # NOTE(review): assumes a retrieved account always has >=1 email - confirm
        jobs[j].email = ju?.emails[0].address
        jobs[j].processes = if jobs[j].processes? then jobs[j].processes.length else 0
      return jobs
  post:
    roleRequired: 'openaccessbutton.user'
    action: () ->
      # the body may be a bare list of processes, or an object carrying
      # {processes:[...]} plus job-level options copied onto each process
      processes = this.request.body.processes ? this.request.body
      for p in processes
        p.plugin = this.request.body.plugin ? 'bulk'
        p.libraries = this.request.body.libraries if this.request.body.libraries?
        p.sources = this.request.body.sources if this.request.body.sources?
        p.all = this.request.body.all ?= false
        # NOTE(review): zeroes the per-process refresh whenever a job-level
        # refresh is supplied (handled on the job record below) - confirm intent
        p.refresh = 0 if this.request.body.refresh
        p.titles = this.request.body.titles ?= true
      job = API.job.create {refresh:this.request.body.refresh, complete:'API.service.oab.job_complete', user:this.userId, service:'openaccessbutton', function:'API.service.oab.find', name:(this.request.body.name ? "oab_availability"), processes:processes}
      API.service.oab.job_started job
      return job
# Build a job from all not-yet-received requests created between :start and :end (DDMMYYYY).
API.add 'service/oab/job/generate/:start/:end',
  post:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      start = moment(this.urlParams.start, "DDMMYYYY").valueOf()
      end = moment(this.urlParams.end, "DDMMYYYY").endOf('day').valueOf()
      processes = oab_request.find 'NOT status.exact:received AND createdAt:>' + start + ' AND createdAt:<' + end
      if processes.length
        procs = []
        for p in processes
          pro = {url:p.url}
          pro.libraries = this.request.body.libraries if this.request.body.libraries?
          pro.sources = this.request.body.sources if this.request.body.sources?
          procs.push(pro)
        name = 'sys_requests_' + this.urlParams.start + '_' + this.urlParams.end
        jid = API.job.create {complete:'API.service.oab.job_complete', user:this.userId, service:'openaccessbutton', function:'API.service.oab.find', name:name, processes:procs}
        return {job:jid, count:processes.length}
      else
        return {count:0}
# Job housekeeping; progress is public, reload/remove are admin-only.
API.add 'service/oab/job/:jid/progress', get: () -> return API.job.progress this.urlParams.jid
API.add 'service/oab/job/:jid/reload',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      return API.job.reload this.urlParams.jid
API.add 'service/oab/job/:jid/remove',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      return API.job.remove this.urlParams.jid
# Create OAB requests for every job result that found neither availability nor
# an existing request; returns the list of request ids created.
API.add 'service/oab/job/:jid/request',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      results = API.job.results this.urlParams.jid
      identifiers = []
      for r in results
        if r.availability.length is 0 and r.requests.length is 0
          rq = {}
          if r.match
            # match strings are type-prefixed; CITATION: matches cannot become requests
            if r.match.indexOf('TITLE:') is 0
              rq.title = r.match.replace('TITLE:','')
            else if r.match.indexOf('CITATION:') isnt 0
              rq.url = r.match
          if r.meta and r.meta.article
            if r.meta.article.doi
              rq.doi = r.meta.article.doi
              rq.url ?= 'https://doi.org/' + r.meta.article.doi
            rq.title ?= r.meta.article.title
          # a URL is the minimum needed to create a request
          if rq.url
            rq.story = this.queryParams.story ? ''
            created = API.service.oab.request rq, this.userId
            identifiers.push(created) if created
      return identifiers
# Raw job results in JSON form (two spellings of the same endpoint).
API.add 'service/oab/job/:jid/results', get: () -> return API.job.results this.urlParams.jid
API.add 'service/oab/job/:jid/results.json', get: () -> return API.job.results this.urlParams.jid
# Stream job results as a hand-built CSV download. Columns: any extra per-row
# input args, MATCH, (dev-only BING/REVERSED), AVAILABLE, SOURCE, REQUEST,
# TITLE, DOI, then one column per configured library and per source.
API.add 'service/oab/job/:jid/results.csv',
  get: () ->
    res = API.job.results this.urlParams.jid, true
    inputs = []
    csv = '"MATCH",'
    csv += '"BING","REVERSED",' if API.settings.dev
    csv += '"AVAILABLE","SOURCE","REQUEST","TITLE","DOI"'
    liborder = []
    sources = []
    extras = []
    # library/source column order is taken from the first result's job args
    if res.length and res[0].args?
      jargs = JSON.parse res[0].args
      if jargs.libraries?
        for l in jargs.libraries
          liborder.push l
          csv += ',"' + l.toUpperCase() + '"'
      if jargs.sources
        sources = jargs.sources
        for s in sources
          csv += ',"' + s.toUpperCase() + '"'
    # collect any extra user-supplied arg keys to prepend as leading columns
    for er in res
      if er.args?
        erargs = JSON.parse er.args
        for k of erargs
          extras.push(k) if k.toLowerCase() not in ['refresh','library','libraries','sources','plugin','all','titles'] and k not in extras
    if extras.length
      exhd = ''
      exhd += '"' + ex + '",' for ex in extras
      csv = exhd + csv
    for r in res
      row = if r.string then JSON.parse(r.string) else r._raw_result['API.service.oab.find']
      csv += '\n'
      if r.args?
        ea = JSON.parse r.args
        for extra in extras
          csv += '"' + (if ea[extra]? then ea[extra] else '') + '",'
      csv += '"' + (if row.match then row.match.replace('TITLE:','').replace(/"/g,'') + '","' else '","')
      if API.settings.dev
        csv += (if row.meta?.article?.bing then 'Yes' else 'No') + '","'
        csv += (if row.meta?.article?.reversed then 'Yes' else 'No') + '","'
      av = 'No'
      if row.availability?
        for a in row.availability
          av = a.url.replace(/"/g,'') if a.type is 'article'
      csv += av + '","'
      csv += row.meta.article.source if av isnt 'No' and row.meta?.article?.source
      csv += '","'
      rq = ''
      if row.requests
        for re in row.requests
          if re.type is 'article'
            rq = 'https://' + (if API.settings.dev then 'dev.' else '') + 'openaccessbutton.org/request/' + re._id
      csv += rq + '","'
      csv += row.meta.article.title.replace(/"/g,'').replace(/[^\x00-\x7F]/g, "") if row.meta?.article?.title?
      csv += '","'
      csv += row.meta.article.doi if row.meta?.article?.doi
      csv += '"'
      if row.libraries
        # NOTE(review): liborder holds library NAMES (strings), so the
        # lib?.journal / lib?.repository / lib?.local lookups below are always
        # undefined - looks like this should read row.libraries[lib]; confirm
        for lib in liborder
          csv += ',"'
          js = false
          if lib?.journal?.library
            js = true
            csv += 'Journal subscribed'
          rp = false
          if lib?.repository
            rp = true
            csv += '; ' if js
            csv += 'In repository'
          ll = false
          if lib?.local?.length
            ll = true
            csv += '; ' if js or rp
            csv += 'In library'
          csv += 'Not available' if not js and not rp and not ll
          csv += '"'
      for src in sources
        csv += ',"'
        csv += row.meta.article.found[src] if row.meta?.article?.found?[src]?
        csv += '"'
    job = job_job.get this.urlParams.jid
    name = if job.name then job.name.split('.')[0].replace(/ /g,'_') + '_results' else 'results'
    this.response.writeHead 200,
      'Content-disposition': "attachment; filename="+name+".csv"
      'Content-type': 'text/csv; charset=UTF-8'
      'Content-Encoding': 'UTF-8'
    this.response.end csv
    this.done()
API.add 'service/oab/status', get: () -> return API.service.oab.status()
# Returns a snippet of embeddable inline-styled HTML showing the state of
# request :rid (link, title, and supporter count).
API.add 'service/oab/embed/:rid', # is this still needed?
  get: () ->
    rid = this.urlParams.rid
    b = oab_request.get rid
    if b
      title = b.title ? b.url
      # one string literal; trailing backslashes continue it across lines
      template = '<div style="width:800px;padding:0;margin:0;"> \
        <div style="padding:0;margin:0;float:left;width:150px;height:200px;background-color:white;border:2px solid #398bc5;;"> \
          <img src="//openaccessbutton.org/static/icon_OAB.png" style="height:100%;width:100%;"> \
        </div> \
        <div style="padding:0;margin:0;float:left;width:400px;height:200px;background-color:#398bc5;;"> \
          <div style="height:166px;"> \
            <p style="margin:2px;color:white;font-size:30px;text-align:center;"> \
              <a target="_blank" href="https://openaccessbutton.org/request/' + rid + '" style="color:white;font-family:Sans-Serif;"> \
                Open Access Button \
              </a> \
            </p> \
            <p style="margin:2px;color:white;font-size:16px;text-align:center;font-family:Sans-Serif;"> \
              Request for content related to the article <br> \
              <a target="_blank" id="oab_article" href="https://openaccessbutton.org/request/' + rid + '" style="font-style:italic;color:white;font-family:Sans-Serif;"> \
                ' + title + '</a> \
            </p> \
          </div> \
          <div style="height:30px;background-color:#f04717;"> \
            <p style="text-align:center;font-size:16px;margin-right:2px;padding-top:1px;"> \
              <a target="_blank" style="color:white;font-family:Sans-Serif;" href="https://openaccessbutton.org/request/' + rid + '"> \
                ADD YOUR SUPPORT \
              </a> \
            </p> \
          </div> \
        </div> \
        <div style="padding:0;margin:0;float:left;width:200px;height:200px;background-color:#212f3f;"> \
          <h1 style="text-align:center;font-size:50px;color:#f04717;font-family:Sans-Serif;" id="oab_counter"> \
            ' + b.count + '</h1> \
          <p style="text-align:center;color:white;font-size:14px;font-family:Sans-Serif;"> \
            people have been unable to access this content, and support this request \
          </p> \
        </div> \
        <div style="width:100%;clear:both;"></div> \
      </div>';
      return {statusCode: 200, body: {status: 'success', data: template}}
    else
      return {statusCode: 404, body: {status: 'error', data:'404 not found'}}
| true |
import moment from 'moment'
# these are global so can be accessed on other oabutton files
# (history:true on requests keeps an audit trail used by /history and export)
@oab_support = new API.collection {index:"oab",type:"support"}
@oab_availability = new API.collection {index:"oab",type:"availability"}
@oab_request = new API.collection {index:"oab",type:"request",history:true}
# the normal declaration of API.service.oab is in admin.coffee, because it gets loaded before this api.coffee file
# Root endpoint: simple liveness check (GET) and auth check (POST).
API.add 'service/oab',
  get: () ->
    return {data: 'The Open Access Button API.'}
  post:
    roleRequired:'openaccessbutton.user'
    action: () ->
      return {data: 'You are authenticated'}
# Shared handler for the availability/find endpoints. Accepts an identifier as
# doi, url, pmid, pmc, title or citation (checked in that priority order) and
# normalises it into opts.url before calling the main find routine.
_avail =
  authOptional: true
  action: () ->
    opts = if not _.isEmpty(this.request.body) then this.request.body else this.queryParams
    # query-string params may accompany a POST body
    opts.refresh ?= this.queryParams.refresh
    opts.from ?= this.queryParams.from
    opts.plugin ?= this.queryParams.plugin
    opts.all ?= this.queryParams.all
    opts.titles ?= this.queryParams.titles
    ident = opts.doi
    ident ?= opts.url
    ident ?= 'pmid' + opts.pmid if opts.pmid
    ident ?= 'pmc' + opts.pmc.toLowerCase().replace('pmc','') if opts.pmc
    ident ?= 'TITLE:' + opts.title if opts.title
    ident ?= 'CITATION:' + opts.citation if opts.citation
    opts.url = ident
    # should maybe put auth on the ability to pass in library and libraries...
    opts.libraries = opts.libraries.split(',') if opts.libraries
    opts.sources = opts.sources.split(',') if opts.sources
    if this.user?
      opts.uid = this.userId
      opts.username = this.user.username
      opts.email = this.user.emails[0].address
    # blacklisted URLs are rejected outright unless running a test
    return if not opts.test and API.service.oab.blacklist(opts.url) then 400 else {data:API.service.oab.find(opts)}
API.add 'service/oab/find', get:_avail, post:_avail
API.add 'service/oab/availability', get:_avail, post:_avail # exists for legacy reasons
# Resolve an identifier to availability sources without the full find pipeline.
API.add 'service/oab/resolve',
  get: () ->
    return API.service.oab.resolve this.queryParams,undefined,this.queryParams.sources?.split(','),this.queryParams.all,this.queryParams.titles,this.queryParams.journal
# Inter-library loan submission for a given :library.
API.add 'service/oab/ill/:library',
  post: () ->
    opts = this.request.body;
    opts.library = this.urlParams.library;
    return API.service.oab.ill opts
# Create a request (POST, anonymous allowed); GET is just an auth check.
API.add 'service/oab/request',
  get:
    roleRequired:'openaccessbutton.user'
    action: () ->
      return {data: 'You have access :)'}
  post:
    authOptional: true
    action: () ->
      req = this.request.body
      req.doi ?= this.queryParams.doi if this.queryParams.doi?
      req.url ?= this.queryParams.url if this.queryParams.url?
      # requests made via the dev API host are flagged as test data
      req.test = if this.request.headers.host is 'dev.api.cottagelabs.com' then true else false
      return {data: API.service.oab.request(req,this.user,this.queryParams.fast)}
# Single-request endpoints. GET returns the request (plus the caller's support
# status and any sibling request of a different type for the same URL). POST
# merges field edits under per-field permission rules, enriching from Crossref
# and Hunter where data is missing. DELETE removes it for admins or the owner.
API.add 'service/oab/request/:rid',
  get:
    authOptional: true
    action: () ->
      if r = oab_request.get this.urlParams.rid
        r.supports = API.service.oab.supports(this.urlParams.rid,this.userId) if this.userId
        others = oab_request.search({url:r.url})
        if others?
          for o in others.hits.hits
            # link a request of a different type for the same URL, if any
            r.other = o._source._id if o._source._id isnt r._id and o._source.type isnt r.type
        return {data: r}
      else
        return 404
  post:
    roleRequired:'openaccessbutton.user',
    action: () ->
      if r = oab_request.get this.urlParams.rid
        n = {} # the set of changes to persist
        # an unowned, story-less request is claimed by the current user
        if not r.user? and not r.story? and this.request.body.story
          n.story = this.request.body.story
          n.user = id: this.user._id, email: this.user.emails[0].address, username: (this.user.profile?.firstname ? this.user.username ? this.user.emails[0].address)
          n.user.firstname = this.user.profile?.firstname
          n.user.lastname = this.user.profile?.lastname
          n.user.affiliation = this.user.service?.openaccessbutton?.profile?.affiliation
          n.user.profession = this.user.service?.openaccessbutton?.profile?.profession
          n.count = 1 if not r.count? or r.count is 0
        # fields only admins may edit directly
        if API.accounts.auth 'openaccessbutton.admin', this.user
          n.test ?= this.request.body.test if this.request.body.test? and this.request.body.test isnt r.test
          n.status ?= this.request.body.status if this.request.body.status? and this.request.body.status isnt r.status
          n.rating ?= this.request.body.rating if this.request.body.rating? and this.request.body.rating isnt r.rating
          n.name ?= this.request.body.name if this.request.body.name? and this.request.body.name isnt r.name
          n.email ?= this.request.body.email if this.request.body.email? and this.request.body.email isnt r.email
          n.author_affiliation ?= this.request.body.author_affiliation if this.request.body.author_affiliation? and this.request.body.author_affiliation isnt r.author_affiliation
          n.story ?= this.request.body.story if this.request.body.story? and this.request.body.story isnt r.story
          n.journal ?= this.request.body.journal if this.request.body.journal? and this.request.body.journal isnt r.journal
          n.notes = this.request.body.notes if this.request.body.notes? and this.request.body.notes isnt r.notes
        # email edits are open to admins, or to anyone while the request is still unresolved
        n.email = this.request.body.email if this.request.body.email? and ( API.accounts.auth('openaccessbutton.admin',this.user) || not r.status? || r.status is 'help' || r.status is 'moderate' || r.status is 'refused' )
        n.story = this.request.body.story if r.user? and this.userId is r.user.id and this.request.body.story? and this.request.body.story isnt r.story
        n.url ?= this.request.body.url if this.request.body.url? and this.request.body.url isnt r.url
        n.title ?= this.request.body.title if this.request.body.title? and this.request.body.title isnt r.title
        n.doi ?= this.request.body.doi if this.request.body.doi? and this.request.body.doi isnt r.doi
        # auto-approve a story previously rated good and never rated bad
        if n.story
          res = oab_request.search 'rating:1 AND story.exact:"' + n.story + '"'
          if res.hits.total
            nres = oab_request.search 'rating:0 AND story.exact:"' + n.story + '"'
            n.rating = 1 if nres.hits.total is 0
        # a request needs title, email and story to leave the 'help' state
        if not n.status?
          if (not r.title and not n.title) || (not r.email and not n.email) || (not r.story and not n.story)
            n.status = 'help' if r.status isnt 'help'
          else if r.status is 'help' and ( (r.title or n.title) and (r.email or n.email) and (r.story or n.story) )
            n.status = 'moderate'
        if n.title? and typeof n.title is 'string'
          try n.title = n.title.charAt(0).toUpperCase() + n.title.slice(1)
        if n.journal? and typeof n.journal is 'string'
          try n.journal = n.journal.charAt(0).toUpperCase() + n.journal.slice(1)
        # extract a DOI embedded in the URL if none is known yet
        if not n.doi? and not r.doi? and r.url? and r.url.indexOf('10.') isnt -1 and r.url.split('10.')[1].indexOf('/') isnt -1
          n.doi = '10.' + r.url.split('10.')[1]
          r.doi = n.doi
        # enrich missing metadata from Crossref when only a DOI is known
        if r.doi and not r.title and not n.title
          try
            cr = API.use.crossref.works.doi r.doi
            n.title = cr.title[0]
            n.author ?= cr.author if not r.author?
            n.journal ?= cr['container-title'][0] if cr['container-title']? and not r.journal?
            n.issn ?= cr.ISSN[0] if cr.ISSN? and not r.issn?
            n.subject ?= cr.subject if not r.subject?
            n.publisher ?= cr.publisher if not r.publisher?
            n.year = cr['published-print']['date-parts'][0][0] if not r.year? and cr['published-print']?['date-parts']? and cr['published-print']['date-parts'].length > 0 and cr['published-print']['date-parts'][0].length > 0
            n.crossref_type = cr.type if not r.crossref_type?
            n.year ?= cr.created['date-time'].split('-')[0] if not r.year? and cr.created?['date-time']?
        r.author_affiliation = n.author_affiliation if n.author_affiliation?
        # non-article Crossref types cannot be requested
        if n.crossref_type? and ['journal-article', 'proceedings-article'].indexOf(n.crossref_type) is -1
          n.status = 'closed'
          n.closed_on_update = true
          n.closed_on_update_reason = 'notarticle'
        if (not r.email and not n.email) and r.author and r.author.length and (r.author[0].affiliation? or r.author_affiliation)
          try
            # look up a likely author contact address. Crossref authors carry
            # given (first) / family (last); the original call passed them
            # swapped (first_name: family), corrected here.
            email = API.use.hunter.email {company: (r.author_affiliation ? r.author[0].affiliation[0].name), first_name: r.author[0].given, last_name: r.author[0].family}, API.settings.service.openaccessbutton.hunter.api_key
            if email?.email?
              n.email = email.email
        oab_request.update(r._id,n) if JSON.stringify(n) isnt '{}'
        return oab_request.get r._id
      else
        return 404
  delete:
    roleRequired:'openaccessbutton.user'
    action: () ->
      r = oab_request.get this.urlParams.rid
      # guard: a missing record (or one with no owning user) previously threw
      # on the unguarded r.user.id dereference below, returning a 500
      return 404 if not r?
      oab_request.remove(this.urlParams.rid) if API.accounts.auth('openaccessbutton.admin',this.user) or (r.user? and this.userId is r.user.id)
      return {}
# Admin actions on a request (e.g. moderation) delegated to the service layer.
API.add 'service/oab/request/:rid/admin/:action',
  get:
    roleRequired:'openaccessbutton.admin'
    action: () ->
      API.service.oab.admin this.urlParams.rid,this.urlParams.action
      return {}
# Add support (and optionally a story) to a request; anonymous allowed.
API.add 'service/oab/support/:rid',
  get:
    authOptional: true
    action: () ->
      return API.service.oab.support this.urlParams.rid, this.queryParams.story, this.user
  post:
    authOptional: true
    action: () ->
      return API.service.oab.support this.urlParams.rid, this.request.body.story, this.user
# Whether the logged-in user supports request :rid.
API.add 'service/oab/supports/:rid',
  get:
    roleRequired:'openaccessbutton.user'
    action: () ->
      return API.service.oab.supports this.urlParams.rid, this.user
# Generic searches over the collections declared at the top of this file.
API.add 'service/oab/supports',
  get: () -> return oab_support.search this.queryParams
  post: () -> return oab_support.search this.bodyParams
API.add 'service/oab/availabilities',
  get: () -> return oab_availability.search this.queryParams
  post: () -> return oab_availability.search this.bodyParams
API.add 'service/oab/requests',
  get: () -> return oab_request.search this.queryParams
  post: () -> return oab_request.search this.bodyParams
API.add 'service/oab/requests.csv', { get: (() -> API.convert.json2csv2response(this, oab_request.search(this.queryParams ? this.bodyParams))), post: (() -> API.convert.json2csv2response(this, oab_request.search(this.queryParams ? this.bodyParams))) }
API.add 'service/oab/history',
  get: () -> return oab_request.history this.queryParams
  post: () -> return oab_request.history this.bodyParams
# Search user accounts holding any openaccessbutton role (admin only).
API.add 'service/oab/users',
  get:
    roleRequired:'openaccessbutton.admin'
    action: () -> return Users.search this.queryParams, {restrict:[{exists:{field:'roles.openaccessbutton'}}]}
  post:
    roleRequired:'openaccessbutton.admin'
    action: () -> return Users.search this.bodyParams, {restrict:[{exists:{field:'roles.openaccessbutton'}}]}
# Misc utility endpoints delegating to the service layer.
API.add 'service/oab/scrape',
  get:
    #roleRequired:'openaccessbutton.user'
    action: () -> return {data:API.service.oab.scrape(this.queryParams.url,this.queryParams.content,this.queryParams.doi)}
API.add 'service/oab/redirect',
  get: () -> return API.service.oab.redirect this.queryParams.url
API.add 'service/oab/blacklist',
  get: () -> return {data:API.service.oab.blacklist(undefined,undefined,this.queryParams.stale)}
API.add 'service/oab/templates',
  get: () -> return API.service.oab.template(this.queryParams.template,this.queryParams.refresh)
API.add 'service/oab/substitute',
  post: () -> return API.service.oab.substitute this.request.body.content,this.request.body.vars,this.request.body.markdown
API.add 'service/oab/mail',
  post:
    roleRequired:'openaccessbutton.admin'
    action: () -> return API.service.oab.mail this.request.body
# Receive a fulfilled request via its receiver token :rid (anonymous allowed).
API.add 'service/oab/receive/:rid',
  get: () -> return if r = oab_request.find({receiver:this.urlParams.rid}) then r else 404
  post:
    authOptional: true
    action: () ->
      if r = oab_request.find {receiver:this.urlParams.rid}
        # the admin flag is only honoured for authenticated OAB admins
        admin = this.bodyParams.admin and this.userId and API.accounts.auth('openaccessbutton.admin',this.user)
        return API.service.oab.receive this.urlParams.rid, this.request.files, this.bodyParams.url, this.bodyParams.title, this.bodyParams.description, this.bodyParams.firstname, this.bodyParams.lastname, undefined, admin
      else
        return 404
API.add 'service/oab/redeposit/:rid',
  post:
    roleRequired: 'openaccessbutton.admin'
    action: () -> return API.service.oab.redeposit this.urlParams.rid
# :holdrefuse is either the literal 'refuse' or an integer number of hold days.
API.add 'service/oab/receive/:rid/:holdrefuse',
  get: () ->
    if r = oab_request.find {receiver:this.urlParams.rid}
      if this.urlParams.holdrefuse is 'refuse'
        API.service.oab.refuse r._id, this.queryParams.reason
      else
        if isNaN(parseInt(this.urlParams.holdrefuse))
          return 400
        else
          API.service.oab.hold r._id, parseInt(this.urlParams.holdrefuse)
      return true
    else
      return 404
# Do-not-reply list. GET without ?email (admin only) returns the whole list;
# otherwise d.dnr classifies why the address should not be contacted
# ('user', 'creator', 'supporter', 'invalid', or the stored dnr entry).
API.add 'service/oab/dnr',
  get:
    authOptional: true
    action: () ->
      return API.service.oab.dnr() if not this.queryParams.email? and this.user and API.accounts.auth 'openaccessbutton.admin', this.user
      d = {}
      d.dnr = API.service.oab.dnr this.queryParams.email
      if not d.dnr and this.queryParams.user
        u = API.accounts.retrieve this.queryParams.user
        # NOTE(review): throws if the user id cannot be retrieved or has no
        # emails - confirm callers always pass valid ids
        d.dnr = 'user' if u.emails[0].address is this.queryParams.email
      if not d.dnr and this.queryParams.request
        r = oab_request.get this.queryParams.request
        d.dnr = 'creator' if r.user.email is this.queryParams.email
      if not d.dnr
        d.dnr = 'supporter' if oab_support.find {rid:this.queryParams.request, email:this.queryParams.email}
      if not d.dnr and this.queryParams.validate
        d.validation = API.mail.validate this.queryParams.email, API.settings.service?.openaccessbutton?.mail?.pubkey
        d.dnr = 'invalid' if not d.validation.is_valid
      return d
  post: () ->
    e = this.queryParams.email ? this.request.body.email
    # refuse defaults to true unless explicitly passed as false
    refuse = if this.queryParams.refuse in ['false',false] then false else true
    return if e then API.service.oab.dnr(e,true,refuse) else 400
  delete:
    authRequired: 'openaccessbutton.admin'
    action: () ->
      oab_dnr.remove({email:this.queryParams.email}) if this.queryParams.email
      return {}
# Feedback/bug form handler: mails the raw submission to the team, then
# redirects the browser back to the site's thanks anchor.
API.add 'service/oab/bug',
  post: () ->
    whoto = ['PI:EMAIL:<EMAIL>END_PI']
    try
      # 'wrong record' reports go to an extra recipient
      if this.request.body?.form is 'wrong'
        whoto.push 'PI:EMAIL:<EMAIL>END_PI'
      API.mail.send {
        service: 'openaccessbutton',
        from: 'PI:EMAIL:<EMAIL>END_PI',
        to: whoto,
        subject: 'Feedback form submission',
        text: JSON.stringify(this.request.body,undefined,2)
      }
    return {
      statusCode: 302,
      headers: {
        'Content-Type': 'text/plain',
        'Location': (if API.settings.dev then 'https://dev.openaccessbutton.org' else 'https://openaccessbutton.org') + '/bug#defaultthanks'
      },
      body: 'Location: ' + (if API.settings.dev then 'https://dev.openaccessbutton.org' else 'https://openaccessbutton.org') + '/bug#defaultthanks'
    }
API.add 'service/oab/import',
  post:
    roleRequired: 'openaccessbutton.admin', # later could be opened to other oab users, with some sort of quota / limit
    action: () ->
      # Bulk-import edits to existing request records. Each posted record must
      # carry an _id; changed fields are merged into the stored request and the
      # per-record delta is kept in _bulk_import for audit. Returns counts of
      # found/updated records plus the _ids that could not be matched.
      try
        resp = {found:0,updated:0,missing:[]}
        # accumulate across ALL posted records (previously reset inside the
        # loop, so only the final matched record was ever bulk-imported)
        updates = []
        for p in this.request.body
          if p._id
            rq = oab_request.get p._id
            if rq
              resp.found += 1
              # delta object for the audit trail (previously an array, which
              # JSON.stringify serialises as "[]", losing the audit record)
              update = {}
              for up of p
                p[up] = undefined if p[up] is 'DELETE' # this is not used yet
                # skip system columns by KEY name (previously the VALUE p[up]
                # was compared, so e.g. createdAt could be overwritten on import)
                if (not p[up]? or p[up]) and up not in ['createdAt','created_date','plugin','from','embedded','names']
                  if up.indexOf('refused.') is 0 and ( not rq.refused? or rq.refused[up.split('.')[1]] isnt p[up] )
                    rq.refused ?= {}
                    rq.refused[up.split('.')[1]] = p[up]
                    update.refused ?= {}
                    update.refused[up.split('.')[1]] = p[up]
                  else if up.indexOf('received.') is 0 and ( not rq.received? or rq.received[up.split('.')[1]] isnt p[up] )
                    rq.received ?= {}
                    rq.received[up.split('.')[1]] = p[up]
                    update.received = rq.received
                  else if up.indexOf('followup.') is 0
                    if up isnt 'followup.date' and p['followup.count'] isnt rq.followup?.count
                      rq.followup ?= {}
                      rq.followup.count = p['followup.count']
                      rq.followup.date ?= []
                      rq.followup.date.push moment(Date.now(), "x").format "YYYYMMDD"
                      update.followup = rq.followup
                  else if up is 'sherpa.color' and ( not rq.sherpa? or rq.sherpa.color isnt p[up] )
                    rq.sherpa = {color:p[up]}
                    update.sherpa = rq.sherpa
                  else if rq[up] isnt p[up]
                    rq[up] = p[up]
                    update[up] = rq[up]
              try
                rq._bulk_import ?= {}
                rq._bulk_import[Date.now()] = JSON.stringify update
              # TODO should update collection update to take lists of records that it updates in bulk
              rq.updatedAt = Date.now()
              rq.updated_date = moment(rq.updatedAt, "x").format "YYYY-MM-DD HHmm.ss"
              updates.push rq
              resp.updated += 1
            else
              resp.missing.push p._id
        if updates.length
          resp.imports = oab_request.import(updates)
        return resp
      catch err
        return {status:'error'}
#match = {must:[{term:{'signature.exact':proc.signature}}], must_not:[{exists:{field:'_raw_result.error'}}]}
#try
# if typeof job.refresh is 'number' and job.refresh isnt 0
# d = new Date()
# match.must.push {range:{createdAt:{gt:d.setDate(d.getDate() - job.refresh)}}}
API.add 'service/oab/export/:what',
  get:
    roleRequired: 'openaccessbutton.admin',
    action: () ->
      # Stream a CSV export of oab data. :what selects the dataset - "dnr",
      # "mail", "request" (fixed column list, with author names flattened) or
      # "changes" (request edit history). Optional ?from= / ?to= (epoch ms)
      # bound createdAt; ?to is extended by one day so the end date is inclusive.
      results = []
      fields = if this.urlParams.what is 'changes' then ['_id','createdAt','created_date','action'] else if this.urlParams.what is 'request' then ['_id','created_date','type','count','status','title','url','doi','journal','publisher','sherpa.color','name','names','email','author_affiliation','user.username','user.email','user.firstname','user.lastname','story','rating','receiver','followup.count','followup.date','refused.email','refused.date','received.date','received.from','received.description','received.url','received.admin','received.cron','received.notfromauthor','notes','plugin','from','embedded'] else []
      match = {}
      match.range = {createdAt: {}} if this.queryParams.from or this.queryParams.to
      match.range.createdAt.gte = this.queryParams.from if this.queryParams.from
      match.range.createdAt.lte = parseInt(this.queryParams.to) + 86400000 if this.queryParams.to
      if this.urlParams.what is 'dnr' or this.urlParams.what is 'mail' or this.urlParams.what is 'request'
        results = if this.urlParams.what is 'dnr' then oab_dnr.fetch(match, true) else if this.urlParams.what is 'request' then oab_request.fetch(match, true) else mail_progress.fetch match, true
        for r of results
          if this.urlParams.what isnt 'request'
            # dnr/mail have no fixed schema - discover columns from the records themselves
            for f of results[r]
              fields.push(f) if fields.indexOf(f) is -1
          else
            # flatten author objects into a simple "given family" name list
            results[r].names = []
            if results[r].author?
              for a in results[r].author
                if a.family
                  results[r].names.push a.given + ' ' + a.family
      else if this.urlParams.what is 'changes'
        res = oab_request.fetch_history match, true
        for r in res
          m = {
            action: r.action,
            _id: r.document,
            createdAt: r.createdAt,
            created_date: r.created_date
          }
          # copy the per-action payload fields onto the row, growing the header as needed
          if r.action
            for mr of r[r.action]
              fields.push(mr) if fields.indexOf(mr) is -1
              m[mr] = r[r.action][mr]
          if r.string
            fields.push('string') if fields.indexOf('string') is -1
            m.string = r.string
          results.push m
      csv = API.convert.json2csv {fields:fields}, undefined, results
      name = 'export_' + this.urlParams.what
      this.response.writeHead(200, {
        'Content-disposition': "attachment; filename="+name+".csv",
        'Content-type': 'text/csv; charset=UTF-8',
        'Content-Encoding': 'UTF-8'
      })
      this.response.end(csv)
      this.done() # added this now that underlying lib has been rewritten - should work without crash. Below note left for posterity.
      # NOTE: this should really return to stop restivus throwing an error, and should really include
      # the file length in the above head call, but this causes an intermittent write after end error
      # which crashes the whole system. So pick the lesser of two fuck ups.
# Thin pass-throughs to the ES aggregation helpers on the oab index: distinct
# terms, min/max/range of a key, and the set of mapped keys for a type.
API.add 'service/oab/terms/:type/:key', get: () -> return API.es.terms 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/min/:type/:key', get: () -> return API.es.min 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/max/:type/:key', get: () -> return API.es.max 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/range/:type/:key', get: () -> return API.es.range 'oab', this.urlParams.type, this.urlParams.key
API.add 'service/oab/keys/:type', get: () -> return API.es.keys 'oab', this.urlParams.type
API.add 'service/oab/job',
  get:
    action: () ->
      # List the most recent (up to 1000) openaccessbutton jobs, decorated
      # with the owning user's email and a count of processes per job.
      jobs = job_job.search({service:'openaccessbutton'},{size:1000,newest:true}).hits.hits
      for j of jobs
        jobs[j] = jobs[j]._source
        ju = API.accounts.retrieve jobs[j].user
        # guard the whole chain - an account with a missing or empty emails
        # list must not crash the listing (email is simply left undefined)
        jobs[j].email = ju?.emails?[0]?.address
        jobs[j].processes = if jobs[j].processes? then jobs[j].processes.length else 0
      return jobs
  post:
    roleRequired: 'openaccessbutton.user'
    action: () ->
      # Create an availability-check job. The body is either a bare list of
      # processes or {processes: [...]} plus optional job-wide settings
      # (plugin, libraries, sources, all, refresh, titles) copied onto every process.
      processes = this.request.body.processes ? this.request.body
      for p in processes
        p.plugin = this.request.body.plugin ? 'bulk'
        p.libraries = this.request.body.libraries if this.request.body.libraries?
        p.sources = this.request.body.sources if this.request.body.sources?
        p.all = this.request.body.all ?= false
        p.refresh = 0 if this.request.body.refresh
        p.titles = this.request.body.titles ?= true
      job = API.job.create {refresh:this.request.body.refresh, complete:'API.service.oab.job_complete', user:this.userId, service:'openaccessbutton', function:'API.service.oab.find', name:(this.request.body.name ? "oab_availability"), processes:processes}
      API.service.oab.job_started job
      return job
API.add 'service/oab/job/generate/:start/:end',
  post:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      # Build a find job from every request created between :start and :end
      # (DDMMYYYY; the end day is included via endOf('day')) that has not yet
      # been received. Returns the new job id and the request count, or
      # {count:0} when there is nothing to do.
      start = moment(this.urlParams.start, "DDMMYYYY").valueOf()
      end = moment(this.urlParams.end, "DDMMYYYY").endOf('day').valueOf()
      processes = oab_request.find 'NOT status.exact:received AND createdAt:>' + start + ' AND createdAt:<' + end
      if processes.length
        procs = []
        for p in processes
          # one process per request url, inheriting optional job-wide settings
          pro = {url:p.url}
          pro.libraries = this.request.body.libraries if this.request.body.libraries?
          pro.sources = this.request.body.sources if this.request.body.sources?
          procs.push(pro)
        name = 'sys_requests_' + this.urlParams.start + '_' + this.urlParams.end
        jid = API.job.create {complete:'API.service.oab.job_complete', user:this.userId, service:'openaccessbutton', function:'API.service.oab.find', name:name, processes:procs}
        return {job:jid, count:processes.length}
      else
        return {count:0}
# Job admin helpers: progress is open, reload and remove are admin-only.
API.add 'service/oab/job/:jid/progress', get: () -> return API.job.progress this.urlParams.jid
API.add 'service/oab/job/:jid/reload',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      return API.job.reload this.urlParams.jid
API.add 'service/oab/job/:jid/remove',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      return API.job.remove this.urlParams.jid
API.add 'service/oab/job/:jid/request',
  get:
    roleRequired: 'openaccessbutton.admin'
    action: () ->
      # For every result of job :jid that found no availability and has no
      # existing request, create a new oab request. A request is only created
      # when a usable url can be derived (from the match string or the doi).
      results = API.job.results this.urlParams.jid
      identifiers = []
      for r in results
        if r.availability.length is 0 and r.requests.length is 0
          rq = {}
          if r.match
            if r.match.indexOf('TITLE:') is 0
              rq.title = r.match.replace('TITLE:','')
            else if r.match.indexOf('CITATION:') isnt 0
              # a plain match string (not TITLE:/CITATION:) is treated as the url
              rq.url = r.match
          if r.meta and r.meta.article
            if r.meta.article.doi
              rq.doi = r.meta.article.doi
              rq.url ?= 'https://doi.org/' + r.meta.article.doi
            rq.title ?= r.meta.article.title
          if rq.url
            rq.story = this.queryParams.story ? ''
            created = API.service.oab.request rq, this.userId
            identifiers.push(created) if created
      return identifiers
# Raw job results, with and without an explicit .json suffix.
API.add 'service/oab/job/:jid/results', get: () -> return API.job.results this.urlParams.jid
API.add 'service/oab/job/:jid/results.json', get: () -> return API.job.results this.urlParams.jid
API.add 'service/oab/job/:jid/results.csv',
  get: () ->
    # Render the results of job :jid as a CSV attachment. Column layout:
    # any extra per-process args first, then MATCH, (BING/REVERSED on dev),
    # AVAILABLE, SOURCE, REQUEST, TITLE, DOI, then one column per library and
    # one per source configured on the job. The CSV is built by appending to
    # one string, so the statement order below is significant.
    res = API.job.results this.urlParams.jid, true
    inputs = []
    csv = '"MATCH",'
    csv += '"BING","REVERSED",' if API.settings.dev
    csv += '"AVAILABLE","SOURCE","REQUEST","TITLE","DOI"'
    liborder = []
    sources = []
    extras = []
    # library and source columns come from the first result's args
    if res.length and res[0].args?
      jargs = JSON.parse res[0].args
      if jargs.libraries?
        for l in jargs.libraries
          liborder.push l
          csv += ',"' + l.toUpperCase() + '"'
      if jargs.sources
        sources = jargs.sources
        for s in sources
          csv += ',"' + s.toUpperCase() + '"'
    # collect any extra per-process arg keys that are not job settings
    for er in res
      if er.args?
        erargs = JSON.parse er.args
        for k of erargs
          extras.push(k) if k.toLowerCase() not in ['refresh','library','libraries','sources','plugin','all','titles'] and k not in extras
    if extras.length
      # extra columns are prepended to the header built so far
      exhd = ''
      exhd += '"' + ex + '",' for ex in extras
      csv = exhd + csv
    for r in res
      row = if r.string then JSON.parse(r.string) else r._raw_result['API.service.oab.find']
      csv += '\n'
      if r.args?
        ea = JSON.parse r.args
        for extra in extras
          csv += '"' + (if ea[extra]? then ea[extra] else '') + '",'
      csv += '"' + (if row.match then row.match.replace('TITLE:','').replace(/"/g,'') + '","' else '","')
      if API.settings.dev
        csv += (if row.meta?.article?.bing then 'Yes' else 'No') + '","'
        csv += (if row.meta?.article?.reversed then 'Yes' else 'No') + '","'
      # the url of the last article-type availability entry wins
      av = 'No'
      if row.availability?
        for a in row.availability
          av = a.url.replace(/"/g,'') if a.type is 'article'
      csv += av + '","'
      csv += row.meta.article.source if av isnt 'No' and row.meta?.article?.source
      csv += '","'
      rq = ''
      if row.requests
        for re in row.requests
          if re.type is 'article'
            rq = 'https://' + (if API.settings.dev then 'dev.' else '') + 'openaccessbutton.org/request/' + re._id
      csv += rq + '","'
      # strip quotes and non-ASCII characters from titles to keep the CSV simple
      csv += row.meta.article.title.replace(/"/g,'').replace(/[^\x00-\x7F]/g, "") if row.meta?.article?.title?
      csv += '","'
      csv += row.meta.article.doi if row.meta?.article?.doi
      csv += '"'
      if row.libraries
        for lib in liborder
          csv += ',"'
          js = false
          if lib?.journal?.library
            js = true
            csv += 'Journal subscribed'
          rp = false
          if lib?.repository
            rp = true
            csv += '; ' if js
            csv += 'In repository'
          ll = false
          if lib?.local?.length
            ll = true
            csv += '; ' if js or rp
            csv += 'In library'
          csv += 'Not available' if not js and not rp and not ll
          csv += '"'
      for src in sources
        csv += ',"'
        csv += row.meta.article.found[src] if row.meta?.article?.found?[src]?
        csv += '"'
    job = job_job.get this.urlParams.jid
    name = if job.name then job.name.split('.')[0].replace(/ /g,'_') + '_results' else 'results'
    this.response.writeHead 200,
      'Content-disposition': "attachment; filename="+name+".csv"
      'Content-type': 'text/csv; charset=UTF-8'
      'Content-Encoding': 'UTF-8'
    this.response.end csv
    this.done()
API.add 'service/oab/status', get: () -> return API.service.oab.status()
API.add 'service/oab/embed/:rid', # is this still needed?
  get: () ->
    # Return an inline-styled HTML snippet advertising request :rid for
    # embedding on third-party pages, showing the request title (falling back
    # to its url) and the current supporter count. 404s when the request does
    # not exist.
    rid = this.urlParams.rid
    b = oab_request.get rid
    if b
      title = b.title ? b.url
      template = '<div style="width:800px;padding:0;margin:0;"> \
        <div style="padding:0;margin:0;float:left;width:150px;height:200px;background-color:white;border:2px solid #398bc5;;"> \
          <img src="//openaccessbutton.org/static/icon_OAB.png" style="height:100%;width:100%;"> \
        </div> \
        <div style="padding:0;margin:0;float:left;width:400px;height:200px;background-color:#398bc5;;"> \
          <div style="height:166px;"> \
            <p style="margin:2px;color:white;font-size:30px;text-align:center;"> \
              <a target="_blank" href="https://openaccessbutton.org/request/' + rid + '" style="color:white;font-family:Sans-Serif;"> \
                Open Access Button \
              </a> \
            </p> \
            <p style="margin:2px;color:white;font-size:16px;text-align:center;font-family:Sans-Serif;"> \
              Request for content related to the article <br> \
              <a target="_blank" id="oab_article" href="https://openaccessbutton.org/request/' + rid + '" style="font-style:italic;color:white;font-family:Sans-Serif;"> \
                ' + title + '</a> \
            </p> \
          </div> \
          <div style="height:30px;background-color:#f04717;"> \
            <p style="text-align:center;font-size:16px;margin-right:2px;padding-top:1px;"> \
              <a target="_blank" style="color:white;font-family:Sans-Serif;" href="https://openaccessbutton.org/request/' + rid + '"> \
                ADD YOUR SUPPORT \
              </a> \
            </p> \
          </div> \
        </div> \
        <div style="padding:0;margin:0;float:left;width:200px;height:200px;background-color:#212f3f;"> \
          <h1 style="text-align:center;font-size:50px;color:#f04717;font-family:Sans-Serif;" id="oab_counter"> \
            ' + b.count + '</h1> \
          <p style="text-align:center;color:white;font-size:14px;font-family:Sans-Serif;"> \
            people have been unable to access this content, and support this request \
          </p> \
        </div> \
        <div style="width:100%;clear:both;"></div> \
      </div>';
      return {statusCode: 200, body: {status: 'success', data: template}}
    else
      return {statusCode: 404, body: {status: 'error', data:'404 not found'}}
|
[
{
"context": "users = [\n\tdisplay_name\t: \"Eagle Mcfly\"\n\thome_directory\t: \"/home/mcfly\"\n\timage\t\t\t: \"mock",
"end": 38,
"score": 0.99986332654953,
"start": 27,
"tag": "NAME",
"value": "Eagle Mcfly"
},
{
"context": "gged_in\t\t: false\n\tsession\t\t\t: \"kde\"\n\tusername\... | src/lightdm_data.coffee | chemo-project/lightdm-webkit2-mockup | 0 | users = [
display_name : "Eagle Mcfly"
home_directory : "/home/mcfly"
image : "mock/res/profile1.jpg"
language : "en_US"
layout : null
logged_in : false
session : "kde"
username : "mcfly"
,
display_name : "Sparky Dogson"
home_directory : "/home/sparky"
image : "mock/res/profile2.jpg"
language : "en_UK"
layout : null
logged_in : false
session : "gnome"
username : "sparky"
,
display_name : "Elicha Deering"
home_directory : "/home/deering"
image : "mock/res/profile3.jpg"
language : "en_US"
layout : null
logged_in : false
session : "deepin"
username : "deering"
]
sessions = [
key : "key1"
name : "gnome"
comment : "no comment"
,
key : "key2"
name : "xfce"
comment : "no comment"
,
key : "key3"
name : "kde"
comment : "no comment"
,
key : "key4"
name : "deepin"
comment : "no comment"
]
languages = [
code: "en_US"
name: "English(US)"
territory: "USA"
,
code: "en_UK"
name: "English(UK)"
territory: "UK"
]
layouts = [
name : "test-layout"
description : "test-layout for testing."
short_description : "test-layout-terretory"
]
| 93152 | users = [
display_name : "<NAME>"
home_directory : "/home/mcfly"
image : "mock/res/profile1.jpg"
language : "en_US"
layout : null
logged_in : false
session : "kde"
username : "mcfly"
,
display_name : "<NAME>"
home_directory : "/home/sparky"
image : "mock/res/profile2.jpg"
language : "en_UK"
layout : null
logged_in : false
session : "gnome"
username : "sparky"
,
display_name : "<NAME>"
home_directory : "/home/deering"
image : "mock/res/profile3.jpg"
language : "en_US"
layout : null
logged_in : false
session : "deepin"
username : "deering"
]
sessions = [
key : "key1"
name : "gnome"
comment : "no comment"
,
key : "key2"
name : "<NAME>"
comment : "no comment"
,
key : "key3"
name : "<NAME>"
comment : "no comment"
,
key : "key4"
name : "<NAME>"
comment : "no comment"
]
languages = [
code: "en_US"
name: "English(US)"
territory: "USA"
,
code: "en_UK"
name: "English(UK)"
territory: "UK"
]
layouts = [
name : "test-layout"
description : "test-layout for testing."
short_description : "test-layout-terretory"
]
| true | users = [
display_name : "PI:NAME:<NAME>END_PI"
home_directory : "/home/mcfly"
image : "mock/res/profile1.jpg"
language : "en_US"
layout : null
logged_in : false
session : "kde"
username : "mcfly"
,
display_name : "PI:NAME:<NAME>END_PI"
home_directory : "/home/sparky"
image : "mock/res/profile2.jpg"
language : "en_UK"
layout : null
logged_in : false
session : "gnome"
username : "sparky"
,
display_name : "PI:NAME:<NAME>END_PI"
home_directory : "/home/deering"
image : "mock/res/profile3.jpg"
language : "en_US"
layout : null
logged_in : false
session : "deepin"
username : "deering"
]
sessions = [
key : "key1"
name : "gnome"
comment : "no comment"
,
key : "key2"
name : "PI:NAME:<NAME>END_PI"
comment : "no comment"
,
key : "key3"
name : "PI:NAME:<NAME>END_PI"
comment : "no comment"
,
key : "key4"
name : "PI:NAME:<NAME>END_PI"
comment : "no comment"
]
languages = [
code: "en_US"
name: "English(US)"
territory: "USA"
,
code: "en_UK"
name: "English(UK)"
territory: "UK"
]
layouts = [
name : "test-layout"
description : "test-layout for testing."
short_description : "test-layout-terretory"
]
|
[
{
"context": "ost:5000/show/foo\")\n .set('X-Access-Token': 'foo-token')\n .end (err, res) ->\n res.body.name.",
"end": 392,
"score": 0.7429895997047424,
"start": 383,
"tag": "KEY",
"value": "foo-token"
},
{
"context": "(err, res) ->\n res.body.name.should.equ... | src/api/apps/shows/test/integration.test.coffee | craigspaeth/positron | 76 | require '../../../test/helpers/db'
_ = require 'underscore'
app = require '../../../'
request = require 'superagent'
describe 'shows endpoints', ->
beforeEach (done) ->
@server = app.listen 5000, ->
done()
afterEach ->
@server.close()
it 'returns a show by id', (done) ->
request
.get("http://localhost:5000/show/foo")
.set('X-Access-Token': 'foo-token')
.end (err, res) ->
res.body.name.should.equal 'Inez & Vinoodh'
done()
return
| 578 | require '../../../test/helpers/db'
_ = require 'underscore'
app = require '../../../'
request = require 'superagent'
describe 'shows endpoints', ->
beforeEach (done) ->
@server = app.listen 5000, ->
done()
afterEach ->
@server.close()
it 'returns a show by id', (done) ->
request
.get("http://localhost:5000/show/foo")
.set('X-Access-Token': '<KEY>')
.end (err, res) ->
res.body.name.should.equal '<NAME> & <NAME>'
done()
return
| true | require '../../../test/helpers/db'
_ = require 'underscore'
app = require '../../../'
request = require 'superagent'
describe 'shows endpoints', ->
beforeEach (done) ->
@server = app.listen 5000, ->
done()
afterEach ->
@server.close()
it 'returns a show by id', (done) ->
request
.get("http://localhost:5000/show/foo")
.set('X-Access-Token': 'PI:KEY:<KEY>END_PI')
.end (err, res) ->
res.body.name.should.equal 'PI:NAME:<NAME>END_PI & PI:NAME:<NAME>END_PI'
done()
return
|
[
{
"context": "# Rubyfy - 0.1.0\n# 2019 Caleb Matthiesen\n# https://github.com/calebkm/rubyfy -------------",
"end": 40,
"score": 0.9998334050178528,
"start": 24,
"tag": "NAME",
"value": "Caleb Matthiesen"
},
{
"context": ".1.0\n# 2019 Caleb Matthiesen\n# https://github.com/calebkm/ru... | src/rubyfy.coffee | calebkm/rubyfy | 0 | # Rubyfy - 0.1.0
# 2019 Caleb Matthiesen
# https://github.com/calebkm/rubyfy ------------------
#
@Rubyfy or= {}
# Setup --------------------------------------------
#
Rubyfy.classes = 'Array Object String'.split(' ')
Rubyfy.Array = 'any compact is_empty first last'.split(' ')
Rubyfy.String = 'capitalize downcase is_blank'.split(' ')
Rubyfy.Object = 'any is_empty keys vals'.split(' ')
# Array --------------------------------------------
#
Rubyfy.compact = (arr) ->
if Rubyfy.defined(arr, 'compact')
arr.filter (i) -> i != null and i != undefined
Rubyfy.first = (arr) ->
if Rubyfy.defined(arr, 'first')
arr[0]
Rubyfy.last = (arr) ->
if Rubyfy.defined(arr, 'last')
arr[arr.length - 1]
# Object -------------------------------------------
#
Rubyfy.keys = (obj) ->
if Rubyfy.defined(obj, 'keys')
Object.keys(obj)
Rubyfy.vals = (obj) ->
if Rubyfy.defined(obj, 'vals')
Object.values(obj)
# Array or Object ----------------------------------
#
Rubyfy.any = (arr_or_obj) ->
if Rubyfy.defined(arr_or_obj, 'any')
!Rubyfy.is_empty(arr_or_obj)
Rubyfy.is_empty = (arr_or_obj) ->
if Rubyfy.defined(arr_or_obj, 'is_empty')
if Rubyfy.is_array(arr_or_obj)
Rubyfy.compact(arr_or_obj).length == 0
else if Rubyfy.is_object(arr_or_obj)
Object.keys(arr_or_obj).length == 0
# String -------------------------------------------
#
Rubyfy.capitalize = (str) ->
if Rubyfy.defined(str, 'capitalize')
str.charAt(0).toUpperCase() + str.slice(1).toLowerCase()
Rubyfy.downcase = (str) ->
if Rubyfy.defined(str, 'downcase')
str.toLowerCase()
Rubyfy.is_blank = (str) ->
if Rubyfy.defined(str, 'is_blank')
str.trim() == ''
# Helpers ------------------------------------------
#
Rubyfy.defined = (o, func) ->
if o != null and o != undefined
class_of = Rubyfy.class_of(o)
if class_of and Rubyfy[class_of] and Rubyfy[class_of].includes(func)
true
else
throw new Error("Rubyfy does not define a function `#{func}` for #{Rubyfy.capitalize(class_of)}")
else
throw new Error("Cannot call Rubyfy `#{func}` on #{o}")
Rubyfy.class_of = (t) ->
if Rubyfy.is_array(t)
'Array'
else if Rubyfy.is_string(t)
'String'
else if Rubyfy.is_object(t)
'Object'
else
typeof t
Rubyfy.is_array = (a) ->
Array.isArray(a)
Rubyfy.is_string = (s) ->
typeof s == 'string' or \
s.constructor == String or \
Object.prototype.toString.call(s) == '[object String]'
Rubyfy.is_object = (o) ->
(typeof o == 'object') and \
(o != null) and \
!Rubyfy.is_array(o) and \
!Rubyfy.is_string(o)
# Define `R` shorthand -------------------------------
#
unless Rubyfy.no_r
if typeof @R == 'undefined'
@R = Rubyfy
@_R_IS_DEFINED_ = true
else
console.warn("Unable to assign Rubyfy shorthand `R` because it's already defined. You can still use `Rubyfy` explicitly.")
# Dynamically add prototypes -------------------------
#
unless Rubyfy.no_prototypes
for klass in Rubyfy.classes
do (klass) ->
for func in Rubyfy[klass]
do (func) ->
if typeof eval(klass).prototype[func] == 'undefined'
Object.defineProperty eval(klass).prototype, func,
value: -> Rubyfy[func](@)
else
console.warn("Rubyfy attempted to define `#{klass}##{func}` but it looks like #{klass}.prototype.#{func} has already been defined. You can still use the Rubyfy version of `#{func}` by calling it explicitly with `#{if @_R_IS_DEFINED_ then 'R' else 'Rubyfy'}.#{func}()`")
| 165825 | # Rubyfy - 0.1.0
# 2019 <NAME>
# https://github.com/calebkm/rubyfy ------------------
#
@Rubyfy or= {}
# Setup --------------------------------------------
#
Rubyfy.classes = 'Array Object String'.split(' ')
Rubyfy.Array = 'any compact is_empty first last'.split(' ')
Rubyfy.String = 'capitalize downcase is_blank'.split(' ')
Rubyfy.Object = 'any is_empty keys vals'.split(' ')
# Array --------------------------------------------
#
Rubyfy.compact = (arr) ->
if Rubyfy.defined(arr, 'compact')
arr.filter (i) -> i != null and i != undefined
Rubyfy.first = (arr) ->
if Rubyfy.defined(arr, 'first')
arr[0]
Rubyfy.last = (arr) ->
if Rubyfy.defined(arr, 'last')
arr[arr.length - 1]
# Object -------------------------------------------
#
Rubyfy.keys = (obj) ->
if Rubyfy.defined(obj, 'keys')
Object.keys(obj)
Rubyfy.vals = (obj) ->
if Rubyfy.defined(obj, 'vals')
Object.values(obj)
# Array or Object ----------------------------------
#
Rubyfy.any = (arr_or_obj) ->
if Rubyfy.defined(arr_or_obj, 'any')
!Rubyfy.is_empty(arr_or_obj)
Rubyfy.is_empty = (arr_or_obj) ->
if Rubyfy.defined(arr_or_obj, 'is_empty')
if Rubyfy.is_array(arr_or_obj)
Rubyfy.compact(arr_or_obj).length == 0
else if Rubyfy.is_object(arr_or_obj)
Object.keys(arr_or_obj).length == 0
# String -------------------------------------------
#
Rubyfy.capitalize = (str) ->
if Rubyfy.defined(str, 'capitalize')
str.charAt(0).toUpperCase() + str.slice(1).toLowerCase()
Rubyfy.downcase = (str) ->
if Rubyfy.defined(str, 'downcase')
str.toLowerCase()
Rubyfy.is_blank = (str) ->
if Rubyfy.defined(str, 'is_blank')
str.trim() == ''
# Helpers ------------------------------------------
#
Rubyfy.defined = (o, func) ->
if o != null and o != undefined
class_of = Rubyfy.class_of(o)
if class_of and Rubyfy[class_of] and Rubyfy[class_of].includes(func)
true
else
throw new Error("Rubyfy does not define a function `#{func}` for #{Rubyfy.capitalize(class_of)}")
else
throw new Error("Cannot call Rubyfy `#{func}` on #{o}")
Rubyfy.class_of = (t) ->
if Rubyfy.is_array(t)
'Array'
else if Rubyfy.is_string(t)
'String'
else if Rubyfy.is_object(t)
'Object'
else
typeof t
Rubyfy.is_array = (a) ->
Array.isArray(a)
Rubyfy.is_string = (s) ->
typeof s == 'string' or \
s.constructor == String or \
Object.prototype.toString.call(s) == '[object String]'
Rubyfy.is_object = (o) ->
(typeof o == 'object') and \
(o != null) and \
!Rubyfy.is_array(o) and \
!Rubyfy.is_string(o)
# Define `R` shorthand -------------------------------
#
unless Rubyfy.no_r
if typeof @R == 'undefined'
@R = Rubyfy
@_R_IS_DEFINED_ = true
else
console.warn("Unable to assign Rubyfy shorthand `R` because it's already defined. You can still use `Rubyfy` explicitly.")
# Dynamically add prototypes -------------------------
#
unless Rubyfy.no_prototypes
for klass in Rubyfy.classes
do (klass) ->
for func in Rubyfy[klass]
do (func) ->
if typeof eval(klass).prototype[func] == 'undefined'
Object.defineProperty eval(klass).prototype, func,
value: -> Rubyfy[func](@)
else
console.warn("Rubyfy attempted to define `#{klass}##{func}` but it looks like #{klass}.prototype.#{func} has already been defined. You can still use the Rubyfy version of `#{func}` by calling it explicitly with `#{if @_R_IS_DEFINED_ then 'R' else 'Rubyfy'}.#{func}()`")
| true | # Rubyfy - 0.1.0
# 2019 PI:NAME:<NAME>END_PI
# https://github.com/calebkm/rubyfy ------------------
#
@Rubyfy or= {}
# Setup --------------------------------------------
#
Rubyfy.classes = 'Array Object String'.split(' ')
Rubyfy.Array = 'any compact is_empty first last'.split(' ')
Rubyfy.String = 'capitalize downcase is_blank'.split(' ')
Rubyfy.Object = 'any is_empty keys vals'.split(' ')
# Array --------------------------------------------
#
Rubyfy.compact = (arr) ->
if Rubyfy.defined(arr, 'compact')
arr.filter (i) -> i != null and i != undefined
Rubyfy.first = (arr) ->
if Rubyfy.defined(arr, 'first')
arr[0]
Rubyfy.last = (arr) ->
if Rubyfy.defined(arr, 'last')
arr[arr.length - 1]
# Object -------------------------------------------
#
Rubyfy.keys = (obj) ->
if Rubyfy.defined(obj, 'keys')
Object.keys(obj)
Rubyfy.vals = (obj) ->
if Rubyfy.defined(obj, 'vals')
Object.values(obj)
# Array or Object ----------------------------------
#
Rubyfy.any = (arr_or_obj) ->
if Rubyfy.defined(arr_or_obj, 'any')
!Rubyfy.is_empty(arr_or_obj)
Rubyfy.is_empty = (arr_or_obj) ->
if Rubyfy.defined(arr_or_obj, 'is_empty')
if Rubyfy.is_array(arr_or_obj)
Rubyfy.compact(arr_or_obj).length == 0
else if Rubyfy.is_object(arr_or_obj)
Object.keys(arr_or_obj).length == 0
# String -------------------------------------------
#
Rubyfy.capitalize = (str) ->
if Rubyfy.defined(str, 'capitalize')
str.charAt(0).toUpperCase() + str.slice(1).toLowerCase()
Rubyfy.downcase = (str) ->
if Rubyfy.defined(str, 'downcase')
str.toLowerCase()
Rubyfy.is_blank = (str) ->
if Rubyfy.defined(str, 'is_blank')
str.trim() == ''
# Helpers ------------------------------------------
#
Rubyfy.defined = (o, func) ->
if o != null and o != undefined
class_of = Rubyfy.class_of(o)
if class_of and Rubyfy[class_of] and Rubyfy[class_of].includes(func)
true
else
throw new Error("Rubyfy does not define a function `#{func}` for #{Rubyfy.capitalize(class_of)}")
else
throw new Error("Cannot call Rubyfy `#{func}` on #{o}")
Rubyfy.class_of = (t) ->
if Rubyfy.is_array(t)
'Array'
else if Rubyfy.is_string(t)
'String'
else if Rubyfy.is_object(t)
'Object'
else
typeof t
Rubyfy.is_array = (a) ->
Array.isArray(a)
Rubyfy.is_string = (s) ->
typeof s == 'string' or \
s.constructor == String or \
Object.prototype.toString.call(s) == '[object String]'
Rubyfy.is_object = (o) ->
(typeof o == 'object') and \
(o != null) and \
!Rubyfy.is_array(o) and \
!Rubyfy.is_string(o)
# Define `R` shorthand -------------------------------
#
unless Rubyfy.no_r
if typeof @R == 'undefined'
@R = Rubyfy
@_R_IS_DEFINED_ = true
else
console.warn("Unable to assign Rubyfy shorthand `R` because it's already defined. You can still use `Rubyfy` explicitly.")
# Dynamically add prototypes -------------------------
#
unless Rubyfy.no_prototypes
for klass in Rubyfy.classes
do (klass) ->
for func in Rubyfy[klass]
do (func) ->
if typeof eval(klass).prototype[func] == 'undefined'
Object.defineProperty eval(klass).prototype, func,
value: -> Rubyfy[func](@)
else
console.warn("Rubyfy attempted to define `#{klass}##{func}` but it looks like #{klass}.prototype.#{func} has already been defined. You can still use the Rubyfy version of `#{func}` by calling it explicitly with `#{if @_R_IS_DEFINED_ then 'R' else 'Rubyfy'}.#{func}()`")
|
[
{
"context": " @timeout = 3000\n hackery.checkFtp {host: '216.38.80.156'}, (valid, perms, conn) ->\n should.exist v",
"end": 248,
"score": 0.9997254014015198,
"start": 235,
"tag": "IP_ADDRESS",
"value": "216.38.80.156"
},
{
"context": " 3000\n hackery.checkFtp {use... | test/test.coffee | contra/hackery | 1 | hackery = require '../index'
should = require 'should'
mocha = require 'mocha'
describe 'hackery', ->
describe 'ftp', ->
it 'should login to a public ftp properly', (done) ->
@timeout = 3000
hackery.checkFtp {host: '216.38.80.156'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal true
should.exist perms
perms.should.be.instanceof Array
should.exist conn
conn.raw.quit (err, res) ->
should.exist res
should.exist res.code
res.code.should.equal 221
done()
it 'should fail to login with an invalid user', (done) ->
@timeout = 3000
hackery.checkFtp {user: 'bar', pass: 'foo', host: '216.38.80.156'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal false
done()
it 'should fail to login to an invalid server', (done) ->
@timeout = 3000
hackery.checkFtp {host: 'google.com'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal false
done()
describe 'ping', ->
it 'should report github as online', (done) ->
@timeout = 1000
hackery.ping 'github.com', (online) ->
should.exist online
online.should.equal true
done()
it 'should report an fizzbaromgw0t.com site as offline', (done) ->
@timeout = 1000
hackery.ping 'fizzbaromgw0t.com', (online) ->
should.exist online
online.should.equal false
done()
describe 'scan', ->
it 'should report github.com:10 as closed', (done) ->
@timeout = 1000
hackery.scan 'github.com', 10, (open) ->
should.exist open
open.should.equal false
done()
it 'should report github.com:80 as open', (done) ->
@timeout = 1000
hackery.scan 'github.com', 80, (open) ->
should.exist open
open.should.equal true
done()
it 'should report github.com:80 as open when scanning 75-85', (done) ->
@timeout = 10000
hackery.scanRange 'github.com', 75, 85, (open) ->
should.exist open
open.indexOf(80).should.not.equal -1
done()
it 'should report github.com:80 and 22 as open when scanning 22, 80 and 85', (done) ->
@timeout = 3000
hackery.scanAll 'github.com', [22, 80], (open) ->
should.exist open
open.indexOf(85).should.equal -1
open.indexOf(22).should.not.equal -1
open.indexOf(80).should.not.equal -1
done() | 214988 | hackery = require '../index'
should = require 'should'
mocha = require 'mocha'
describe 'hackery', ->
describe 'ftp', ->
it 'should login to a public ftp properly', (done) ->
@timeout = 3000
hackery.checkFtp {host: '172.16.31.10'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal true
should.exist perms
perms.should.be.instanceof Array
should.exist conn
conn.raw.quit (err, res) ->
should.exist res
should.exist res.code
res.code.should.equal 221
done()
it 'should fail to login with an invalid user', (done) ->
@timeout = 3000
hackery.checkFtp {user: 'bar', pass: '<PASSWORD>', host: '172.16.31.10'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal false
done()
it 'should fail to login to an invalid server', (done) ->
@timeout = 3000
hackery.checkFtp {host: 'google.com'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal false
done()
describe 'ping', ->
it 'should report github as online', (done) ->
@timeout = 1000
hackery.ping 'github.com', (online) ->
should.exist online
online.should.equal true
done()
it 'should report an fizzbaromgw0t.com site as offline', (done) ->
@timeout = 1000
hackery.ping 'fizzbaromgw0t.com', (online) ->
should.exist online
online.should.equal false
done()
describe 'scan', ->
it 'should report github.com:10 as closed', (done) ->
@timeout = 1000
hackery.scan 'github.com', 10, (open) ->
should.exist open
open.should.equal false
done()
it 'should report github.com:80 as open', (done) ->
@timeout = 1000
hackery.scan 'github.com', 80, (open) ->
should.exist open
open.should.equal true
done()
it 'should report github.com:80 as open when scanning 75-85', (done) ->
@timeout = 10000
hackery.scanRange 'github.com', 75, 85, (open) ->
should.exist open
open.indexOf(80).should.not.equal -1
done()
it 'should report github.com:80 and 22 as open when scanning 22, 80 and 85', (done) ->
@timeout = 3000
hackery.scanAll 'github.com', [22, 80], (open) ->
should.exist open
open.indexOf(85).should.equal -1
open.indexOf(22).should.not.equal -1
open.indexOf(80).should.not.equal -1
done() | true | hackery = require '../index'
should = require 'should'
mocha = require 'mocha'
describe 'hackery', ->
describe 'ftp', ->
it 'should login to a public ftp properly', (done) ->
@timeout = 3000
hackery.checkFtp {host: 'PI:IP_ADDRESS:172.16.31.10END_PI'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal true
should.exist perms
perms.should.be.instanceof Array
should.exist conn
conn.raw.quit (err, res) ->
should.exist res
should.exist res.code
res.code.should.equal 221
done()
it 'should fail to login with an invalid user', (done) ->
@timeout = 3000
hackery.checkFtp {user: 'bar', pass: 'PI:PASSWORD:<PASSWORD>END_PI', host: 'PI:IP_ADDRESS:172.16.31.10END_PI'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal false
done()
it 'should fail to login to an invalid server', (done) ->
@timeout = 3000
hackery.checkFtp {host: 'google.com'}, (valid, perms, conn) ->
should.exist valid
valid.should.equal false
done()
describe 'ping', ->
it 'should report github as online', (done) ->
@timeout = 1000
hackery.ping 'github.com', (online) ->
should.exist online
online.should.equal true
done()
it 'should report an fizzbaromgw0t.com site as offline', (done) ->
@timeout = 1000
hackery.ping 'fizzbaromgw0t.com', (online) ->
should.exist online
online.should.equal false
done()
describe 'scan', ->
it 'should report github.com:10 as closed', (done) ->
@timeout = 1000
hackery.scan 'github.com', 10, (open) ->
should.exist open
open.should.equal false
done()
it 'should report github.com:80 as open', (done) ->
@timeout = 1000
hackery.scan 'github.com', 80, (open) ->
should.exist open
open.should.equal true
done()
it 'should report github.com:80 as open when scanning 75-85', (done) ->
@timeout = 10000
hackery.scanRange 'github.com', 75, 85, (open) ->
should.exist open
open.indexOf(80).should.not.equal -1
done()
it 'should report github.com:80 and 22 as open when scanning 22, 80 and 85', (done) ->
@timeout = 3000
hackery.scanAll 'github.com', [22, 80], (open) ->
should.exist open
open.indexOf(85).should.equal -1
open.indexOf(22).should.not.equal -1
open.indexOf(80).should.not.equal -1
done() |
[
{
"context": "ire \"../../../lib/config\"\n\ngapi.server.setApiKey \"AIzaSyB14Ua7k5_wusxHTQEH3sqmglO7MHjHPCI\"\ngapi.server.load \"plus\", \"v1\", ->\n request = ga",
"end": 134,
"score": 0.9997796416282654,
"start": 95,
"tag": "KEY",
"value": "AIzaSyB14Ua7k5_wusxHTQEH3sqmglO7MHjHPCI"
},
... | static/node_modules/gapi/test/plus/people/search.coffee | nohharri/chator | 4 | gapi = require "../../../index"
config = require "../../../lib/config"
gapi.server.setApiKey "AIzaSyB14Ua7k5_wusxHTQEH3sqmglO7MHjHPCI"
gapi.server.load "plus", "v1", ->
request = gapi.server.plus.people.search query: 'Blaine Bublitz'
describe "calling people.search({query: 'Blaine Bublitz'})", ->
it "should add /plus/v1/people?key=AIzaSyB14Ua7k5_wusxHTQEH3sqmglO7MHjHPCI&alt=json&query=Blaine%20Bublitz to config.requestOptions.path", ->
config.requestOptions.path.should.equal '/plus/v1/people?key=AIzaSyB14Ua7k5_wusxHTQEH3sqmglO7MHjHPCI&alt=json&query=Blaine%20Bublitz'
it "should return execute function", ->
request.should.have.property 'execute'
request.execute.should.be.a 'function'
describe "calling execute with a callback", ->
it "should make a request and return an object to the callback", (done) ->
request.execute (resp) ->
resp.should.be.a 'object'
#resp.should.not.have.property 'error'
done() | 5696 | gapi = require "../../../index"
config = require "../../../lib/config"
gapi.server.setApiKey "<KEY>"
gapi.server.load "plus", "v1", ->
request = gapi.server.plus.people.search query: '<NAME>'
describe "calling people.search({query: '<NAME>'})", ->
it "should add /plus/v1/people?key=<KEY>&alt=json&query=<NAME>%20<NAME> to config.requestOptions.path", ->
config.requestOptions.path.should.equal '/plus/v1/people?key=<KEY>&alt=json&query=<NAME>%20<NAME>'
it "should return execute function", ->
request.should.have.property 'execute'
request.execute.should.be.a 'function'
describe "calling execute with a callback", ->
it "should make a request and return an object to the callback", (done) ->
request.execute (resp) ->
resp.should.be.a 'object'
#resp.should.not.have.property 'error'
done() | true | gapi = require "../../../index"
config = require "../../../lib/config"
gapi.server.setApiKey "PI:KEY:<KEY>END_PI"
gapi.server.load "plus", "v1", ->
request = gapi.server.plus.people.search query: 'PI:NAME:<NAME>END_PI'
describe "calling people.search({query: 'PI:NAME:<NAME>END_PI'})", ->
it "should add /plus/v1/people?key=PI:KEY:<KEY>END_PI&alt=json&query=PI:NAME:<NAME>END_PI%20PI:NAME:<NAME>END_PI to config.requestOptions.path", ->
config.requestOptions.path.should.equal '/plus/v1/people?key=PI:KEY:<KEY>END_PI&alt=json&query=PI:NAME:<NAME>END_PI%20PI:NAME:<NAME>END_PI'
it "should return execute function", ->
request.should.have.property 'execute'
request.execute.should.be.a 'function'
describe "calling execute with a callback", ->
it "should make a request and return an object to the callback", (done) ->
request.execute (resp) ->
resp.should.be.a 'object'
#resp.should.not.have.property 'error'
done() |
[
{
"context": "ring and Search for jQuery\nCopyright (c) 2013-2017 Ryan McGeary; Licensed MIT\n###\n$ = jQuery\n$.fn.sieve = (option",
"end": 105,
"score": 0.9998301267623901,
"start": 93,
"tag": "NAME",
"value": "Ryan McGeary"
}
] | src/jquery.sieve.coffee | rmm5t/jquery-sieve | 50 | ###
jQuery Sieve
Table and Container Filtering and Search for jQuery
Copyright (c) 2013-2017 Ryan McGeary; Licensed MIT
###
$ = jQuery
$.fn.sieve = (options) ->
compact = (array) -> item for item in array when item
this.each ->
container = $(this)
settings = $.extend({
searchInput: null
searchTemplate: "<div><label>Search: <input type='text'></label></div>"
itemSelector: "tbody tr"
textSelector: null
toggle: (item, match) -> item.toggle(match)
complete: ->
}, options)
if !settings.searchInput
searchBar = $(settings.searchTemplate)
settings.searchInput = searchBar.find("input")
container.before(searchBar)
settings.searchInput.on "keyup.sieve change.sieve", ->
query = compact($(this).val().toLowerCase().split(/\s+/))
items = container.find(settings.itemSelector)
items.each ->
item = $(this)
if settings.textSelector
cells = item.find(settings.textSelector)
text = cells.text().toLowerCase()
else
text = item.text().toLowerCase()
match = true
for q in query
match &&= text.indexOf(q) >= 0
settings.toggle(item, match)
settings.complete()
| 92589 | ###
jQuery Sieve
Table and Container Filtering and Search for jQuery
Copyright (c) 2013-2017 <NAME>; Licensed MIT
###
$ = jQuery
$.fn.sieve = (options) ->
compact = (array) -> item for item in array when item
this.each ->
container = $(this)
settings = $.extend({
searchInput: null
searchTemplate: "<div><label>Search: <input type='text'></label></div>"
itemSelector: "tbody tr"
textSelector: null
toggle: (item, match) -> item.toggle(match)
complete: ->
}, options)
if !settings.searchInput
searchBar = $(settings.searchTemplate)
settings.searchInput = searchBar.find("input")
container.before(searchBar)
settings.searchInput.on "keyup.sieve change.sieve", ->
query = compact($(this).val().toLowerCase().split(/\s+/))
items = container.find(settings.itemSelector)
items.each ->
item = $(this)
if settings.textSelector
cells = item.find(settings.textSelector)
text = cells.text().toLowerCase()
else
text = item.text().toLowerCase()
match = true
for q in query
match &&= text.indexOf(q) >= 0
settings.toggle(item, match)
settings.complete()
| true | ###
jQuery Sieve
Table and Container Filtering and Search for jQuery
Copyright (c) 2013-2017 PI:NAME:<NAME>END_PI; Licensed MIT
###
$ = jQuery
$.fn.sieve = (options) ->
compact = (array) -> item for item in array when item
this.each ->
container = $(this)
settings = $.extend({
searchInput: null
searchTemplate: "<div><label>Search: <input type='text'></label></div>"
itemSelector: "tbody tr"
textSelector: null
toggle: (item, match) -> item.toggle(match)
complete: ->
}, options)
if !settings.searchInput
searchBar = $(settings.searchTemplate)
settings.searchInput = searchBar.find("input")
container.before(searchBar)
settings.searchInput.on "keyup.sieve change.sieve", ->
query = compact($(this).val().toLowerCase().split(/\s+/))
items = container.find(settings.itemSelector)
items.each ->
item = $(this)
if settings.textSelector
cells = item.find(settings.textSelector)
text = cells.text().toLowerCase()
else
text = item.text().toLowerCase()
match = true
for q in query
match &&= text.indexOf(q) >= 0
settings.toggle(item, match)
settings.complete()
|
[
{
"context": " clients[name] = {id: Number(Date.now()), name: name, paths: paths}\n \n # Listen and serve incoming a",
"end": 2591,
"score": 0.7850888967514038,
"start": 2587,
"tag": "NAME",
"value": "name"
}
] | src/client/index.coffee | travisjeffery/socketstream | 1 | # Client Asset Manager
# --------------------
# The Client Asset Manager allows you to define multiple single-page 'clients' which can be served on
# different URLs or to different devices. Note: The Client Asset Manager deliberately makes extensive use
# of synchronous code. This is because all operations only ever run once on startup (when packing the assets)
# unless you are running in dev mode
log = console.log
systemAssets = require('./system')
templateEngine = require('./template_engine')
formatters = require('./formatters')
# Set defaults
packAssets = false
options =
packAssets: {}
liveReload: ['code', 'css', 'static', 'templates', 'views']
dirs:
code: '/client/code'
css: '/client/css'
static: '/client/static'
assets: '/client/static/assets'
templates: '/client/templates'
views: '/client/views'
workers: '/client/workers'
# Store each client as an object
clients = {}
exports.init = (ss, router) ->
http = require('./http').init(ss.root, clients, options)
systemAssets.load()
# Allow third-party modules to send libs to the client by extending the ss API
ss.client = {send: systemAssets.send}
# Return API
formatters: formatters.init(ss.root)
templateEngine: templateEngine.init(ss, options)
assets: systemAssets
options: options
# Merge optional options
set: (newOption) ->
throw new Error('ss.client.set() takes an object e.g. {liveReload: false}') unless typeof(newOption) == 'object'
for k, v of newOption
if v instanceof Object
options[k][x] = y for x, y of v
else
options[k] = v
# Tell the asset manager to pack and minimise all assets
packAssets: (opts) ->
packAssets = true
options.packAssets = opts
# Define a new Single Page Client
define: (name, paths) ->
throw new Error("Client name '#{name}' has already been defined") if clients[name]?
throw new Error("You may only define one HTML view per single-page client. Please pass a filename as a string, not an Array") if typeof(paths.view) != 'string'
throw new Error("The '#{paths.view}' view must have a valid HTML extension (such as .html or .jade)") if paths.view.indexOf('.') == -1
# Alias 'templates' to 'tmpl'
paths.tmpl = paths.templates if paths.templates
# Force each into an array
['css','code','tmpl'].forEach (assetType) =>
paths[assetType] = [paths[assetType]] unless paths[assetType] instanceof Array
# Define new client object
clients[name] = {id: Number(Date.now()), name: name, paths: paths}
# Listen and serve incoming asset requests
load: (ss) ->
formatters.load()
# Code to execute once everything is loaded
systemAssets.send('code', 'init', "require('/entry'); require('socketstream').connect();")
# Bundle initial assets if we're running in production mode
if packAssets
pack = require('./pack')
pack(ss.root, client, options) for name, client of clients
# Else serve files and watch for changes to files in development
else
require('./serve/dev')(ss.root, router, options)
require('./live_reload')(ss.root, options, ss) if options.liveReload
# Listen out for requests to async load new assets
require('./serve/ondemand')(ss.root, router, options)
| 13716 | # Client Asset Manager
# --------------------
# The Client Asset Manager allows you to define multiple single-page 'clients' which can be served on
# different URLs or to different devices. Note: The Client Asset Manager deliberately makes extensive use
# of synchronous code. This is because all operations only ever run once on startup (when packing the assets)
# unless you are running in dev mode
log = console.log
systemAssets = require('./system')
templateEngine = require('./template_engine')
formatters = require('./formatters')
# Set defaults
packAssets = false
options =
packAssets: {}
liveReload: ['code', 'css', 'static', 'templates', 'views']
dirs:
code: '/client/code'
css: '/client/css'
static: '/client/static'
assets: '/client/static/assets'
templates: '/client/templates'
views: '/client/views'
workers: '/client/workers'
# Store each client as an object
clients = {}
exports.init = (ss, router) ->
http = require('./http').init(ss.root, clients, options)
systemAssets.load()
# Allow third-party modules to send libs to the client by extending the ss API
ss.client = {send: systemAssets.send}
# Return API
formatters: formatters.init(ss.root)
templateEngine: templateEngine.init(ss, options)
assets: systemAssets
options: options
# Merge optional options
set: (newOption) ->
throw new Error('ss.client.set() takes an object e.g. {liveReload: false}') unless typeof(newOption) == 'object'
for k, v of newOption
if v instanceof Object
options[k][x] = y for x, y of v
else
options[k] = v
# Tell the asset manager to pack and minimise all assets
packAssets: (opts) ->
packAssets = true
options.packAssets = opts
# Define a new Single Page Client
define: (name, paths) ->
throw new Error("Client name '#{name}' has already been defined") if clients[name]?
throw new Error("You may only define one HTML view per single-page client. Please pass a filename as a string, not an Array") if typeof(paths.view) != 'string'
throw new Error("The '#{paths.view}' view must have a valid HTML extension (such as .html or .jade)") if paths.view.indexOf('.') == -1
# Alias 'templates' to 'tmpl'
paths.tmpl = paths.templates if paths.templates
# Force each into an array
['css','code','tmpl'].forEach (assetType) =>
paths[assetType] = [paths[assetType]] unless paths[assetType] instanceof Array
# Define new client object
clients[name] = {id: Number(Date.now()), name: <NAME>, paths: paths}
# Listen and serve incoming asset requests
load: (ss) ->
formatters.load()
# Code to execute once everything is loaded
systemAssets.send('code', 'init', "require('/entry'); require('socketstream').connect();")
# Bundle initial assets if we're running in production mode
if packAssets
pack = require('./pack')
pack(ss.root, client, options) for name, client of clients
# Else serve files and watch for changes to files in development
else
require('./serve/dev')(ss.root, router, options)
require('./live_reload')(ss.root, options, ss) if options.liveReload
# Listen out for requests to async load new assets
require('./serve/ondemand')(ss.root, router, options)
| true | # Client Asset Manager
# --------------------
# The Client Asset Manager allows you to define multiple single-page 'clients' which can be served on
# different URLs or to different devices. Note: The Client Asset Manager deliberately makes extensive use
# of synchronous code. This is because all operations only ever run once on startup (when packing the assets)
# unless you are running in dev mode
log = console.log
systemAssets = require('./system')
templateEngine = require('./template_engine')
formatters = require('./formatters')
# Set defaults
packAssets = false
options =
packAssets: {}
liveReload: ['code', 'css', 'static', 'templates', 'views']
dirs:
code: '/client/code'
css: '/client/css'
static: '/client/static'
assets: '/client/static/assets'
templates: '/client/templates'
views: '/client/views'
workers: '/client/workers'
# Store each client as an object
clients = {}
exports.init = (ss, router) ->
http = require('./http').init(ss.root, clients, options)
systemAssets.load()
# Allow third-party modules to send libs to the client by extending the ss API
ss.client = {send: systemAssets.send}
# Return API
formatters: formatters.init(ss.root)
templateEngine: templateEngine.init(ss, options)
assets: systemAssets
options: options
# Merge optional options
set: (newOption) ->
throw new Error('ss.client.set() takes an object e.g. {liveReload: false}') unless typeof(newOption) == 'object'
for k, v of newOption
if v instanceof Object
options[k][x] = y for x, y of v
else
options[k] = v
# Tell the asset manager to pack and minimise all assets
packAssets: (opts) ->
packAssets = true
options.packAssets = opts
# Define a new Single Page Client
define: (name, paths) ->
throw new Error("Client name '#{name}' has already been defined") if clients[name]?
throw new Error("You may only define one HTML view per single-page client. Please pass a filename as a string, not an Array") if typeof(paths.view) != 'string'
throw new Error("The '#{paths.view}' view must have a valid HTML extension (such as .html or .jade)") if paths.view.indexOf('.') == -1
# Alias 'templates' to 'tmpl'
paths.tmpl = paths.templates if paths.templates
# Force each into an array
['css','code','tmpl'].forEach (assetType) =>
paths[assetType] = [paths[assetType]] unless paths[assetType] instanceof Array
# Define new client object
clients[name] = {id: Number(Date.now()), name: PI:NAME:<NAME>END_PI, paths: paths}
# Listen and serve incoming asset requests
load: (ss) ->
formatters.load()
# Code to execute once everything is loaded
systemAssets.send('code', 'init', "require('/entry'); require('socketstream').connect();")
# Bundle initial assets if we're running in production mode
if packAssets
pack = require('./pack')
pack(ss.root, client, options) for name, client of clients
# Else serve files and watch for changes to files in development
else
require('./serve/dev')(ss.root, router, options)
require('./live_reload')(ss.root, options, ss) if options.liveReload
# Listen out for requests to async load new assets
require('./serve/ondemand')(ss.root, router, options)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.