entities listlengths 1 8.61k | max_stars_repo_path stringlengths 7 172 | max_stars_repo_name stringlengths 5 89 | max_stars_count int64 0 82k | content stringlengths 14 1.05M | id stringlengths 2 6 | new_content stringlengths 15 1.05M | modified bool 1 class | references stringlengths 29 1.05M |
|---|---|---|---|---|---|---|---|---|
[
{
"context": "###*\n# server-checker : lib/checkers/dns\n# Author: MrKMG (https://github.com/mrkmg)\n# Contributor: Sven Sl",
"end": 56,
"score": 0.9996091723442078,
"start": 51,
"tag": "USERNAME",
"value": "MrKMG"
},
{
"context": "/checkers/dns\n# Author: MrKMG (https://github.com/mrkmg)\n# Contributor: Sven Slootweg (joepie91) (http://",
"end": 82,
"score": 0.999525249004364,
"start": 77,
"tag": "USERNAME",
"value": "mrkmg"
},
{
"context": "r: MrKMG (https://github.com/mrkmg)\n# Contributor: Sven Slootweg (joepie91) (http://cryto.net/~joepie91/)\n#\n# MIT ",
"end": 112,
"score": 0.9998965859413147,
"start": 99,
"tag": "NAME",
"value": "Sven Slootweg"
},
{
"context": "//github.com/mrkmg)\n# Contributor: Sven Slootweg (joepie91) (http://cryto.net/~joepie91/)\n#\n# MIT License\n##",
"end": 122,
"score": 0.9831932187080383,
"start": 114,
"tag": "USERNAME",
"value": "joepie91"
},
{
"context": "or: Sven Slootweg (joepie91) (http://cryto.net/~joepie91/)\n#\n# MIT License\n###\n\nPromise = require 'blueb",
"end": 149,
"score": 0.5753742456436157,
"start": 145,
"tag": "USERNAME",
"value": "epie"
},
{
"context": "t = (options) ->\n _.defaults options,\n host: '127.0.0.1'\n port: 53\n name: 'google.com'\n type: 'A",
"end": 326,
"score": 0.9997615814208984,
"start": 317,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
}
] | src/lib/plugins/dns.coffee | mrkmg/service-checker | 12 | ###*
# server-checker : lib/checkers/dns
# Author: MrKMG (https://github.com/mrkmg)
# Contributor: Sven Slootweg (joepie91) (http://cryto.net/~joepie91/)
#
# MIT License
###
Promise = require 'bluebird'
dns = require 'native-dns'
_ = require 'underscore'
makeRequest = (options) ->
_.defaults options,
host: '127.0.0.1'
port: 53
name: 'google.com'
type: 'A'
timeout: 5000
question = dns.Question
name: options.name
type: options.type
dns.Request
cache: false
question: question
timeout: options.timeout
server:
address: options.host
port: options.port
doLookup = (request) ->
new Promise (resolve, reject) ->
request.on 'timeout', ->
error = new Error 'Request Timed Out'
error.code = 'TIMEOUT'
reject error
request.on 'end', ->
resolve null
request.send()
run = (options) ->
Promise
.resolve options
.then makeRequest
.then (request) ->
doLookup request
.catch _.identity
module.exports = dns: run
| 44770 | ###*
# server-checker : lib/checkers/dns
# Author: MrKMG (https://github.com/mrkmg)
# Contributor: <NAME> (joepie91) (http://cryto.net/~joepie91/)
#
# MIT License
###
Promise = require 'bluebird'
dns = require 'native-dns'
_ = require 'underscore'
makeRequest = (options) ->
_.defaults options,
host: '127.0.0.1'
port: 53
name: 'google.com'
type: 'A'
timeout: 5000
question = dns.Question
name: options.name
type: options.type
dns.Request
cache: false
question: question
timeout: options.timeout
server:
address: options.host
port: options.port
doLookup = (request) ->
new Promise (resolve, reject) ->
request.on 'timeout', ->
error = new Error 'Request Timed Out'
error.code = 'TIMEOUT'
reject error
request.on 'end', ->
resolve null
request.send()
run = (options) ->
Promise
.resolve options
.then makeRequest
.then (request) ->
doLookup request
.catch _.identity
module.exports = dns: run
| true | ###*
# server-checker : lib/checkers/dns
# Author: MrKMG (https://github.com/mrkmg)
# Contributor: PI:NAME:<NAME>END_PI (joepie91) (http://cryto.net/~joepie91/)
#
# MIT License
###
Promise = require 'bluebird'
dns = require 'native-dns'
_ = require 'underscore'
makeRequest = (options) ->
_.defaults options,
host: '127.0.0.1'
port: 53
name: 'google.com'
type: 'A'
timeout: 5000
question = dns.Question
name: options.name
type: options.type
dns.Request
cache: false
question: question
timeout: options.timeout
server:
address: options.host
port: options.port
doLookup = (request) ->
new Promise (resolve, reject) ->
request.on 'timeout', ->
error = new Error 'Request Timed Out'
error.code = 'TIMEOUT'
reject error
request.on 'end', ->
resolve null
request.send()
run = (options) ->
Promise
.resolve options
.then makeRequest
.then (request) ->
doLookup request
.catch _.identity
module.exports = dns: run
|
[
{
"context": "lling\nscroll.mouseWheelEnabled = true\n\n\nnames = [\"Adi\", \"Vincent\"]\n# \n# for name, i in names\n# \tprint i",
"end": 787,
"score": 0.9997667074203491,
"start": 784,
"tag": "NAME",
"value": "Adi"
},
{
"context": "croll.mouseWheelEnabled = true\n\n\nnames = [\"Adi\", \"Vincent\"]\n# \n# for name, i in names\n# \tprint i\n# \t\n# Numb",
"end": 798,
"score": 0.9997616410255432,
"start": 791,
"tag": "NAME",
"value": "Vincent"
}
] | prototypes/framerBasics II.framer/app.coffee | davo/FramerNYCMeetup | 7 | # 1. BackgroundLayer
# 2. Scroll Component
# 3. For Loop
# 4. Adding items using a for loop
# 5. Set Constraints, Events
# 6. States / Animation
# Append Script
appendScript = (src) ->
parseScr = Utils.domLoadDataSync src
script = document.createElement 'script'
script.type = 'text/javascript'
script.innerHTML = parseScr
document.head.appendChild(script)
appendScript('https://cdnjs.cloudflare.com/ajax/libs/Faker/3.1.0/faker.min.js')
# Set background
Canvas.backgroundColor = "#FFFFFF"
# New scroll component
scroll = new ScrollComponent
# Set size of scroll component
# scroll.width = Screen.width
scroll.size = Screen.size
# Fix dragging behaviour to vertical only
scroll.scrollHorizontal = false
# Enable mouse scrolling
scroll.mouseWheelEnabled = true
names = ["Adi", "Vincent"]
#
# for name, i in names
# print i
#
# Number of items to populate
items = 10
# Initialize for loop
for item in [0..items]
# print 10 + 210 * item
# Create layer
layer = new Layer
parent: scroll.content
# backgroundColor: Utils.randomColor()
borderRadius: 4
width: Screen.width-20
height: 200
x: Align.center()
y: 10 + 210 * item
image: Utils.randomImage()
headLine = new TextLayer
parent: layer
width: 200
color: "white"
fontFamily: "Helvetica"
fontSize: 18
x: 21
y: Align.center
text: faker.lorem.sentence()
# Create a new state
layer.states =
changeImage:
image: Utils.randomImage()
layer.onTap ->
@.image = Utils.randomImage()
| 39258 | # 1. BackgroundLayer
# 2. Scroll Component
# 3. For Loop
# 4. Adding items using a for loop
# 5. Set Constraints, Events
# 6. States / Animation
# Append Script
appendScript = (src) ->
parseScr = Utils.domLoadDataSync src
script = document.createElement 'script'
script.type = 'text/javascript'
script.innerHTML = parseScr
document.head.appendChild(script)
appendScript('https://cdnjs.cloudflare.com/ajax/libs/Faker/3.1.0/faker.min.js')
# Set background
Canvas.backgroundColor = "#FFFFFF"
# New scroll component
scroll = new ScrollComponent
# Set size of scroll component
# scroll.width = Screen.width
scroll.size = Screen.size
# Fix dragging behaviour to vertical only
scroll.scrollHorizontal = false
# Enable mouse scrolling
scroll.mouseWheelEnabled = true
names = ["<NAME>", "<NAME>"]
#
# for name, i in names
# print i
#
# Number of items to populate
items = 10
# Initialize for loop
for item in [0..items]
# print 10 + 210 * item
# Create layer
layer = new Layer
parent: scroll.content
# backgroundColor: Utils.randomColor()
borderRadius: 4
width: Screen.width-20
height: 200
x: Align.center()
y: 10 + 210 * item
image: Utils.randomImage()
headLine = new TextLayer
parent: layer
width: 200
color: "white"
fontFamily: "Helvetica"
fontSize: 18
x: 21
y: Align.center
text: faker.lorem.sentence()
# Create a new state
layer.states =
changeImage:
image: Utils.randomImage()
layer.onTap ->
@.image = Utils.randomImage()
| true | # 1. BackgroundLayer
# 2. Scroll Component
# 3. For Loop
# 4. Adding items using a for loop
# 5. Set Constraints, Events
# 6. States / Animation
# Append Script
appendScript = (src) ->
parseScr = Utils.domLoadDataSync src
script = document.createElement 'script'
script.type = 'text/javascript'
script.innerHTML = parseScr
document.head.appendChild(script)
appendScript('https://cdnjs.cloudflare.com/ajax/libs/Faker/3.1.0/faker.min.js')
# Set background
Canvas.backgroundColor = "#FFFFFF"
# New scroll component
scroll = new ScrollComponent
# Set size of scroll component
# scroll.width = Screen.width
scroll.size = Screen.size
# Fix dragging behaviour to vertical only
scroll.scrollHorizontal = false
# Enable mouse scrolling
scroll.mouseWheelEnabled = true
names = ["PI:NAME:<NAME>END_PI", "PI:NAME:<NAME>END_PI"]
#
# for name, i in names
# print i
#
# Number of items to populate
items = 10
# Initialize for loop
for item in [0..items]
# print 10 + 210 * item
# Create layer
layer = new Layer
parent: scroll.content
# backgroundColor: Utils.randomColor()
borderRadius: 4
width: Screen.width-20
height: 200
x: Align.center()
y: 10 + 210 * item
image: Utils.randomImage()
headLine = new TextLayer
parent: layer
width: 200
color: "white"
fontFamily: "Helvetica"
fontSize: 18
x: 21
y: Align.center
text: faker.lorem.sentence()
# Create a new state
layer.states =
changeImage:
image: Utils.randomImage()
layer.onTap ->
@.image = Utils.randomImage()
|
[
{
"context": "on.com/json-schema-proposal/)\n\n\tCopyright (c) 2007 Kris Zyp SitePen (www.sitepen.com)\n\tCopyright (c) 2011 Vla",
"end": 165,
"score": 0.9998822808265686,
"start": 157,
"tag": "NAME",
"value": "Kris Zyp"
},
{
"context": " Zyp SitePen (www.sitepen.com)\n\tCopyright (c) 2011 Vladimir Dronnikov dronnikov@gmail.com\n\n\tLicensed under the MIT (MIT",
"end": 230,
"score": 0.9998953342437744,
"start": 212,
"tag": "NAME",
"value": "Vladimir Dronnikov"
},
{
"context": "itepen.com)\n\tCopyright (c) 2011 Vladimir Dronnikov dronnikov@gmail.com\n\n\tLicensed under the MIT (MIT-LICENSE.txt) licens",
"end": 250,
"score": 0.9999305605888367,
"start": 231,
"tag": "EMAIL",
"value": "dronnikov@gmail.com"
},
{
"context": "NSE.txt) license\n\n###\n\n###\n *\n * Copyright(c) 2011 Vladimir Dronnikov <dronnikov@gmail.com>\n * MIT Licensed\n *\n###\n\n###",
"end": 354,
"score": 0.9998918771743774,
"start": 336,
"tag": "NAME",
"value": "Vladimir Dronnikov"
},
{
"context": "\n\n###\n *\n * Copyright(c) 2011 Vladimir Dronnikov <dronnikov@gmail.com>\n * MIT Licensed\n *\n###\n\n###\n\tRewrite of kriszyp'",
"end": 375,
"score": 0.9999311566352844,
"start": 356,
"tag": "EMAIL",
"value": "dronnikov@gmail.com"
}
] | src/schema.coffee | dvv/underscore-data | 5 | 'use strict'
###
JSONSchema Validator - Validates JavaScript objects using JSON Schemas
(http://www.json.com/json-schema-proposal/)
Copyright (c) 2007 Kris Zyp SitePen (www.sitepen.com)
Copyright (c) 2011 Vladimir Dronnikov dronnikov@gmail.com
Licensed under the MIT (MIT-LICENSE.txt) license
###
###
*
* Copyright(c) 2011 Vladimir Dronnikov <dronnikov@gmail.com>
* MIT Licensed
*
###
###
Rewrite of kriszyp's json-schema validator https://github.com/kriszyp/json-schema
Relies on documentcloud/underscore to normalize JS
###
#
# we allow property definition to contain `veto` attribute to control whether to retain the property after validation
# if it's === true -- the property will be deleted
# if it is a hash, it specifies the flavors of validation ('add', 'update', 'get', 'query') when the property is deleted
#
# E.g. veto: {get: true} means when validation is called with truthy options.veto and options.flavor === 'get', the property will be deleted
#
#
# given `value`, try to coerce it to `type`
#
# FIXME: we should skip conversion if type is matched?
#
coerce = (value, type) ->
if type is 'string'
value = if value? then String(value) else ''
else if type in ['number', 'integer']
unless _.isNaN value
value = Number value
value = Math.floor value if type is 'integer'
else if type is 'boolean'
value = if value is 'false' then false else not not value
else if type is 'null'
value = null
else if type is 'object'
# can't really think of any sensible coercion to an object
if JSON?.parse
try
value = JSON.parse value
catch err
else if type is 'array'
value = _.ensureArray value
else if type is 'date'
date = _.parseDate value
value = date if _.isDate date
value
#
# N.B. since we allow "enum" attribute to be async, the whole validator is treated as async if callback is specified
#
# we allow type coercion if options.coerce
#
#
# N.B. properties are by default required, use `optional: true` to override
#
#
# N.B. we introduce `value` attribute which fixes the value of the property
#
#
# TODO: introduce rename attribute -- id ---!get---> _id ---get---> id
#
validate = (instance, schema, options = {}, callback) ->
# save the context
self = @
# FIXME: what it is?
_changing = options.changing
# pending validators
asyncs = []
# collected errors
errors = []
# validate a value against a property definition
checkProp = (value, schema, path, i) ->
if path
if _.isNumber i
path += '[' + i + ']'
else if i is undefined
path += ''
else
path += '.' + i
else
path += i
addError = (message) ->
errors.push property: path, message: message
if (typeof schema isnt 'object' or _.isArray schema) and (path or typeof schema isnt 'function') and not schema?.type
if _.isFunction schema
addError 'type' unless value instanceof schema
else if schema
addError 'invalid'
return null
if _changing and schema.readonly
addError 'readonly'
if schema.extends # if it extends another schema, it must pass that schema as well
checkProp value, schema.extends, path, i
# validate a value against a type definition
checkType = (type, value) ->
if type
# TODO: coffee-ize, underscore-ize
if typeof type is 'string' and type isnt 'any' and
`(type == 'null' ? value !== null : typeof value !== type) &&
!(type === 'array' && _.isArray(value)) &&
!(type === 'date' && _.isDate(value)) &&
!(type === 'integer' && value%1===0)`
return [property: path, message: 'type']
if _.isArray type
# a union type
unionErrors = []
for t in type
unionErrors = checkType t, value
break unless unionErrors.length
return unionErrors if unionErrors.length
else if typeof type is 'object'
priorErrors = errors
errors = []
checkProp value, type, path
theseErrors = errors
errors = priorErrors
return theseErrors
[]
if value is undefined
if (not schema.optional or typeof schema.optional is 'object' and not schema.optional[options.flavor]) and not schema.get and not schema.default?
addError 'required'
else
errors = errors.concat checkType schema.type, value
if schema.disallow and not checkType(schema.disallow, value).length
addError 'disallowed'
if value isnt null
if _.isArray value
if schema.items
itemsIsArray = _.isArray schema.items
propDef = schema.items
for v, i in value
if itemsIsArray
propDef = schema.items[i]
if options.coerce and propDef.type
value[i] = coerce v, propDef.type
errors.concat checkProp v, propDef, path, i
if schema.minItems and value.length < schema.minItems
addError 'minItems'
if schema.maxItems and value.length > schema.maxItems
addError 'maxItems'
else if schema.properties or schema.additionalProperties
errors.concat checkObj value, schema.properties, path, schema.additionalProperties
if _.isString value
if schema.pattern and not value.match schema.pattern
addError 'pattern'
if schema.maxLength and value.length > schema.maxLength
addError 'maxLength'
if schema.minLength and value.length < schema.minLength
addError 'minLength'
if schema.minimum isnt undefined and typeof value is typeof schema.minimum and schema.minimum > value
addError 'minimum'
if schema.maximum isnt undefined and typeof value is typeof schema.maximum and schema.maximum < value
addError 'maximum'
if schema.enum
enumeration = schema.enum
# if function specified, distinguish between async and sync flavors
if _.isFunction enumeration
# async validator
if enumeration.length is 2
asyncs.push value: value, path: path, fetch: enumeration
# sync validator
else if enumeration.length is 1
addError 'enum' unless enumeration.call(self, value)
# sync getter
else
enumeration = enumeration.call self
addError 'enum' unless _.include enumeration, value
else
# simple array
addError 'enum' unless _.include enumeration, value
if _.isNumber(schema.maxDecimal) and (new RegExp("\\.[0-9]{#{(schema.maxDecimal+1)},}")).test value
addError 'digits'
null
# validate an object against a schema
checkObj = (instance, objTypeDef = {}, path, additionalProp) ->
if _.isObject objTypeDef
if typeof instance isnt 'object' or _.isArray instance
errors.push property: path, message: 'type'
for own i, propDef of objTypeDef
value = instance[i]
# set the value unconditionally if 'value' attribute specified, if 'add' and 'update' flavors
if 'value' of propDef and options.flavor in ['add', 'update']
value = instance[i] = propDef.value
# skip _not_ specified properties
continue if value is undefined and options.existingOnly
# veto props
if options.veto and (propDef.veto is true or typeof propDef.veto is 'object' and propDef.veto[options.flavor])
delete instance[i]
continue
# done with validation if it is called for 'get' or 'query' and no coercion needed
continue if options.flavor in ['query', 'get'] and not options.coerce
# set default if validation called for 'add'
if value is undefined and propDef.default? and options.flavor is 'add'
value = instance[i] = propDef.default
# throw undefined properties, unless 'add' flavor
if value is undefined and options.flavor isnt 'add'
delete instance[i]
continue
# coerce if coercion is enabled and value is not undefined
if options.coerce and propDef.type and i of instance and value isnt undefined
value = coerce value, propDef.type
instance[i] = value
# remove undefined properties if they are optional
if value is undefined and propDef.optional
delete instance[i]
continue
#
checkProp value, propDef, path, i
for i, value of instance
if i of instance and not objTypeDef[i] and (additionalProp is false or options.removeAdditionalProps)
if options.removeAdditionalProps
delete instance[i]
continue
else
errors.push property: path, message: 'unspecifed'
requires = objTypeDef[i]?.requires
if requires and not requires of instance
errors.push property: path, message: 'requires'
# N.B. additional properties are validated only if schema is specified in additionalProperties
# otherwise they just go intact
if additionalProp?.type and not objTypeDef[i]
# coerce if coercion is enabled
if options.coerce and additionalProp.type
value = coerce value, additionalProp.type
instance[i] = value
checkProp value, additionalProp, path, i
if not _changing and value?.$schema
errors = errors.concat checkProp value, value.$schema, path, i
errors
if schema
checkProp instance, schema, '', _changing or ''
if not _changing and instance?.$schema
checkProp instance, instance.$schema, '', ''
# TODO: extend async validators to query the property values?
# run async validators, if any
len = asyncs.length
if callback and len
for async, i in asyncs
do (async) ->
async.fetch.call self, async.value, (err) ->
if err
errors.push property: async.path, message: 'enum'
len -= 1
# proceed when async validators are done
unless len
callback errors.length and errors or null, instance
else if callback
callback errors.length and errors or null, instance
else
return errors.length and errors or null
return
#
# expose
#
_.mixin
coerce: coerce
validate: validate
| 214229 | 'use strict'
###
JSONSchema Validator - Validates JavaScript objects using JSON Schemas
(http://www.json.com/json-schema-proposal/)
Copyright (c) 2007 <NAME> SitePen (www.sitepen.com)
Copyright (c) 2011 <NAME> <EMAIL>
Licensed under the MIT (MIT-LICENSE.txt) license
###
###
*
* Copyright(c) 2011 <NAME> <<EMAIL>>
* MIT Licensed
*
###
###
Rewrite of kriszyp's json-schema validator https://github.com/kriszyp/json-schema
Relies on documentcloud/underscore to normalize JS
###
#
# we allow property definition to contain `veto` attribute to control whether to retain the property after validation
# if it's === true -- the property will be deleted
# if it is a hash, it specifies the flavors of validation ('add', 'update', 'get', 'query') when the property is deleted
#
# E.g. veto: {get: true} means when validation is called with truthy options.veto and options.flavor === 'get', the property will be deleted
#
#
# given `value`, try to coerce it to `type`
#
# FIXME: we should skip conversion if type is matched?
#
coerce = (value, type) ->
if type is 'string'
value = if value? then String(value) else ''
else if type in ['number', 'integer']
unless _.isNaN value
value = Number value
value = Math.floor value if type is 'integer'
else if type is 'boolean'
value = if value is 'false' then false else not not value
else if type is 'null'
value = null
else if type is 'object'
# can't really think of any sensible coercion to an object
if JSON?.parse
try
value = JSON.parse value
catch err
else if type is 'array'
value = _.ensureArray value
else if type is 'date'
date = _.parseDate value
value = date if _.isDate date
value
#
# N.B. since we allow "enum" attribute to be async, the whole validator is treated as async if callback is specified
#
# we allow type coercion if options.coerce
#
#
# N.B. properties are by default required, use `optional: true` to override
#
#
# N.B. we introduce `value` attribute which fixes the value of the property
#
#
# TODO: introduce rename attribute -- id ---!get---> _id ---get---> id
#
validate = (instance, schema, options = {}, callback) ->
# save the context
self = @
# FIXME: what it is?
_changing = options.changing
# pending validators
asyncs = []
# collected errors
errors = []
# validate a value against a property definition
checkProp = (value, schema, path, i) ->
if path
if _.isNumber i
path += '[' + i + ']'
else if i is undefined
path += ''
else
path += '.' + i
else
path += i
addError = (message) ->
errors.push property: path, message: message
if (typeof schema isnt 'object' or _.isArray schema) and (path or typeof schema isnt 'function') and not schema?.type
if _.isFunction schema
addError 'type' unless value instanceof schema
else if schema
addError 'invalid'
return null
if _changing and schema.readonly
addError 'readonly'
if schema.extends # if it extends another schema, it must pass that schema as well
checkProp value, schema.extends, path, i
# validate a value against a type definition
checkType = (type, value) ->
if type
# TODO: coffee-ize, underscore-ize
if typeof type is 'string' and type isnt 'any' and
`(type == 'null' ? value !== null : typeof value !== type) &&
!(type === 'array' && _.isArray(value)) &&
!(type === 'date' && _.isDate(value)) &&
!(type === 'integer' && value%1===0)`
return [property: path, message: 'type']
if _.isArray type
# a union type
unionErrors = []
for t in type
unionErrors = checkType t, value
break unless unionErrors.length
return unionErrors if unionErrors.length
else if typeof type is 'object'
priorErrors = errors
errors = []
checkProp value, type, path
theseErrors = errors
errors = priorErrors
return theseErrors
[]
if value is undefined
if (not schema.optional or typeof schema.optional is 'object' and not schema.optional[options.flavor]) and not schema.get and not schema.default?
addError 'required'
else
errors = errors.concat checkType schema.type, value
if schema.disallow and not checkType(schema.disallow, value).length
addError 'disallowed'
if value isnt null
if _.isArray value
if schema.items
itemsIsArray = _.isArray schema.items
propDef = schema.items
for v, i in value
if itemsIsArray
propDef = schema.items[i]
if options.coerce and propDef.type
value[i] = coerce v, propDef.type
errors.concat checkProp v, propDef, path, i
if schema.minItems and value.length < schema.minItems
addError 'minItems'
if schema.maxItems and value.length > schema.maxItems
addError 'maxItems'
else if schema.properties or schema.additionalProperties
errors.concat checkObj value, schema.properties, path, schema.additionalProperties
if _.isString value
if schema.pattern and not value.match schema.pattern
addError 'pattern'
if schema.maxLength and value.length > schema.maxLength
addError 'maxLength'
if schema.minLength and value.length < schema.minLength
addError 'minLength'
if schema.minimum isnt undefined and typeof value is typeof schema.minimum and schema.minimum > value
addError 'minimum'
if schema.maximum isnt undefined and typeof value is typeof schema.maximum and schema.maximum < value
addError 'maximum'
if schema.enum
enumeration = schema.enum
# if function specified, distinguish between async and sync flavors
if _.isFunction enumeration
# async validator
if enumeration.length is 2
asyncs.push value: value, path: path, fetch: enumeration
# sync validator
else if enumeration.length is 1
addError 'enum' unless enumeration.call(self, value)
# sync getter
else
enumeration = enumeration.call self
addError 'enum' unless _.include enumeration, value
else
# simple array
addError 'enum' unless _.include enumeration, value
if _.isNumber(schema.maxDecimal) and (new RegExp("\\.[0-9]{#{(schema.maxDecimal+1)},}")).test value
addError 'digits'
null
# validate an object against a schema
checkObj = (instance, objTypeDef = {}, path, additionalProp) ->
if _.isObject objTypeDef
if typeof instance isnt 'object' or _.isArray instance
errors.push property: path, message: 'type'
for own i, propDef of objTypeDef
value = instance[i]
# set the value unconditionally if 'value' attribute specified, if 'add' and 'update' flavors
if 'value' of propDef and options.flavor in ['add', 'update']
value = instance[i] = propDef.value
# skip _not_ specified properties
continue if value is undefined and options.existingOnly
# veto props
if options.veto and (propDef.veto is true or typeof propDef.veto is 'object' and propDef.veto[options.flavor])
delete instance[i]
continue
# done with validation if it is called for 'get' or 'query' and no coercion needed
continue if options.flavor in ['query', 'get'] and not options.coerce
# set default if validation called for 'add'
if value is undefined and propDef.default? and options.flavor is 'add'
value = instance[i] = propDef.default
# throw undefined properties, unless 'add' flavor
if value is undefined and options.flavor isnt 'add'
delete instance[i]
continue
# coerce if coercion is enabled and value is not undefined
if options.coerce and propDef.type and i of instance and value isnt undefined
value = coerce value, propDef.type
instance[i] = value
# remove undefined properties if they are optional
if value is undefined and propDef.optional
delete instance[i]
continue
#
checkProp value, propDef, path, i
for i, value of instance
if i of instance and not objTypeDef[i] and (additionalProp is false or options.removeAdditionalProps)
if options.removeAdditionalProps
delete instance[i]
continue
else
errors.push property: path, message: 'unspecifed'
requires = objTypeDef[i]?.requires
if requires and not requires of instance
errors.push property: path, message: 'requires'
# N.B. additional properties are validated only if schema is specified in additionalProperties
# otherwise they just go intact
if additionalProp?.type and not objTypeDef[i]
# coerce if coercion is enabled
if options.coerce and additionalProp.type
value = coerce value, additionalProp.type
instance[i] = value
checkProp value, additionalProp, path, i
if not _changing and value?.$schema
errors = errors.concat checkProp value, value.$schema, path, i
errors
if schema
checkProp instance, schema, '', _changing or ''
if not _changing and instance?.$schema
checkProp instance, instance.$schema, '', ''
# TODO: extend async validators to query the property values?
# run async validators, if any
len = asyncs.length
if callback and len
for async, i in asyncs
do (async) ->
async.fetch.call self, async.value, (err) ->
if err
errors.push property: async.path, message: 'enum'
len -= 1
# proceed when async validators are done
unless len
callback errors.length and errors or null, instance
else if callback
callback errors.length and errors or null, instance
else
return errors.length and errors or null
return
#
# expose
#
_.mixin
coerce: coerce
validate: validate
| true | 'use strict'
###
JSONSchema Validator - Validates JavaScript objects using JSON Schemas
(http://www.json.com/json-schema-proposal/)
Copyright (c) 2007 PI:NAME:<NAME>END_PI SitePen (www.sitepen.com)
Copyright (c) 2011 PI:NAME:<NAME>END_PI PI:EMAIL:<EMAIL>END_PI
Licensed under the MIT (MIT-LICENSE.txt) license
###
###
*
* Copyright(c) 2011 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* MIT Licensed
*
###
###
Rewrite of kriszyp's json-schema validator https://github.com/kriszyp/json-schema
Relies on documentcloud/underscore to normalize JS
###
#
# we allow property definition to contain `veto` attribute to control whether to retain the property after validation
# if it's === true -- the property will be deleted
# if it is a hash, it specifies the flavors of validation ('add', 'update', 'get', 'query') when the property is deleted
#
# E.g. veto: {get: true} means when validation is called with truthy options.veto and options.flavor === 'get', the property will be deleted
#
#
# given `value`, try to coerce it to `type`
#
# FIXME: we should skip conversion if type is matched?
#
coerce = (value, type) ->
if type is 'string'
value = if value? then String(value) else ''
else if type in ['number', 'integer']
unless _.isNaN value
value = Number value
value = Math.floor value if type is 'integer'
else if type is 'boolean'
value = if value is 'false' then false else not not value
else if type is 'null'
value = null
else if type is 'object'
# can't really think of any sensible coercion to an object
if JSON?.parse
try
value = JSON.parse value
catch err
else if type is 'array'
value = _.ensureArray value
else if type is 'date'
date = _.parseDate value
value = date if _.isDate date
value
#
# N.B. since we allow "enum" attribute to be async, the whole validator is treated as async if callback is specified
#
# we allow type coercion if options.coerce
#
#
# N.B. properties are by default required, use `optional: true` to override
#
#
# N.B. we introduce `value` attribute which fixes the value of the property
# (applied unconditionally for the 'add' and 'update' flavors)
#
#
# TODO: introduce rename attribute -- id ---!get---> _id ---get---> id
#
# Validate `instance` against a JSON-Schema-style `schema`.
#
# options:
#   changing              - partial-update mode; enables `readonly` enforcement
#                           and skips per-value $schema checks
#   flavor                - 'add' | 'update' | 'get' | 'query'; controls fixed
#                           `value` attributes, defaults, vetoes and stripping
#   coerce                - when truthy, values are coerced via the
#                           module-level `coerce(value, type)` helper
#   existingOnly          - skip schema properties absent from the instance
#   veto                  - honor `veto` definitions by deleting the property
#   removeAdditionalProps - delete instance properties not declared in schema
#
# If `callback(errors, instance)` is supplied, asynchronous `enum`
# validators are awaited before it fires; otherwise the collected error
# array (or null when valid) is returned synchronously.
validate = (instance, schema, options = {}, callback) ->
  # save the calling context so sync/async validators run with it
  self = @
  # FIXME: what it is? -- NOTE(review): appears to mark a partial update;
  # confirm against callers
  _changing = options.changing
  # pending async enum validators, executed after the synchronous pass
  asyncs = []
  # collected errors: array of {property, message}
  errors = []
  # validate a value against a property definition;
  # `path` is the dotted/bracketed error path, `i` the current key/index
  checkProp = (value, schema, path, i) ->
    # extend the error-reporting path for this property
    if path
      if _.isNumber i
        path += '[' + i + ']'
      else if i is undefined
        path += ''
      else
        path += '.' + i
    else
      path += i
    # record an error against the current path
    addError = (message) ->
      errors.push property: path, message: message
    # schema given as a constructor (instanceof check) or some other
    # non-object value (always invalid if truthy) -- short-circuit here
    if (typeof schema isnt 'object' or _.isArray schema) and (path or typeof schema isnt 'function') and not schema?.type
      if _.isFunction schema
        addError 'type' unless value instanceof schema
      else if schema
        addError 'invalid'
      return null
    # readonly properties may not be modified in partial-update mode
    if _changing and schema.readonly
      addError 'readonly'
    if schema.extends # if it extends another schema, it must pass that schema as well
      checkProp value, schema.extends, path, i
    # validate a value against a type definition; returns an error array
    # (empty when the type matches)
    checkType = (type, value) ->
      if type
        # TODO: coffee-ize, underscore-ize
        # embedded JS: typeof-name mismatch, with special cases for
        # 'null', 'array', 'date' and 'integer'
        if typeof type is 'string' and type isnt 'any' and
          `(type == 'null' ? value !== null : typeof value !== type) &&
          !(type === 'array' && _.isArray(value)) &&
          !(type === 'date' && _.isDate(value)) &&
          !(type === 'integer' && value%1===0)`
          return [property: path, message: 'type']
        if _.isArray type
          # a union type: value must match at least one member type
          unionErrors = []
          for t in type
            unionErrors = checkType t, value
            break unless unionErrors.length
          return unionErrors if unionErrors.length
        else if typeof type is 'object'
          # nested schema: validate against it without polluting the
          # outer error list, returning the nested errors instead
          priorErrors = errors
          errors = []
          checkProp value, type, path
          theseErrors = errors
          errors = priorErrors
          return theseErrors
      []
    # missing value: required unless optional/getter/defaulted
    if value is undefined
      if (not schema.optional or typeof schema.optional is 'object' and not schema.optional[options.flavor]) and not schema.get and not schema.default?
        addError 'required'
    else
      errors = errors.concat checkType schema.type, value
      # `disallow` is an inverted type check
      if schema.disallow and not checkType(schema.disallow, value).length
        addError 'disallowed'
      if value isnt null
        if _.isArray value
          if schema.items
            # tuple schema (array of defs, one per index) vs a single
            # uniform item definition
            itemsIsArray = _.isArray schema.items
            propDef = schema.items
            for v, i in value
              if itemsIsArray
                propDef = schema.items[i]
              if options.coerce and propDef.type
                value[i] = coerce v, propDef.type
              # NOTE(review): concat result discarded -- still works
              # because checkProp records errors itself via addError
              errors.concat checkProp v, propDef, path, i
          if schema.minItems and value.length < schema.minItems
            addError 'minItems'
          if schema.maxItems and value.length > schema.maxItems
            addError 'maxItems'
        else if schema.properties or schema.additionalProperties
          # NOTE(review): concat result discarded -- checkObj pushes
          # into the shared `errors` array directly
          errors.concat checkObj value, schema.properties, path, schema.additionalProperties
        if _.isString value
          if schema.pattern and not value.match schema.pattern
            addError 'pattern'
          if schema.maxLength and value.length > schema.maxLength
            addError 'maxLength'
          if schema.minLength and value.length < schema.minLength
            addError 'minLength'
        # numeric/string range checks only apply when types agree
        if schema.minimum isnt undefined and typeof value is typeof schema.minimum and schema.minimum > value
          addError 'minimum'
        if schema.maximum isnt undefined and typeof value is typeof schema.maximum and schema.maximum < value
          addError 'maximum'
        if schema.enum
          enumeration = schema.enum
          # if function specified, distinguish between async and sync flavors
          if _.isFunction enumeration
            # async validator: (value, callback) -- deferred to `asyncs`
            if enumeration.length is 2
              asyncs.push value: value, path: path, fetch: enumeration
            # sync validator: (value) -> boolean
            else if enumeration.length is 1
              addError 'enum' unless enumeration.call(self, value)
            # sync getter: () -> array of allowed values
            else
              enumeration = enumeration.call self
              addError 'enum' unless _.include enumeration, value
          else
            # simple array of allowed values
            addError 'enum' unless _.include enumeration, value
        # maxDecimal: reject more than `maxDecimal` fractional digits
        if _.isNumber(schema.maxDecimal) and (new RegExp("\\.[0-9]{#{(schema.maxDecimal+1)},}")).test value
          addError 'digits'
    null
  # validate an object against a schema; `objTypeDef` maps property names
  # to definitions, `additionalProp` is the schema (or false) that governs
  # undeclared properties
  checkObj = (instance, objTypeDef = {}, path, additionalProp) ->
    if _.isObject objTypeDef
      if typeof instance isnt 'object' or _.isArray instance
        errors.push property: path, message: 'type'
      # first pass: walk declared properties
      for own i, propDef of objTypeDef
        value = instance[i]
        # set the value unconditionally if 'value' attribute specified, if 'add' and 'update' flavors
        if 'value' of propDef and options.flavor in ['add', 'update']
          value = instance[i] = propDef.value
        # skip _not_ specified properties
        continue if value is undefined and options.existingOnly
        # veto props: drop them entirely for this flavor
        if options.veto and (propDef.veto is true or typeof propDef.veto is 'object' and propDef.veto[options.flavor])
          delete instance[i]
          continue
        # done with validation if it is called for 'get' or 'query' and no coercion needed
        continue if options.flavor in ['query', 'get'] and not options.coerce
        # set default if validation called for 'add'
        if value is undefined and propDef.default? and options.flavor is 'add'
          value = instance[i] = propDef.default
        # throw undefined properties, unless 'add' flavor
        if value is undefined and options.flavor isnt 'add'
          delete instance[i]
          continue
        # coerce if coercion is enabled and value is not undefined
        if options.coerce and propDef.type and i of instance and value isnt undefined
          value = coerce value, propDef.type
          instance[i] = value
        # remove undefined properties if they are optional
        if value is undefined and propDef.optional
          delete instance[i]
          continue
        # validate the (possibly defaulted/coerced) value
        checkProp value, propDef, path, i
      # second pass: walk properties actually present on the instance
      for i, value of instance
        # undeclared properties are stripped or reported
        if i of instance and not objTypeDef[i] and (additionalProp is false or options.removeAdditionalProps)
          if options.removeAdditionalProps
            delete instance[i]
            continue
          else
            errors.push property: path, message: 'unspecifed'
        # `requires`: this property demands another one be present
        requires = objTypeDef[i]?.requires
        if requires and not requires of instance
          errors.push property: path, message: 'requires'
        # N.B. additional properties are validated only if schema is specified in additionalProperties
        # otherwise they just go intact
        if additionalProp?.type and not objTypeDef[i]
          # coerce if coercion is enabled
          if options.coerce and additionalProp.type
            value = coerce value, additionalProp.type
            instance[i] = value
          checkProp value, additionalProp, path, i
        # honor an embedded per-value $schema, unless in changing mode
        if not _changing and value?.$schema
          errors = errors.concat checkProp value, value.$schema, path, i
    errors
  # top-level driver: validate against the supplied schema, then against
  # a self-describing $schema attached to the instance itself
  if schema
    checkProp instance, schema, '', _changing or ''
  if not _changing and instance?.$schema
    checkProp instance, instance.$schema, '', ''
  # TODO: extend async validators to query the property values?
  # run async validators, if any
  len = asyncs.length
  if callback and len
    for async, i in asyncs
      do (async) ->
        async.fetch.call self, async.value, (err) ->
          if err
            errors.push property: async.path, message: 'enum'
          len -= 1
          # proceed when async validators are done
          unless len
            callback errors.length and errors or null, instance
  else if callback
    callback errors.length and errors or null, instance
  else
    return errors.length and errors or null
  return
#
# expose `coerce` and `validate` as underscore mixins
# (callable as _.coerce(...) / _.validate(...))
#
_.mixin coerce: coerce, validate: validate
|
[
{
"context": "resolve, reject) =>\n GoogleMapsLoader.KEY = 'AIzaSyCgazv4sqh1i3TX66s9cZ9xC0SJt-jXoj0'\n GoogleMapsLoader.LANGUAGE = 'ja'\n Goo",
"end": 307,
"score": 0.9997138381004333,
"start": 268,
"tag": "KEY",
"value": "AIzaSyCgazv4sqh1i3TX66s9cZ9xC0SJt-jXoj0"
}
] | src/map/view/MapView.coffee | mm-git/googleMapsSample | 0 | GoogleMapsLoader = require('google-maps')
Backbone = require('backbone')
class MapView extends Backbone.View
el: "#map"
initialize: (options) ->
@google = null
@map = null
loadMap: ->
new Promise((resolve, reject) =>
GoogleMapsLoader.KEY = 'AIzaSyCgazv4sqh1i3TX66s9cZ9xC0SJt-jXoj0'
GoogleMapsLoader.LANGUAGE = 'ja'
GoogleMapsLoader.LIBRARIES = ['geometry', 'places']
GoogleMapsLoader.load((google) =>
mapOpt =
center : new google.maps.LatLng(35.1660, 136.0135)
zoom : 12
scrollwheel : true
disableDoubleClickZoom : true
keyboardShortcuts : true
mapTypeId : google.maps.MapTypeId.ROADMAP
mapTypeControl : true
mapTypeControlOptions :
position : google.maps.ControlPosition.TOP_LEFT
streetViewControl : false
scaleControl : true
scaleControlOptions:
position : google.maps.ControlPosition.BOTTOM_RIGHT
zoomControl : true
zoomControlOptions:
position : google.maps.ControlPosition.TOP_LEFT
@map = new google.maps.Map(@$el[0], mapOpt)
@google = google
OverlayView = require('./overlayView.coffee')
@overlayView = new OverlayView(@map)
resolve(@)
)
)
module.exports = MapView | 5099 | GoogleMapsLoader = require('google-maps')
Backbone = require('backbone')
class MapView extends Backbone.View
el: "#map"
initialize: (options) ->
@google = null
@map = null
loadMap: ->
new Promise((resolve, reject) =>
GoogleMapsLoader.KEY = '<KEY>'
GoogleMapsLoader.LANGUAGE = 'ja'
GoogleMapsLoader.LIBRARIES = ['geometry', 'places']
GoogleMapsLoader.load((google) =>
mapOpt =
center : new google.maps.LatLng(35.1660, 136.0135)
zoom : 12
scrollwheel : true
disableDoubleClickZoom : true
keyboardShortcuts : true
mapTypeId : google.maps.MapTypeId.ROADMAP
mapTypeControl : true
mapTypeControlOptions :
position : google.maps.ControlPosition.TOP_LEFT
streetViewControl : false
scaleControl : true
scaleControlOptions:
position : google.maps.ControlPosition.BOTTOM_RIGHT
zoomControl : true
zoomControlOptions:
position : google.maps.ControlPosition.TOP_LEFT
@map = new google.maps.Map(@$el[0], mapOpt)
@google = google
OverlayView = require('./overlayView.coffee')
@overlayView = new OverlayView(@map)
resolve(@)
)
)
module.exports = MapView | true | GoogleMapsLoader = require('google-maps')
Backbone = require('backbone')
class MapView extends Backbone.View
el: "#map"
initialize: (options) ->
@google = null
@map = null
loadMap: ->
new Promise((resolve, reject) =>
GoogleMapsLoader.KEY = 'PI:KEY:<KEY>END_PI'
GoogleMapsLoader.LANGUAGE = 'ja'
GoogleMapsLoader.LIBRARIES = ['geometry', 'places']
GoogleMapsLoader.load((google) =>
mapOpt =
center : new google.maps.LatLng(35.1660, 136.0135)
zoom : 12
scrollwheel : true
disableDoubleClickZoom : true
keyboardShortcuts : true
mapTypeId : google.maps.MapTypeId.ROADMAP
mapTypeControl : true
mapTypeControlOptions :
position : google.maps.ControlPosition.TOP_LEFT
streetViewControl : false
scaleControl : true
scaleControlOptions:
position : google.maps.ControlPosition.BOTTOM_RIGHT
zoomControl : true
zoomControlOptions:
position : google.maps.ControlPosition.TOP_LEFT
@map = new google.maps.Map(@$el[0], mapOpt)
@google = google
OverlayView = require('./overlayView.coffee')
@overlayView = new OverlayView(@map)
resolve(@)
)
)
module.exports = MapView |
[
{
"context": "Push_gcm_project_number'\n\n\t\t\tapn =\n\t\t\t\tpassphrase: RocketChat.settings.get 'Push_apn_passphrase'\n\t\t\t\tkeyData: RocketChat.",
"end": 564,
"score": 0.658521294593811,
"start": 545,
"tag": "PASSWORD",
"value": "RocketChat.settings"
}
] | server/lib/cordova.coffee | ImpressiveSetOfIntelligentStudents/chat | 0 | Meteor.methods
log: ->
console.log.apply console, arguments
Meteor.startup ->
Push.debug = RocketChat.settings.get 'Push_debug'
if RocketChat.settings.get('Push_enable') is true
Push.allow
send: (userId, notification) ->
return RocketChat.authz.hasRole(userId, 'admin')
apn = undefined
gcm = undefined
if RocketChat.settings.get('Push_enable_gateway') is false
gcm =
apiKey: RocketChat.settings.get 'Push_gcm_api_key'
projectNumber: RocketChat.settings.get 'Push_gcm_project_number'
apn =
passphrase: RocketChat.settings.get 'Push_apn_passphrase'
keyData: RocketChat.settings.get 'Push_apn_key'
certData: RocketChat.settings.get 'Push_apn_cert'
if RocketChat.settings.get('Push_production') isnt true
apn =
passphrase: RocketChat.settings.get 'Push_apn_dev_passphrase'
keyData: RocketChat.settings.get 'Push_apn_dev_key'
certData: RocketChat.settings.get 'Push_apn_dev_cert'
gateway: 'gateway.sandbox.push.apple.com'
Push.Configure
apn: apn
gcm: gcm
production: RocketChat.settings.get 'Push_production'
sendInterval: 1000
sendBatchSize: 10
if RocketChat.settings.get('Push_enable_gateway') is true
pushGetway = undefined
Push.serverSend = (options) ->
options = options or { badge: 0 }
query = undefined
if options.from isnt ''+options.from
throw new Error('Push.send: option "from" not a string')
if options.title isnt ''+options.title
throw new Error('Push.send: option "title" not a string')
if options.text isnt ''+options.text
throw new Error('Push.send: option "text" not a string')
if Push.debug
console.log('Push: Send message "' + options.title + '" via query', options.query)
query =
$and: [
options.query
{
$or: [
{ 'token.apn': { $exists: true } }
{ 'token.gcm': { $exists: true } }
]
}
]
Push.appCollection.find(query).forEach (app) ->
if Push.debug
console.log('send to token', app.token)
if app.token.apn?
pushGetway.call 'sendPushNotification', 'apn', app.token.apn, options
else if app.token.gcm?
pushGetway.call 'sendPushNotification', 'gcm', app.token.gcm, options
pushGetway = DDP.connect(RocketChat.settings.get('Push_gateway'), {_dontPrintErrors: false})
Push.enabled = true
| 148331 | Meteor.methods
log: ->
console.log.apply console, arguments
Meteor.startup ->
Push.debug = RocketChat.settings.get 'Push_debug'
if RocketChat.settings.get('Push_enable') is true
Push.allow
send: (userId, notification) ->
return RocketChat.authz.hasRole(userId, 'admin')
apn = undefined
gcm = undefined
if RocketChat.settings.get('Push_enable_gateway') is false
gcm =
apiKey: RocketChat.settings.get 'Push_gcm_api_key'
projectNumber: RocketChat.settings.get 'Push_gcm_project_number'
apn =
passphrase: <PASSWORD>.get 'Push_apn_passphrase'
keyData: RocketChat.settings.get 'Push_apn_key'
certData: RocketChat.settings.get 'Push_apn_cert'
if RocketChat.settings.get('Push_production') isnt true
apn =
passphrase: RocketChat.settings.get 'Push_apn_dev_passphrase'
keyData: RocketChat.settings.get 'Push_apn_dev_key'
certData: RocketChat.settings.get 'Push_apn_dev_cert'
gateway: 'gateway.sandbox.push.apple.com'
Push.Configure
apn: apn
gcm: gcm
production: RocketChat.settings.get 'Push_production'
sendInterval: 1000
sendBatchSize: 10
if RocketChat.settings.get('Push_enable_gateway') is true
pushGetway = undefined
Push.serverSend = (options) ->
options = options or { badge: 0 }
query = undefined
if options.from isnt ''+options.from
throw new Error('Push.send: option "from" not a string')
if options.title isnt ''+options.title
throw new Error('Push.send: option "title" not a string')
if options.text isnt ''+options.text
throw new Error('Push.send: option "text" not a string')
if Push.debug
console.log('Push: Send message "' + options.title + '" via query', options.query)
query =
$and: [
options.query
{
$or: [
{ 'token.apn': { $exists: true } }
{ 'token.gcm': { $exists: true } }
]
}
]
Push.appCollection.find(query).forEach (app) ->
if Push.debug
console.log('send to token', app.token)
if app.token.apn?
pushGetway.call 'sendPushNotification', 'apn', app.token.apn, options
else if app.token.gcm?
pushGetway.call 'sendPushNotification', 'gcm', app.token.gcm, options
pushGetway = DDP.connect(RocketChat.settings.get('Push_gateway'), {_dontPrintErrors: false})
Push.enabled = true
| true | Meteor.methods
log: ->
console.log.apply console, arguments
Meteor.startup ->
Push.debug = RocketChat.settings.get 'Push_debug'
if RocketChat.settings.get('Push_enable') is true
Push.allow
send: (userId, notification) ->
return RocketChat.authz.hasRole(userId, 'admin')
apn = undefined
gcm = undefined
if RocketChat.settings.get('Push_enable_gateway') is false
gcm =
apiKey: RocketChat.settings.get 'Push_gcm_api_key'
projectNumber: RocketChat.settings.get 'Push_gcm_project_number'
apn =
passphrase: PI:PASSWORD:<PASSWORD>END_PI.get 'Push_apn_passphrase'
keyData: RocketChat.settings.get 'Push_apn_key'
certData: RocketChat.settings.get 'Push_apn_cert'
if RocketChat.settings.get('Push_production') isnt true
apn =
passphrase: RocketChat.settings.get 'Push_apn_dev_passphrase'
keyData: RocketChat.settings.get 'Push_apn_dev_key'
certData: RocketChat.settings.get 'Push_apn_dev_cert'
gateway: 'gateway.sandbox.push.apple.com'
Push.Configure
apn: apn
gcm: gcm
production: RocketChat.settings.get 'Push_production'
sendInterval: 1000
sendBatchSize: 10
if RocketChat.settings.get('Push_enable_gateway') is true
pushGetway = undefined
Push.serverSend = (options) ->
options = options or { badge: 0 }
query = undefined
if options.from isnt ''+options.from
throw new Error('Push.send: option "from" not a string')
if options.title isnt ''+options.title
throw new Error('Push.send: option "title" not a string')
if options.text isnt ''+options.text
throw new Error('Push.send: option "text" not a string')
if Push.debug
console.log('Push: Send message "' + options.title + '" via query', options.query)
query =
$and: [
options.query
{
$or: [
{ 'token.apn': { $exists: true } }
{ 'token.gcm': { $exists: true } }
]
}
]
Push.appCollection.find(query).forEach (app) ->
if Push.debug
console.log('send to token', app.token)
if app.token.apn?
pushGetway.call 'sendPushNotification', 'apn', app.token.apn, options
else if app.token.gcm?
pushGetway.call 'sendPushNotification', 'gcm', app.token.gcm, options
pushGetway = DDP.connect(RocketChat.settings.get('Push_gateway'), {_dontPrintErrors: false})
Push.enabled = true
|
[
{
"context": ">\n\n\t\t@settings = {}\n\t\t@user =\n\t\t\t_id: @user_id = \"kwjewkl\"\n\t\t\tfeatures:{}\n\t\t\temail: \"joe@example.com\"\n\n\t\t@U",
"end": 383,
"score": 0.9870140552520752,
"start": 376,
"tag": "USERNAME",
"value": "kwjewkl"
},
{
"context": "d: @user_id = \"kwjewkl\"\n\t\t\tfeatures:{}\n\t\t\temail: \"joe@example.com\"\n\n\t\t@UserLocator =\n\t\t\tfindById: sinon.stub().call",
"end": 426,
"score": 0.9999168515205383,
"start": 411,
"tag": "EMAIL",
"value": "joe@example.com"
},
{
"context": "oller =\n\t\t\tgetLoggedInUserId: sinon.stub().returns(@user._id)\n\t\t\tgetSessionUser: sinon.stub().returns(@use",
"end": 881,
"score": 0.9984549283981323,
"start": 876,
"tag": "USERNAME",
"value": "@user"
},
{
"context": "@user._id)\n\t\t\tgetSessionUser: sinon.stub().returns(@user)\n\t\t\t_getRedirectFromSession: sinon.stub()\n\t\t\t_set",
"end": 932,
"score": 0.9726835489273071,
"start": 927,
"tag": "USERNAME",
"value": "@user"
},
{
"context": "troller\n\t\t@req =\n\t\t\tquery:{}\n\t\t\tsession:\n\t\t\t\t\tuser:@user\n\t\t@res = {}\n\n\n\tdescribe \"registerPage\", ->\n\n\t\tit ",
"end": 1500,
"score": 0.9527231454849243,
"start": 1495,
"tag": "USERNAME",
"value": "@user"
},
{
"context": "render = (page, opts)=>\n\t\t\t\topts.user.should.equal @user\n\t\t\t\tdone()\n\t\t\t@UserPagesController.settingsPa",
"end": 4540,
"score": 0.8646827340126038,
"start": 4540,
"tag": "USERNAME",
"value": ""
},
{
"context": "ser_id = @user_id\n\t\t\t@req.query.token = @token = \"mock-token-123\"\n\n\t\tit \"should 404 without a user_id\", (done) ->\n",
"end": 5733,
"score": 0.9952020645141602,
"start": 5719,
"tag": "PASSWORD",
"value": "mock-token-123"
}
] | test/unit/coffee/User/UserPagesControllerTests.coffee | dtu-compute/web-sharelatex | 0 | should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/User/UserPagesController"
expect = require("chai").expect
describe "UserPagesController", ->
beforeEach ->
@settings = {}
@user =
_id: @user_id = "kwjewkl"
features:{}
email: "joe@example.com"
@UserLocator =
findById: sinon.stub().callsArgWith(1, null, @user)
@UserGetter =
getUser: sinon.stub().callsArgWith(2, null, @user)
@UserSessionsManager =
getAllUserSessions: sinon.stub()
@dropboxStatus = {}
@DropboxHandler =
getUserRegistrationStatus : sinon.stub().callsArgWith(1, null, @dropboxStatus)
@ErrorController =
notFound: sinon.stub()
@AuthenticationController =
getLoggedInUserId: sinon.stub().returns(@user._id)
getSessionUser: sinon.stub().returns(@user)
_getRedirectFromSession: sinon.stub()
_setRedirectInSession: sinon.stub()
@UserPagesController = SandboxedModule.require modulePath, requires:
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
"./UserLocator": @UserLocator
"./UserGetter": @UserGetter
"./UserSessionsManager": @UserSessionsManager
"../Errors/ErrorController": @ErrorController
'../Dropbox/DropboxHandler': @DropboxHandler
'../Authentication/AuthenticationController': @AuthenticationController
@req =
query:{}
session:
user:@user
@res = {}
describe "registerPage", ->
it "should render the register page", (done)->
@res.render = (page)=>
page.should.equal "user/register"
done()
@UserPagesController.registerPage @req, @res
it "should set sharedProjectData", (done)->
@req.query.project_name = "myProject"
@req.query.user_first_name = "user_first_name_here"
@res.render = (page, opts)=>
opts.sharedProjectData.project_name.should.equal "myProject"
opts.sharedProjectData.user_first_name.should.equal "user_first_name_here"
done()
@UserPagesController.registerPage @req, @res
it "should set newTemplateData", (done)->
@req.session.templateData =
templateName : "templateName"
@res.render = (page, opts)=>
opts.newTemplateData.templateName.should.equal "templateName"
done()
@UserPagesController.registerPage @req, @res
it "should not set the newTemplateData if there is nothing in the session", (done)->
@res.render = (page, opts)=>
assert.equal opts.newTemplateData.templateName, undefined
done()
@UserPagesController.registerPage @req, @res
describe "loginForm", ->
it "should render the login page", (done)->
@res.render = (page)=>
page.should.equal "user/login"
done()
@UserPagesController.loginPage @req, @res
describe 'when an explicit redirect is set via query string', ->
beforeEach ->
@AuthenticationController._getRedirectFromSession = sinon.stub().returns(null)
@AuthenticationController._setRedirectInSession = sinon.stub()
@req.query.redir = '/somewhere/in/particular'
it 'should set a redirect', (done) ->
@res.render = (page) =>
@AuthenticationController._setRedirectInSession.callCount.should.equal 1
expect(@AuthenticationController._setRedirectInSession.lastCall.args[1]).to.equal @req.query.redir
done()
@UserPagesController.loginPage @req, @res
describe 'sessionsPage', ->
beforeEach ->
@UserSessionsManager.getAllUserSessions.callsArgWith(2, null, [])
it 'should render user/sessions', (done) ->
@res.render = (page)->
page.should.equal "user/sessions"
done()
@UserPagesController.sessionsPage @req, @res
it 'should have called getAllUserSessions', (done) ->
@res.render = (page) =>
@UserSessionsManager.getAllUserSessions.callCount.should.equal 1
done()
@UserPagesController.sessionsPage @req, @res
describe 'when getAllUserSessions produces an error', ->
beforeEach ->
@UserSessionsManager.getAllUserSessions.callsArgWith(2, new Error('woops'))
it 'should call next with an error', (done) ->
@next = (err) =>
assert(err != null)
assert(err instanceof Error)
done()
@UserPagesController.sessionsPage @req, @res, @next
describe "settingsPage", ->
it "should render user/settings", (done)->
@res.render = (page)->
page.should.equal "user/settings"
done()
@UserPagesController.settingsPage @req, @res
it "should send user", (done)->
@res.render = (page, opts)=>
opts.user.should.equal @user
done()
@UserPagesController.settingsPage @req, @res
it "should set 'shouldAllowEditingDetails' to true", (done)->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal true
done()
@UserPagesController.settingsPage @req, @res
describe 'when ldap.updateUserDetailsOnLogin is true', ->
beforeEach ->
@settings.ldap = {updateUserDetailsOnLogin: true}
afterEach ->
delete @settings.ldap
it 'should set "shouldAllowEditingDetails" to false', (done) ->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal false
done()
@UserPagesController.settingsPage @req, @res
describe 'when saml.updateUserDetailsOnLogin is true', ->
beforeEach ->
@settings.saml = {updateUserDetailsOnLogin: true}
afterEach ->
delete @settings.saml
it 'should set "shouldAllowEditingDetails" to false', (done) ->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal false
done()
@UserPagesController.settingsPage @req, @res
describe "activateAccountPage", ->
beforeEach ->
@req.query.user_id = @user_id
@req.query.token = @token = "mock-token-123"
it "should 404 without a user_id", (done) ->
delete @req.query.user_id
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should 404 without a token", (done) ->
delete @req.query.token
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should 404 without a valid user_id", (done) ->
@UserGetter.getUser = sinon.stub().callsArgWith(2, null, null)
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should redirect activated users to login", (done) ->
@user.loginCount = 1
@res.redirect = (url) =>
@UserGetter.getUser.calledWith(@user_id).should.equal true
url.should.equal "/login?email=#{encodeURIComponent(@user.email)}"
done()
@UserPagesController.activateAccountPage @req, @res
it "render the activation page if the user has not logged in before", (done) ->
@user.loginCount = 0
@res.render = (page, opts) =>
page.should.equal "user/activate"
opts.email.should.equal @user.email
opts.token.should.equal @token
done()
@UserPagesController.activateAccountPage @req, @res
| 23638 | should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/User/UserPagesController"
expect = require("chai").expect
describe "UserPagesController", ->
beforeEach ->
@settings = {}
@user =
_id: @user_id = "kwjewkl"
features:{}
email: "<EMAIL>"
@UserLocator =
findById: sinon.stub().callsArgWith(1, null, @user)
@UserGetter =
getUser: sinon.stub().callsArgWith(2, null, @user)
@UserSessionsManager =
getAllUserSessions: sinon.stub()
@dropboxStatus = {}
@DropboxHandler =
getUserRegistrationStatus : sinon.stub().callsArgWith(1, null, @dropboxStatus)
@ErrorController =
notFound: sinon.stub()
@AuthenticationController =
getLoggedInUserId: sinon.stub().returns(@user._id)
getSessionUser: sinon.stub().returns(@user)
_getRedirectFromSession: sinon.stub()
_setRedirectInSession: sinon.stub()
@UserPagesController = SandboxedModule.require modulePath, requires:
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
"./UserLocator": @UserLocator
"./UserGetter": @UserGetter
"./UserSessionsManager": @UserSessionsManager
"../Errors/ErrorController": @ErrorController
'../Dropbox/DropboxHandler': @DropboxHandler
'../Authentication/AuthenticationController': @AuthenticationController
@req =
query:{}
session:
user:@user
@res = {}
describe "registerPage", ->
it "should render the register page", (done)->
@res.render = (page)=>
page.should.equal "user/register"
done()
@UserPagesController.registerPage @req, @res
it "should set sharedProjectData", (done)->
@req.query.project_name = "myProject"
@req.query.user_first_name = "user_first_name_here"
@res.render = (page, opts)=>
opts.sharedProjectData.project_name.should.equal "myProject"
opts.sharedProjectData.user_first_name.should.equal "user_first_name_here"
done()
@UserPagesController.registerPage @req, @res
it "should set newTemplateData", (done)->
@req.session.templateData =
templateName : "templateName"
@res.render = (page, opts)=>
opts.newTemplateData.templateName.should.equal "templateName"
done()
@UserPagesController.registerPage @req, @res
it "should not set the newTemplateData if there is nothing in the session", (done)->
@res.render = (page, opts)=>
assert.equal opts.newTemplateData.templateName, undefined
done()
@UserPagesController.registerPage @req, @res
describe "loginForm", ->
it "should render the login page", (done)->
@res.render = (page)=>
page.should.equal "user/login"
done()
@UserPagesController.loginPage @req, @res
describe 'when an explicit redirect is set via query string', ->
beforeEach ->
@AuthenticationController._getRedirectFromSession = sinon.stub().returns(null)
@AuthenticationController._setRedirectInSession = sinon.stub()
@req.query.redir = '/somewhere/in/particular'
it 'should set a redirect', (done) ->
@res.render = (page) =>
@AuthenticationController._setRedirectInSession.callCount.should.equal 1
expect(@AuthenticationController._setRedirectInSession.lastCall.args[1]).to.equal @req.query.redir
done()
@UserPagesController.loginPage @req, @res
describe 'sessionsPage', ->
beforeEach ->
@UserSessionsManager.getAllUserSessions.callsArgWith(2, null, [])
it 'should render user/sessions', (done) ->
@res.render = (page)->
page.should.equal "user/sessions"
done()
@UserPagesController.sessionsPage @req, @res
it 'should have called getAllUserSessions', (done) ->
@res.render = (page) =>
@UserSessionsManager.getAllUserSessions.callCount.should.equal 1
done()
@UserPagesController.sessionsPage @req, @res
describe 'when getAllUserSessions produces an error', ->
beforeEach ->
@UserSessionsManager.getAllUserSessions.callsArgWith(2, new Error('woops'))
it 'should call next with an error', (done) ->
@next = (err) =>
assert(err != null)
assert(err instanceof Error)
done()
@UserPagesController.sessionsPage @req, @res, @next
describe "settingsPage", ->
it "should render user/settings", (done)->
@res.render = (page)->
page.should.equal "user/settings"
done()
@UserPagesController.settingsPage @req, @res
it "should send user", (done)->
@res.render = (page, opts)=>
opts.user.should.equal @user
done()
@UserPagesController.settingsPage @req, @res
it "should set 'shouldAllowEditingDetails' to true", (done)->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal true
done()
@UserPagesController.settingsPage @req, @res
describe 'when ldap.updateUserDetailsOnLogin is true', ->
beforeEach ->
@settings.ldap = {updateUserDetailsOnLogin: true}
afterEach ->
delete @settings.ldap
it 'should set "shouldAllowEditingDetails" to false', (done) ->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal false
done()
@UserPagesController.settingsPage @req, @res
describe 'when saml.updateUserDetailsOnLogin is true', ->
beforeEach ->
@settings.saml = {updateUserDetailsOnLogin: true}
afterEach ->
delete @settings.saml
it 'should set "shouldAllowEditingDetails" to false', (done) ->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal false
done()
@UserPagesController.settingsPage @req, @res
describe "activateAccountPage", ->
beforeEach ->
@req.query.user_id = @user_id
@req.query.token = @token = "<PASSWORD>"
it "should 404 without a user_id", (done) ->
delete @req.query.user_id
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should 404 without a token", (done) ->
delete @req.query.token
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should 404 without a valid user_id", (done) ->
@UserGetter.getUser = sinon.stub().callsArgWith(2, null, null)
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should redirect activated users to login", (done) ->
@user.loginCount = 1
@res.redirect = (url) =>
@UserGetter.getUser.calledWith(@user_id).should.equal true
url.should.equal "/login?email=#{encodeURIComponent(@user.email)}"
done()
@UserPagesController.activateAccountPage @req, @res
it "render the activation page if the user has not logged in before", (done) ->
@user.loginCount = 0
@res.render = (page, opts) =>
page.should.equal "user/activate"
opts.email.should.equal @user.email
opts.token.should.equal @token
done()
@UserPagesController.activateAccountPage @req, @res
| true | should = require('chai').should()
SandboxedModule = require('sandboxed-module')
assert = require('assert')
path = require('path')
sinon = require('sinon')
modulePath = path.join __dirname, "../../../../app/js/Features/User/UserPagesController"
expect = require("chai").expect
describe "UserPagesController", ->
beforeEach ->
@settings = {}
@user =
_id: @user_id = "kwjewkl"
features:{}
email: "PI:EMAIL:<EMAIL>END_PI"
@UserLocator =
findById: sinon.stub().callsArgWith(1, null, @user)
@UserGetter =
getUser: sinon.stub().callsArgWith(2, null, @user)
@UserSessionsManager =
getAllUserSessions: sinon.stub()
@dropboxStatus = {}
@DropboxHandler =
getUserRegistrationStatus : sinon.stub().callsArgWith(1, null, @dropboxStatus)
@ErrorController =
notFound: sinon.stub()
@AuthenticationController =
getLoggedInUserId: sinon.stub().returns(@user._id)
getSessionUser: sinon.stub().returns(@user)
_getRedirectFromSession: sinon.stub()
_setRedirectInSession: sinon.stub()
@UserPagesController = SandboxedModule.require modulePath, requires:
"settings-sharelatex": @settings
"logger-sharelatex":
log:->
err:->
"./UserLocator": @UserLocator
"./UserGetter": @UserGetter
"./UserSessionsManager": @UserSessionsManager
"../Errors/ErrorController": @ErrorController
'../Dropbox/DropboxHandler': @DropboxHandler
'../Authentication/AuthenticationController': @AuthenticationController
@req =
query:{}
session:
user:@user
@res = {}
describe "registerPage", ->
it "should render the register page", (done)->
@res.render = (page)=>
page.should.equal "user/register"
done()
@UserPagesController.registerPage @req, @res
it "should set sharedProjectData", (done)->
@req.query.project_name = "myProject"
@req.query.user_first_name = "user_first_name_here"
@res.render = (page, opts)=>
opts.sharedProjectData.project_name.should.equal "myProject"
opts.sharedProjectData.user_first_name.should.equal "user_first_name_here"
done()
@UserPagesController.registerPage @req, @res
it "should set newTemplateData", (done)->
@req.session.templateData =
templateName : "templateName"
@res.render = (page, opts)=>
opts.newTemplateData.templateName.should.equal "templateName"
done()
@UserPagesController.registerPage @req, @res
it "should not set the newTemplateData if there is nothing in the session", (done)->
@res.render = (page, opts)=>
assert.equal opts.newTemplateData.templateName, undefined
done()
@UserPagesController.registerPage @req, @res
describe "loginForm", ->
it "should render the login page", (done)->
@res.render = (page)=>
page.should.equal "user/login"
done()
@UserPagesController.loginPage @req, @res
describe 'when an explicit redirect is set via query string', ->
beforeEach ->
@AuthenticationController._getRedirectFromSession = sinon.stub().returns(null)
@AuthenticationController._setRedirectInSession = sinon.stub()
@req.query.redir = '/somewhere/in/particular'
it 'should set a redirect', (done) ->
@res.render = (page) =>
@AuthenticationController._setRedirectInSession.callCount.should.equal 1
expect(@AuthenticationController._setRedirectInSession.lastCall.args[1]).to.equal @req.query.redir
done()
@UserPagesController.loginPage @req, @res
describe 'sessionsPage', ->
beforeEach ->
@UserSessionsManager.getAllUserSessions.callsArgWith(2, null, [])
it 'should render user/sessions', (done) ->
@res.render = (page)->
page.should.equal "user/sessions"
done()
@UserPagesController.sessionsPage @req, @res
it 'should have called getAllUserSessions', (done) ->
@res.render = (page) =>
@UserSessionsManager.getAllUserSessions.callCount.should.equal 1
done()
@UserPagesController.sessionsPage @req, @res
describe 'when getAllUserSessions produces an error', ->
beforeEach ->
@UserSessionsManager.getAllUserSessions.callsArgWith(2, new Error('woops'))
it 'should call next with an error', (done) ->
@next = (err) =>
assert(err != null)
assert(err instanceof Error)
done()
@UserPagesController.sessionsPage @req, @res, @next
describe "settingsPage", ->
it "should render user/settings", (done)->
@res.render = (page)->
page.should.equal "user/settings"
done()
@UserPagesController.settingsPage @req, @res
it "should send user", (done)->
@res.render = (page, opts)=>
opts.user.should.equal @user
done()
@UserPagesController.settingsPage @req, @res
it "should set 'shouldAllowEditingDetails' to true", (done)->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal true
done()
@UserPagesController.settingsPage @req, @res
describe 'when ldap.updateUserDetailsOnLogin is true', ->
beforeEach ->
@settings.ldap = {updateUserDetailsOnLogin: true}
afterEach ->
delete @settings.ldap
it 'should set "shouldAllowEditingDetails" to false', (done) ->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal false
done()
@UserPagesController.settingsPage @req, @res
describe 'when saml.updateUserDetailsOnLogin is true', ->
beforeEach ->
@settings.saml = {updateUserDetailsOnLogin: true}
afterEach ->
delete @settings.saml
it 'should set "shouldAllowEditingDetails" to false', (done) ->
@res.render = (page, opts)=>
opts.shouldAllowEditingDetails.should.equal false
done()
@UserPagesController.settingsPage @req, @res
describe "activateAccountPage", ->
beforeEach ->
@req.query.user_id = @user_id
@req.query.token = @token = "PI:PASSWORD:<PASSWORD>END_PI"
it "should 404 without a user_id", (done) ->
delete @req.query.user_id
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should 404 without a token", (done) ->
delete @req.query.token
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should 404 without a valid user_id", (done) ->
@UserGetter.getUser = sinon.stub().callsArgWith(2, null, null)
@ErrorController.notFound = () ->
done()
@UserPagesController.activateAccountPage @req, @res
it "should redirect activated users to login", (done) ->
@user.loginCount = 1
@res.redirect = (url) =>
@UserGetter.getUser.calledWith(@user_id).should.equal true
url.should.equal "/login?email=#{encodeURIComponent(@user.email)}"
done()
@UserPagesController.activateAccountPage @req, @res
it "render the activation page if the user has not logged in before", (done) ->
@user.loginCount = 0
@res.render = (page, opts) =>
page.should.equal "user/activate"
opts.email.should.equal @user.email
opts.token.should.equal @token
done()
@UserPagesController.activateAccountPage @req, @res
|
[
{
"context": "opleth Visualizer'\n\nconfig =\n author:\n name: 'Nerevu Group, LLC'\n handle: 'nerevu'\n url: '//www.",
"end": 73,
"score": 0.7508554458618164,
"start": 67,
"tag": "NAME",
"value": "Nerevu"
},
{
"context": "uthor:\n name: 'Nerevu Group, LLC'\n handle: 'nerevu'\n url: '//www.nerevu.com'\n email: 'reubano@",
"end": 105,
"score": 0.9994914531707764,
"start": 99,
"tag": "USERNAME",
"value": "nerevu"
},
{
"context": " 'nerevu'\n url: '//www.nerevu.com'\n email: 'reubano@gmail.com'\n\n site:\n title: site_name\n description: '",
"end": 164,
"score": 0.9999249577522278,
"start": 147,
"tag": "EMAIL",
"value": "reubano@gmail.com"
},
{
"context": "app'\n type: 'webapp'\n source: '//github.com/nerevu/akili'\n data: '//github.com/nerevu/akili/tree/",
"end": 393,
"score": 0.9989212155342102,
"start": 387,
"tag": "USERNAME",
"value": "nerevu"
},
{
"context": "/github.com/nerevu/akili'\n data: '//github.com/nerevu/akili/tree/master/app/data'\n keywords: \"\"\"\n ",
"end": 431,
"score": 0.9991647601127625,
"start": 425,
"tag": "USERNAME",
"value": "nerevu"
}
] | app/config.coffee | nerevu/akili | 7 | site_name = 'Choropleth Visualizer'
config =
author:
name: 'Nerevu Group, LLC'
handle: 'nerevu'
url: '//www.nerevu.com'
email: 'reubano@gmail.com'
site:
title: site_name
description: 'An HTML5 data viz application built with Brunch and Chaplin.'
url: '//showcase.akili.nerevu.com'
id: 'com.akili.vizapp'
type: 'webapp'
source: '//github.com/nerevu/akili'
data: '//github.com/nerevu/akili/tree/master/app/data'
keywords: """
brunch, chaplin, nodejs, backbonejs, bower, html5, single page app
"""
# Web pages
home:
title: site_name
default:
factor_attr: 'factor'
factor: 'healthcare'
factors: ['unemployment', 'healthcare']
level: 'county'
levels:
state: 'states'
county: 'counties'
id_attr: 'id'
name_attr: 'name'
metric_attr: 'rate'
google:
analytics:
id: $PROCESS_ENV_GOOGLE_ANALYTICS_TRACKING_ID ? null
site_number: 3
adwords_id: $PROCESS_ENV_GOOGLE_ADWORDS_ID ? null
displayads_id: $PROCESS_ENV_GOOGLE_DISPLAYADS_ID ? null
app_name: site_name
app_id: ''
plus_id: $PROCESS_ENV_GOOGLE_PLUS_ID ? null
facebook:
app_id: ''
module.exports = config
| 48816 | site_name = 'Choropleth Visualizer'
config =
author:
name: '<NAME> Group, LLC'
handle: 'nerevu'
url: '//www.nerevu.com'
email: '<EMAIL>'
site:
title: site_name
description: 'An HTML5 data viz application built with Brunch and Chaplin.'
url: '//showcase.akili.nerevu.com'
id: 'com.akili.vizapp'
type: 'webapp'
source: '//github.com/nerevu/akili'
data: '//github.com/nerevu/akili/tree/master/app/data'
keywords: """
brunch, chaplin, nodejs, backbonejs, bower, html5, single page app
"""
# Web pages
home:
title: site_name
default:
factor_attr: 'factor'
factor: 'healthcare'
factors: ['unemployment', 'healthcare']
level: 'county'
levels:
state: 'states'
county: 'counties'
id_attr: 'id'
name_attr: 'name'
metric_attr: 'rate'
google:
analytics:
id: $PROCESS_ENV_GOOGLE_ANALYTICS_TRACKING_ID ? null
site_number: 3
adwords_id: $PROCESS_ENV_GOOGLE_ADWORDS_ID ? null
displayads_id: $PROCESS_ENV_GOOGLE_DISPLAYADS_ID ? null
app_name: site_name
app_id: ''
plus_id: $PROCESS_ENV_GOOGLE_PLUS_ID ? null
facebook:
app_id: ''
module.exports = config
| true | site_name = 'Choropleth Visualizer'
config =
author:
name: 'PI:NAME:<NAME>END_PI Group, LLC'
handle: 'nerevu'
url: '//www.nerevu.com'
email: 'PI:EMAIL:<EMAIL>END_PI'
site:
title: site_name
description: 'An HTML5 data viz application built with Brunch and Chaplin.'
url: '//showcase.akili.nerevu.com'
id: 'com.akili.vizapp'
type: 'webapp'
source: '//github.com/nerevu/akili'
data: '//github.com/nerevu/akili/tree/master/app/data'
keywords: """
brunch, chaplin, nodejs, backbonejs, bower, html5, single page app
"""
# Web pages
home:
title: site_name
default:
factor_attr: 'factor'
factor: 'healthcare'
factors: ['unemployment', 'healthcare']
level: 'county'
levels:
state: 'states'
county: 'counties'
id_attr: 'id'
name_attr: 'name'
metric_attr: 'rate'
google:
analytics:
id: $PROCESS_ENV_GOOGLE_ANALYTICS_TRACKING_ID ? null
site_number: 3
adwords_id: $PROCESS_ENV_GOOGLE_ADWORDS_ID ? null
displayads_id: $PROCESS_ENV_GOOGLE_DISPLAYADS_ID ? null
app_name: site_name
app_id: ''
plus_id: $PROCESS_ENV_GOOGLE_PLUS_ID ? null
facebook:
app_id: ''
module.exports = config
|
[
{
"context": " before (done) ->\n users[0] = new User(name: \"Alice\")\n users[0].save((err, user) ->\n done()\n ",
"end": 395,
"score": 0.9993399977684021,
"start": 390,
"tag": "NAME",
"value": "Alice"
},
{
"context": " before (done) ->\n users[1] = new User(name: \"Bob\")\n users[1].save((err, user) ->\n done()\n ",
"end": 504,
"score": 0.9993205070495605,
"start": 501,
"tag": "NAME",
"value": "Bob"
}
] | test/statics.start.coffee | jbmusso/mongoose-talkable | 4 | process.env.NODE_ENV = "test"
should = require("should")
Conversations = require("../lib/models/conversation")
Users = User = require("../example/models/user")
describe "Conversation.statics.findOrCreate()", ->
users = []
before (done) ->
Conversations.remove({}, -> done())
before (done) ->
Users.remove({}, -> done())
before (done) ->
users[0] = new User(name: "Alice")
users[0].save((err, user) ->
done()
)
before (done) ->
users[1] = new User(name: "Bob")
users[1].save((err, user) ->
done()
)
storedConversation = null
describe "when creating a conversation between 2 users", ->
it "should create a conversation with no message", (done) ->
Conversations.findOrCreate users[0], users, (err, conversation) ->
storedConversation = conversation
should.not.exist(err)
should.exist(conversation)
storedConversation.status.should.equal("requested")
conversation.participants.ids.should.have.lengthOf(users.length)
conversation.participants.names.should.have.lengthOf(users.length)
for user in users
conversation.participants.ids.should.include(user._id)
conversation.participants.names.should.include(user.name)
conversation.messages.should.have.lengthOf(0)
done()
describe "when starting the conversation", ->
it "should start the conversation", (done) ->
storedConversation.start (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
storedConversation.status.should.equal("started")
done()
describe "when adding a message", ->
it "should add a message to the conversation", (done) ->
message = "First post!"
# console.log storedConversation
storedConversation.addMessage users[0], message, (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
conversation.messages.should.have.lengthOf(1)
conversation.messages[0].body.should.equal(message)
done()
describe "when looking for a user inbox", ->
it "should have added 1 conversations to User0's inbox", (done) ->
users[0].getInbox((err, inbox) ->
should.not.exist(err)
should.exist(inbox)
inbox.should.be.an.instanceOf(Array)
inbox.should.have.lengthOf(1)
done()
)
it "should have added 1 conversation to User1's inbox", (done) ->
users[0].getInbox((err, inbox) ->
should.not.exist(err)
should.exist(inbox)
inbox.should.be.an.instanceOf(Array)
inbox.should.have.lengthOf(1)
done()
)
describe "when ending a conversation", ->
it "should end the conversation", (done) ->
storedConversation.end (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
conversation.status.should.equal("ended")
done()
| 212991 | process.env.NODE_ENV = "test"
should = require("should")
Conversations = require("../lib/models/conversation")
Users = User = require("../example/models/user")
describe "Conversation.statics.findOrCreate()", ->
users = []
before (done) ->
Conversations.remove({}, -> done())
before (done) ->
Users.remove({}, -> done())
before (done) ->
users[0] = new User(name: "<NAME>")
users[0].save((err, user) ->
done()
)
before (done) ->
users[1] = new User(name: "<NAME>")
users[1].save((err, user) ->
done()
)
storedConversation = null
describe "when creating a conversation between 2 users", ->
it "should create a conversation with no message", (done) ->
Conversations.findOrCreate users[0], users, (err, conversation) ->
storedConversation = conversation
should.not.exist(err)
should.exist(conversation)
storedConversation.status.should.equal("requested")
conversation.participants.ids.should.have.lengthOf(users.length)
conversation.participants.names.should.have.lengthOf(users.length)
for user in users
conversation.participants.ids.should.include(user._id)
conversation.participants.names.should.include(user.name)
conversation.messages.should.have.lengthOf(0)
done()
describe "when starting the conversation", ->
it "should start the conversation", (done) ->
storedConversation.start (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
storedConversation.status.should.equal("started")
done()
describe "when adding a message", ->
it "should add a message to the conversation", (done) ->
message = "First post!"
# console.log storedConversation
storedConversation.addMessage users[0], message, (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
conversation.messages.should.have.lengthOf(1)
conversation.messages[0].body.should.equal(message)
done()
describe "when looking for a user inbox", ->
it "should have added 1 conversations to User0's inbox", (done) ->
users[0].getInbox((err, inbox) ->
should.not.exist(err)
should.exist(inbox)
inbox.should.be.an.instanceOf(Array)
inbox.should.have.lengthOf(1)
done()
)
it "should have added 1 conversation to User1's inbox", (done) ->
users[0].getInbox((err, inbox) ->
should.not.exist(err)
should.exist(inbox)
inbox.should.be.an.instanceOf(Array)
inbox.should.have.lengthOf(1)
done()
)
describe "when ending a conversation", ->
it "should end the conversation", (done) ->
storedConversation.end (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
conversation.status.should.equal("ended")
done()
| true | process.env.NODE_ENV = "test"
should = require("should")
Conversations = require("../lib/models/conversation")
Users = User = require("../example/models/user")
describe "Conversation.statics.findOrCreate()", ->
users = []
before (done) ->
Conversations.remove({}, -> done())
before (done) ->
Users.remove({}, -> done())
before (done) ->
users[0] = new User(name: "PI:NAME:<NAME>END_PI")
users[0].save((err, user) ->
done()
)
before (done) ->
users[1] = new User(name: "PI:NAME:<NAME>END_PI")
users[1].save((err, user) ->
done()
)
storedConversation = null
describe "when creating a conversation between 2 users", ->
it "should create a conversation with no message", (done) ->
Conversations.findOrCreate users[0], users, (err, conversation) ->
storedConversation = conversation
should.not.exist(err)
should.exist(conversation)
storedConversation.status.should.equal("requested")
conversation.participants.ids.should.have.lengthOf(users.length)
conversation.participants.names.should.have.lengthOf(users.length)
for user in users
conversation.participants.ids.should.include(user._id)
conversation.participants.names.should.include(user.name)
conversation.messages.should.have.lengthOf(0)
done()
describe "when starting the conversation", ->
it "should start the conversation", (done) ->
storedConversation.start (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
storedConversation.status.should.equal("started")
done()
describe "when adding a message", ->
it "should add a message to the conversation", (done) ->
message = "First post!"
# console.log storedConversation
storedConversation.addMessage users[0], message, (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
conversation.messages.should.have.lengthOf(1)
conversation.messages[0].body.should.equal(message)
done()
describe "when looking for a user inbox", ->
it "should have added 1 conversations to User0's inbox", (done) ->
users[0].getInbox((err, inbox) ->
should.not.exist(err)
should.exist(inbox)
inbox.should.be.an.instanceOf(Array)
inbox.should.have.lengthOf(1)
done()
)
it "should have added 1 conversation to User1's inbox", (done) ->
users[0].getInbox((err, inbox) ->
should.not.exist(err)
should.exist(inbox)
inbox.should.be.an.instanceOf(Array)
inbox.should.have.lengthOf(1)
done()
)
describe "when ending a conversation", ->
it "should end the conversation", (done) ->
storedConversation.end (err, conversation) ->
should.not.exist(err)
should.exist(conversation)
conversation.status.should.equal("ended")
done()
|
[
{
"context": "ateRes = (msg) ->\n args: ['#test', msg]\n user: 'Test'\n prefix: '!test@test.com'\n\nnoopMw = (res, next)",
"end": 192,
"score": 0.9994128942489624,
"start": 188,
"tag": "USERNAME",
"value": "Test"
},
{
"context": " args: ['#test', msg]\n user: 'Test'\n prefix: '!test@test.com'\n\nnoopMw = (res, next) -> next()\n\ndescribe 'Domo ",
"end": 219,
"score": 0.9823096990585327,
"start": 206,
"tag": "EMAIL",
"value": "test@test.com"
}
] | test/messaging.coffee | rikukissa/domo | 4 | _ = require 'underscore'
assert = require 'assert'
proxyquire = require 'proxyquire'
messaging = require '../src/lib/messaging'
createRes = (msg) ->
args: ['#test', msg]
user: 'Test'
prefix: '!test@test.com'
noopMw = (res, next) -> next()
describe 'Domo messaging', ->
Domo = null
logOutput = null
logger = ->
logOutput.apply this, arguments
beforeEach ->
Domo = proxyquire '../src/index.coffee',
'./lib/messaging': -> messaging logger
it 'should console.log errors with color red', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Error:'.red
assert.equal message, 'foobar'.red
done()
domo.error new Error 'foobar'
it 'should console.log warnings with color yellow', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Warning:'.yellow
assert.equal message, 'foo\nbar\nbaz'.yellow
done()
domo.warn('foo', 'bar', 'baz')
it 'should console.log notifications with color green', (done) ->
domo = new Domo(debug: true)
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Notify:'.green
assert.equal message, 'foo\nbar\nbaz'.green
done()
domo.notify('foo', 'bar', 'baz')
it 'should console.log notifications only if debug config is true', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
throw new Error 'Notification logged even though its not allowed in config'
domo.notify('foo', 'bar', 'baz')
setTimeout ->
done()
, 100
| 189016 | _ = require 'underscore'
assert = require 'assert'
proxyquire = require 'proxyquire'
messaging = require '../src/lib/messaging'
createRes = (msg) ->
args: ['#test', msg]
user: 'Test'
prefix: '!<EMAIL>'
noopMw = (res, next) -> next()
describe 'Domo messaging', ->
Domo = null
logOutput = null
logger = ->
logOutput.apply this, arguments
beforeEach ->
Domo = proxyquire '../src/index.coffee',
'./lib/messaging': -> messaging logger
it 'should console.log errors with color red', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Error:'.red
assert.equal message, 'foobar'.red
done()
domo.error new Error 'foobar'
it 'should console.log warnings with color yellow', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Warning:'.yellow
assert.equal message, 'foo\nbar\nbaz'.yellow
done()
domo.warn('foo', 'bar', 'baz')
it 'should console.log notifications with color green', (done) ->
domo = new Domo(debug: true)
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Notify:'.green
assert.equal message, 'foo\nbar\nbaz'.green
done()
domo.notify('foo', 'bar', 'baz')
it 'should console.log notifications only if debug config is true', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
throw new Error 'Notification logged even though its not allowed in config'
domo.notify('foo', 'bar', 'baz')
setTimeout ->
done()
, 100
| true | _ = require 'underscore'
assert = require 'assert'
proxyquire = require 'proxyquire'
messaging = require '../src/lib/messaging'
createRes = (msg) ->
args: ['#test', msg]
user: 'Test'
prefix: '!PI:EMAIL:<EMAIL>END_PI'
noopMw = (res, next) -> next()
describe 'Domo messaging', ->
Domo = null
logOutput = null
logger = ->
logOutput.apply this, arguments
beforeEach ->
Domo = proxyquire '../src/index.coffee',
'./lib/messaging': -> messaging logger
it 'should console.log errors with color red', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Error:'.red
assert.equal message, 'foobar'.red
done()
domo.error new Error 'foobar'
it 'should console.log warnings with color yellow', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Warning:'.yellow
assert.equal message, 'foo\nbar\nbaz'.yellow
done()
domo.warn('foo', 'bar', 'baz')
it 'should console.log notifications with color green', (done) ->
domo = new Domo(debug: true)
lines = 0
logOutput = (prefix, message) ->
assert.equal prefix, 'Notify:'.green
assert.equal message, 'foo\nbar\nbaz'.green
done()
domo.notify('foo', 'bar', 'baz')
it 'should console.log notifications only if debug config is true', (done) ->
domo = new Domo()
lines = 0
logOutput = (prefix, message) ->
throw new Error 'Notification logged even though its not allowed in config'
domo.notify('foo', 'bar', 'baz')
setTimeout ->
done()
, 100
|
[
{
"context": "ct(app.name).toBe('myapp')\n\t\t\texpect(app.key).toBe('-0b')\n\n\t\t\tenv.data.redis.mget [\n\t\t\t\t'a:' + app.id + '",
"end": 1277,
"score": 0.9994063377380371,
"start": 1275,
"tag": "KEY",
"value": "0b"
},
{
"context": "pp.name).toBe('myapp')\n\t\t\t\texpect(app.key).toBe('-1a')\n\t\t\t\texpect(app.secret).toBe('-1a')\n\t\t\t\texpect(a",
"end": 3301,
"score": 0.9087008237838745,
"start": 3299,
"tag": "KEY",
"value": "1a"
},
{
"context": "t(app.key).toBe('-1a')\n\t\t\t\texpect(app.secret).toBe('-1a')\n\t\t\t\texpect(app.owner).toBe(5)\n\t\t\t\tdone()\n\n\tit '",
"end": 3336,
"score": 0.8953912854194641,
"start": 3334,
"tag": "KEY",
"value": "1a"
},
{
"context": "app2.id).toBe(app.id)\n\t\t\t\texpect(app2.key).toBe('-2a')\n\t\t\t\texpect(app2.secret).toBe('-2a')\n\t\t\t\tdone()\n",
"end": 4084,
"score": 0.8778908848762512,
"start": 4082,
"tag": "KEY",
"value": "2a"
},
{
"context": "app2.key).toBe('-2a')\n\t\t\t\texpect(app2.secret).toBe('-2a')\n\t\t\t\tdone()\n\n\tit 'Application retrieval by key -",
"end": 4120,
"score": 0.8407682776451111,
"start": 4118,
"tag": "KEY",
"value": "2a"
},
{
"context": "a.apps.update (success case)', (done) ->\n\t\tuid = 'yahouyahouyahouyahouyahou'\n\t\tenv.data.apps.create {name:'myapp'}, { id: 12 ",
"end": 4743,
"score": 0.738600492477417,
"start": 4718,
"tag": "USERNAME",
"value": "yahouyahouyahouyahouyahou"
},
{
"context": "ct(name).toBe('anothername')\n\t\t\t\t\tdone()\n\t\tuid = '2ahouyahouyahouyahouyahou'\n\t\tenv.data.apps.create {name:'myapp'}, { id: 12 ",
"end": 5082,
"score": 0.6610127091407776,
"start": 5057,
"tag": "USERNAME",
"value": "2ahouyahouyahouyahouyahou"
},
{
"context": "done) ->\n\t\t# existing app with undefined\n\t\tuid = '3ahouyahouyahouyahouyahou'\n\t\tenv.data.apps.create {name:'mya",
"end": 5536,
"score": 0.6251622438430786,
"start": 5526,
"tag": "USERNAME",
"value": "3ahouyahou"
},
{
"context": "ing app with undefined\n\t\tuid = '3ahouyahouyahouyahouyahou'\n\t\tenv.data.apps.create {name:'myapp'}, { id: 12 ",
"end": 5551,
"score": 0.5901752710342407,
"start": 5544,
"tag": "USERNAME",
"value": "ouyahou"
},
{
"context": "a.apps.remove (success case)', (done) ->\n\t\tuid = 'applicationremovaltesttes'\n\t\tenv.data.apps.create {name:'myapp'}, { id: 12 ",
"end": 6400,
"score": 0.9583414793014526,
"start": 6375,
"tag": "USERNAME",
"value": "applicationremovaltesttes"
},
{
"context": "ta.apps.remove (error cases)', (done) ->\n\t\tuid = 'inexistingapplicationtest'\n\t\tenv.data.apps.remove uid, (err) ->\n\t\t\texpect(e",
"end": 6871,
"score": 0.8332505822181702,
"start": 6846,
"tag": "USERNAME",
"value": "inexistingapplicationtest"
},
{
"context": "updateDomains (success case)', (done) ->\n\t\tuid = 'appdomainupdatetestestest'\n\t\tenv.data.apps.create {name:'myapp'}, { id: 12 ",
"end": 7115,
"score": 0.9168747067451477,
"start": 7090,
"tag": "USERNAME",
"value": "appdomainupdatetestestest"
},
{
"context": "updateerrorstest'\n\t\t\t\tenv.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->\n\t\t\t\t\tenv.data.apps.u",
"end": 7963,
"score": 0.576076865196228,
"start": 7958,
"tag": "USERNAME",
"value": "myapp"
},
{
"context": "\t(next) ->\n\t\t\t\t# unknown key\n\t\t\t\tuid = 'inexistingapplicationtest'\n\t\t\t\tenv.data.apps.addDomain uid, 'domain1', (err",
"end": 8804,
"score": 0.7088208198547363,
"start": 8789,
"tag": "USERNAME",
"value": "applicationtest"
},
{
"context": "rong argument type\n\t\t\t\tuid = 'appdomainupdateerrorstest'\n\t\t\t\tenv.data.apps.create {name:'myapp'}, { id: 1",
"end": 9022,
"score": 0.6421548128128052,
"start": 9017,
"tag": "USERNAME",
"value": "stest"
},
{
"context": "ps.getDomains (success case)', (done) ->\n\t\tuid = 'appdomainretrievaltestestte'\n\t\tenv.data.apps.create {",
"end": 9368,
"score": 0.5224789381027222,
"start": 9365,
"tag": "USERNAME",
"value": "app"
},
{
"context": "success case)', (done) ->\n\t\tuid = 'appdomainretrievaltestestte'\n\t\tenv.data.apps.create {name: 'myapp'}, {id: 12}",
"end": 9392,
"score": 0.6840598583221436,
"start": 9380,
"tag": "USERNAME",
"value": "valtestestte"
},
{
"context": "pps.getDomains (error cases)', (done) ->\n\t\tuid = 'inexistingapplicationtest'\n\t\tenv.data.apps.getDomains uid, (err, domains) -",
"end": 9865,
"score": 0.7683867812156677,
"start": 9840,
"tag": "USERNAME",
"value": "inexistingapplicationtest"
},
{
"context": "pps.remDomain (success case)', (done) ->\n\t\tuid = 'appremovaltestesttestestte'\n\t\tenv.data.apps.create {name: 'myapp', domains: ",
"end": 10121,
"score": 0.7177305221557617,
"start": 10095,
"tag": "USERNAME",
"value": "appremovaltestesttestestte"
},
{
"context": "eries [\n\t\t\t(next) ->\n\t\t\t\t# unknown key\n\t\t\t\tuid = 'inexistingapplicationtest'\n\t\t\t\tenv.data.apps.remDomain uid, 'domain1', (err",
"end": 10659,
"score": 0.841468095779419,
"start": 10634,
"tag": "USERNAME",
"value": "inexistingapplicationtest"
},
{
"context": "\t\t\t(next) ->\n\t\t\t\t# wrong argument type\n\t\t\t\tuid = 'appremovalestesttestestte'\n\t\t\t\tenv.data.apps.create {name: 'myapp', domains",
"end": 10877,
"score": 0.9321110844612122,
"start": 10852,
"tag": "USERNAME",
"value": "appremovalestesttestestte"
},
{
"context": " app) ->\n\t\t\t\t\tenv.data.apps.remDomain app.key, 'hohoho', (err) ->\n\t\t\t\t\t\texpect(err).toBeDefined()\n\t\t\t\t",
"end": 11017,
"score": 0.6349393725395203,
"start": 11015,
"tag": "KEY",
"value": "ho"
},
{
"context": "ps.setBackend (success case)', (done) ->\n\t\tuid = 'appbackendsettestesttes'\n\t\tenv.data.apps.create {name: 'myapp'}, {id: 13}",
"end": 11329,
"score": 0.9670822024345398,
"start": 11306,
"tag": "USERNAME",
"value": "appbackendsettestesttes"
},
{
"context": "ps.getBackend (success case)', (done) ->\n\t\tuid = 'appbackendgettestesttes'\n\t\tenv.data.apps.create {name: 'myapp'}, {id: 13}",
"end": 12083,
"score": 0.9954166412353516,
"start": 12060,
"tag": "USERNAME",
"value": "appbackendgettestesttes"
},
{
"context": "ps.remBackend (success case)', (done) ->\n\t\tuid = 'appbackendrmttestesttes'\n\t\tenv.data.apps.create {name: 'myapp'}, {id: 13}",
"end": 12633,
"score": 0.9945248961448669,
"start": 12610,
"tag": "USERNAME",
"value": "appbackendrmttestesttes"
},
{
"context": "pps.addKeyset (success case)', (done) ->\n\t\tuid = 'appkeysetaddttestesttes'\n\t\tenv.data.apps.create {name: 'myapp'}, {id: 13}",
"end": 13198,
"score": 0.9936138391494751,
"start": 13175,
"tag": "USERNAME",
"value": "appkeysetaddttestesttes"
},
{
"context": "oBe('both')\n\t\t\t\t\t\t\tnext()\n\t\t\t(next) ->\n\t\t\t\tuid = 'appkeysetget2ttestesttes'\n\t\t\t\tenv.data.apps.create {name: 'myapp'}, {id: 1",
"end": 15043,
"score": 0.8842718601226807,
"start": 15019,
"tag": "USERNAME",
"value": "appkeysetget2ttestesttes"
},
{
"context": "Be('code')\n\t\t\t\t\t\t\t\tnext()\n\t\t\t(next) ->\n\t\t\t\tuid = 'appkeysetget3ttestesttes'\n\t\t\t\tenv.data.apps.create {name: 'myapp'}, {id: 1",
"end": 15624,
"score": 0.8879197835922241,
"start": 15600,
"tag": "USERNAME",
"value": "appkeysetget3ttestesttes"
},
{
"context": "val - env.data.apps.getOwner', (done) ->\n\t\tuid = 'appownerretrievaltestest'\n\t\tenv.data.apps.create {name: 'myapp'}, {id: 54}",
"end": 16390,
"score": 0.9041449427604675,
"start": 16366,
"tag": "USERNAME",
"value": "appownerretrievaltestest"
},
{
"context": "s.checkSecret (success case)', (done) ->\n\t\tuid = 'appsecretchecktestesteste'\n\t\tenv.data.apps.create {name: 'myapp'}, { id: 55",
"end": 16766,
"score": 0.9953855872154236,
"start": 16741,
"tag": "USERNAME",
"value": "appsecretchecktestesteste"
},
{
"context": "ps.checkSecret (error cases)', (done) ->\n\t\tuid = 'appsecretchecktestesteste'\n\t\tuid2 = 'appsecretchecktestesseste'\n\t\tenv.data.",
"end": 17077,
"score": 0.9953518509864807,
"start": 17052,
"tag": "USERNAME",
"value": "appsecretchecktestesteste"
},
{
"context": " ->\n\t\tuid = 'appsecretchecktestesteste'\n\t\tuid2 = 'appsecretchecktestesseste'\n\t\tenv.data.apps.create {name: 'myapp'}, { id: 55",
"end": 17114,
"score": 0.9681341052055359,
"start": 17089,
"tag": "USERNAME",
"value": "appsecretchecktestesseste"
}
] | tests/spec/data/data-apps-spec.coffee | pmstss/oauthd | 443 | testConfig = require '../../test-config'
coreModule = require testConfig.project_root + '/src/core'
dataModule = require testConfig.project_root + '/src/data'
async = require 'async'
describe 'Data - apps module', () ->
env = {
mode: 'test'
}
uid = 'notexisting'
logs = []
beforeEach () ->
env = {
mode: 'test'
}
coreModule(env).initEnv()
coreModule(env).initConfig()
coreModule(env).initUtilities()
dataModule(env)
if ! env.data.apps.___modcreate
env.data.apps.___modcreate = true
oldCreate = env.data.apps.create
env.data.apps.create = (app, user, cb) ->
callback = (err, data) ->
return cb err if err
env.data.redis.sadd 'u:' + user.id + ':apps', data.id, ->
return cb err if err
cb err, data
oldCreate.apply(env.data.apps, [app, user, callback])
env.debug = () ->
logs.push(arguments)
env.data.generateUid = () ->
return uid
it 'Application creation - env.data.apps.create (success case)', (done) ->
expect(env.data.apps.create).toBeDefined()
uid = '-0b'
env.data.apps.create { name: 'myapp' }, { id: 1 }, (err, app) ->
expect(err).toBe(null)
expect(typeof app).toBe('object')
expect(typeof app.id).toBe('number')
expect(app.name).toBe('myapp')
expect(app.key).toBe('-0b')
env.data.redis.mget [
'a:' + app.id + ':name',
'a:' + app.id + ':key',
'a:' + app.id + ':secret',
'a:' + app.id + ':owner',
'a:' + app.id + ':date'
], (err, result) ->
expect(err).toBe(null)
expect(result[0]).toBe('myapp')
expect(result[1]).toBe('-0b')
expect(result[2]).toBe('-0b')
expect(result[3]).toBe('1')
expect(result[4]).toMatch(/^[0-9]+$/)
env.data.redis.hget 'a:keys', '-0b', (err, id) ->
expect(id).toBe(app.id)
done()
it 'Application creation - env.data.apps.create (error cases)', (done) ->
uid = '-0b'
async.series [
(next) ->
env.data.apps.create undefined, { id: 1 }, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('You must specify a name and at least one domain for your application.')
next()
(next) ->
env.data.apps.create {name:'myapp'}, undefined, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('The user must be defined and contain the field \'id\'')
next()
(next) ->
env.data.apps.create {name: undefined}, { id: 1 }, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('You must specify a name and at least one domain for your application.')
next()
(next) ->
env.data.apps.create {name:'myapp'}, {id: undefined}, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('The user must be defined and contain the field \'id\'')
next()
], (err) ->
done()
it 'Application retrieval by owner - env.data.apps.getByOwner (success case)', (done) ->
uid = '-1a'
env.data.apps.create {name:'myapp'}, { id: 5 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getByOwner 5, (err, apps) ->
expect(err).toBeNull()
app = apps[0]
expect(typeof app).toBe('object')
expect(app.name).toBe('myapp')
expect(app.key).toBe('-1a')
expect(app.secret).toBe('-1a')
expect(app.owner).toBe(5)
done()
it 'Application retrieval by owner - env.data.apps.getByOwner (error cases)', (done) ->
uid = '-1a'
env.data.apps.create {name:'myapp'}, { id: 6 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getByOwner 6, (err, apps) ->
expect(apps.length).toBe(1)
done()
it 'Application retrieval by id - env.data.apps.getById', (done) ->
uid = '-2a'
env.data.apps.create {name:'myapp'}, { id: 1 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getById app.id, (err, app2) ->
expect(err).toBeNull()
expect(typeof app2).toBe('object')
expect(app2.name).toBe('myapp')
expect(app2.owner).toBe('1')
expect(app2.id).toBe(app.id)
expect(app2.key).toBe('-2a')
expect(app2.secret).toBe('-2a')
done()
it 'Application retrieval by key - env.data.apps.get (success case)', (done) ->
uid = 'qwertyuiop1234567890asd'
env.data.apps.create {name:'myapp'}, { id: 10 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.get app.key, (err, app2) ->
expect(err).toBeNull()
expect(typeof app2).toBe('object')
expect(app2.name).toBe('myapp')
expect(app2.owner).toBe('10')
expect(app2.id).toBe(app.id)
expect(app2.key).toBe(uid)
expect(app2.secret).toBe(uid)
done()
it 'Application key reset - env.data.apps.update (success case)', (done) ->
uid = 'yahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.update app.key, { name: 'anothername' }, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':name', (err, name) ->
expect(name).toBe('anothername')
done()
uid = '2ahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.update app.key, { domains: ['somedomain'] }, (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(domains[0]).toBe('somedomain')
done()
it 'Application key reset - env.data.apps.update (error cases)', (done) ->
# existing app with undefined
uid = '3ahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.update app.key, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
done()
# unexisting app
uid = '4ahouyahouyahouyahouyahou'
env.data.apps.update uid, {name: 'hey'}, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
done()
it 'Application key reset - env.data.apps.resetKey', (done) ->
uid = '5testestestestestesteste'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
uid = 'newkeynewkeynewkeynewkey'
env.data.apps.resetKey app.key, (err, result) ->
expect(result.key).toBe(uid)
expect(result.secret).toBe(uid)
done()
it 'Application removal - env.data.apps.remove (success case)', (done) ->
uid = 'applicationremovaltesttes'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.remove app.key, (err) ->
expect(err).toBeUndefined()
env.data.redis.keys 'a:' + app.id + '*', (err, keys) ->
expect(keys.length).toBe(0)
env.data.redis.hget 'a:keys', app.key, (err, id) ->
expect(err).toBe(null)
expect(id).toBe(null)
done()
it 'Application removal - env.data.apps.remove (error cases)', (done) ->
uid = 'inexistingapplicationtest'
env.data.apps.remove uid, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
done()
it 'Application domain update - env.data.apps.updateDomains (success case)', (done) ->
uid = 'appdomainupdatetestestest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.updateDomains app.key, ['domain1', 'domain2'], (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(2)
expect(domains[0]).toBe('domain1')
expect(domains[1]).toBe('domain2')
done()
it 'Application domain update - env.data.apps.updateDomains (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.updateDomains uid, ['domain1', 'domain2'], (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appdomainupdateerrorstest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.updateDomains uid, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
next()
], () ->
done()
it 'Application domain add - env.data.apps.addDomain (success case)', (done) ->
uid = 'appdomainaddadderrorstest'
env.data.apps.create {name:'myapps'}, {id: 12}, (err, app) ->
env.data.apps.addDomain app.key, 'somedomain', (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(1)
expect(domains[0]).toBe('somedomain')
done()
it 'Application domain add - env.data.apps.addDomain (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.addDomain uid, 'domain1', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appdomainupdateerrorstest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.addDomain uid, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
next()
], () ->
done()
it 'Application domain retrieval - env.data.apps.getDomains (success case)', (done) ->
uid = 'appdomainretrievaltestestte'
env.data.apps.create {name: 'myapp'}, {id: 12}, (err, app) ->
env.data.apps.updateDomains app.key, ['domain1', 'domain2'], (err) ->
env.data.apps.getDomains app.key, (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(2)
expect(domains[0]).toBe('domain1')
expect(domains[1]).toBe('domain2')
done()
it 'Application domain retrieval - env.data.apps.getDomains (error cases)', (done) ->
uid = 'inexistingapplicationtest'
env.data.apps.getDomains uid, (err, domains) ->
expect(err).not.toBeNull()
expect(err.message).toBe('Unknown key')
done()
it 'Application domain removal - env.data.apps.remDomain (success case)', (done) ->
uid = 'appremovaltestesttestestte'
env.data.apps.create {name: 'myapp', domains: ['hello', 'world']}, {id: 12}, (err, app) ->
env.data.apps.remDomain app.key, 'hello', (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(1)
expect(domains[0]).toBe('world')
done()
it 'Application domain removal - env.data.apps.remDomain (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.remDomain uid, 'domain1', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appremovalestesttestestte'
env.data.apps.create {name: 'myapp', domains: ['hello', 'world']}, {id: 12}, (err, app) ->
env.data.apps.remDomain app.key, 'hohoho', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Invalid format')
expect(err.body?.domain).toBe('hohoho is already non-valid')
done()
], () ->
done()
it 'Application backend set - env.data.apps.setBackend (success case)', (done) ->
uid = 'appbackendsettestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':backend:name', (err, name) ->
expect(err).toBeNull()
expect(name).toBe('backend')
env.data.redis.get 'a:' + app.id + ':backend:value', (err, value) ->
expect(err).toBeNull()
try
value = JSON.parse(value)
catch error
expect(error).toBeUndefined()
finally
expect(typeof value).toBe('object')
expect(value.somekey).toBe('somevalue')
done()
it 'Application backend retrieval - env.data.apps.getBackend (success case)', (done) ->
uid = 'appbackendgettestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
env.data.apps.getBackend app.key, (err, backend) ->
expect(err).toBeNull()
expect(typeof backend).toBe('object')
expect(backend.name).toBe('backend')
expect(typeof backend.value).toBe('object')
expect(backend.value.somekey).toBe('somevalue')
done()
it 'Application backend removal - env.data.apps.remBackend (success case)', (done) ->
uid = 'appbackendrmttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
env.data.apps.remBackend app.key, (err) ->
expect(err).toBeUndefined()
env.data.redis.mget ['a:' + app.id + ':backend:name', 'a:' + app.id + ':backend:value'], (err, result) ->
expect(err).toBeNull()
expect(result[0]).toBeNull()
expect(result[1]).toBeNull()
done()
it 'Application keyset add - env.data.apps.addKeyset (success case)', (done) ->
uid = 'appkeysetaddttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':k:someprovider', (err, data) ->
expect(err).toBeNull()
try
keyset = JSON.parse data
catch error
expect(error).toBeUndefined()
finally
expect(keyset.hello).toBe('world')
done()
it 'Application keysets retrieval - env.data.apps.getKeysets (success case)', (done) ->
uid = 'appkeysetsgetttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeysets app.key, (err, keysets) ->
expect(err).toBeNull()
expect(keysets.length).toBe(1)
expect(keysets[0]).toBe('someprovider')
done()
xit 'Application keyset removal - env.data.apps.remKeyset', (done) ->
done()
xit 'Application keyset retrieval with response type - env.data.apps.getKeysetWithResponseType', (done) ->
done()
it 'Application keyset retrieval with right response_types - env.data.apps.getKeysets (success case)', (done) ->
async.series [
(next) ->
uid = 'appkeysetgettttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('both')
next()
(next) ->
uid = 'appkeysetget2ttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'php', {}, (err) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('code')
next()
(next) ->
uid = 'appkeysetget3ttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'php', { client_side: true }, (err) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('both')
next()
], () ->
done()
xit 'Application domain verification - env.data.apps.checkDomain', (done) ->
done()
it 'Application owner retrieval - env.data.apps.getOwner', (done) ->
uid = 'appownerretrievaltestest'
env.data.apps.create {name: 'myapp'}, {id: 54}, (err, app) ->
env.data.apps.getOwner app.key, (err, user) ->
expect(err).toBeNull()
expect(typeof user).toBe('object')
expect(user.id).toBeDefined()
expect(user.id).toBe(54)
done()
it 'Application secret check - env.data.apps.checkSecret (success case)', (done) ->
uid = 'appsecretchecktestesteste'
env.data.apps.create {name: 'myapp'}, { id: 55 }, (err, app) ->
env.data.apps.checkSecret app.key, uid, (err, bool) ->
expect(err).toBeNull()
expect(bool).toBe(true)
done()
it 'Application secret check - env.data.apps.checkSecret (error cases)', (done) ->
uid = 'appsecretchecktestesteste'
uid2 = 'appsecretchecktestesseste'
env.data.apps.create {name: 'myapp'}, { id: 55 }, (err, app) ->
env.data.apps.checkSecret app.key, uid2, (err, bool) ->
expect(err).toBeNull()
expect(bool).toBe(false)
done()
| 206489 | testConfig = require '../../test-config'
coreModule = require testConfig.project_root + '/src/core'
dataModule = require testConfig.project_root + '/src/data'
async = require 'async'
describe 'Data - apps module', () ->
env = {
mode: 'test'
}
uid = 'notexisting'
logs = []
beforeEach () ->
env = {
mode: 'test'
}
coreModule(env).initEnv()
coreModule(env).initConfig()
coreModule(env).initUtilities()
dataModule(env)
if ! env.data.apps.___modcreate
env.data.apps.___modcreate = true
oldCreate = env.data.apps.create
env.data.apps.create = (app, user, cb) ->
callback = (err, data) ->
return cb err if err
env.data.redis.sadd 'u:' + user.id + ':apps', data.id, ->
return cb err if err
cb err, data
oldCreate.apply(env.data.apps, [app, user, callback])
env.debug = () ->
logs.push(arguments)
env.data.generateUid = () ->
return uid
it 'Application creation - env.data.apps.create (success case)', (done) ->
expect(env.data.apps.create).toBeDefined()
uid = '-0b'
env.data.apps.create { name: 'myapp' }, { id: 1 }, (err, app) ->
expect(err).toBe(null)
expect(typeof app).toBe('object')
expect(typeof app.id).toBe('number')
expect(app.name).toBe('myapp')
expect(app.key).toBe('-<KEY>')
env.data.redis.mget [
'a:' + app.id + ':name',
'a:' + app.id + ':key',
'a:' + app.id + ':secret',
'a:' + app.id + ':owner',
'a:' + app.id + ':date'
], (err, result) ->
expect(err).toBe(null)
expect(result[0]).toBe('myapp')
expect(result[1]).toBe('-0b')
expect(result[2]).toBe('-0b')
expect(result[3]).toBe('1')
expect(result[4]).toMatch(/^[0-9]+$/)
env.data.redis.hget 'a:keys', '-0b', (err, id) ->
expect(id).toBe(app.id)
done()
it 'Application creation - env.data.apps.create (error cases)', (done) ->
uid = '-0b'
async.series [
(next) ->
env.data.apps.create undefined, { id: 1 }, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('You must specify a name and at least one domain for your application.')
next()
(next) ->
env.data.apps.create {name:'myapp'}, undefined, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('The user must be defined and contain the field \'id\'')
next()
(next) ->
env.data.apps.create {name: undefined}, { id: 1 }, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('You must specify a name and at least one domain for your application.')
next()
(next) ->
env.data.apps.create {name:'myapp'}, {id: undefined}, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('The user must be defined and contain the field \'id\'')
next()
], (err) ->
done()
it 'Application retrieval by owner - env.data.apps.getByOwner (success case)', (done) ->
uid = '-1a'
env.data.apps.create {name:'myapp'}, { id: 5 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getByOwner 5, (err, apps) ->
expect(err).toBeNull()
app = apps[0]
expect(typeof app).toBe('object')
expect(app.name).toBe('myapp')
expect(app.key).toBe('-<KEY>')
expect(app.secret).toBe('-<KEY>')
expect(app.owner).toBe(5)
done()
it 'Application retrieval by owner - env.data.apps.getByOwner (error cases)', (done) ->
uid = '-1a'
env.data.apps.create {name:'myapp'}, { id: 6 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getByOwner 6, (err, apps) ->
expect(apps.length).toBe(1)
done()
it 'Application retrieval by id - env.data.apps.getById', (done) ->
uid = '-2a'
env.data.apps.create {name:'myapp'}, { id: 1 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getById app.id, (err, app2) ->
expect(err).toBeNull()
expect(typeof app2).toBe('object')
expect(app2.name).toBe('myapp')
expect(app2.owner).toBe('1')
expect(app2.id).toBe(app.id)
expect(app2.key).toBe('-<KEY>')
expect(app2.secret).toBe('-<KEY>')
done()
it 'Application retrieval by key - env.data.apps.get (success case)', (done) ->
uid = 'qwertyuiop1234567890asd'
env.data.apps.create {name:'myapp'}, { id: 10 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.get app.key, (err, app2) ->
expect(err).toBeNull()
expect(typeof app2).toBe('object')
expect(app2.name).toBe('myapp')
expect(app2.owner).toBe('10')
expect(app2.id).toBe(app.id)
expect(app2.key).toBe(uid)
expect(app2.secret).toBe(uid)
done()
it 'Application key reset - env.data.apps.update (success case)', (done) ->
uid = 'yahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.update app.key, { name: 'anothername' }, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':name', (err, name) ->
expect(name).toBe('anothername')
done()
uid = '2ahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.update app.key, { domains: ['somedomain'] }, (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(domains[0]).toBe('somedomain')
done()
it 'Application key reset - env.data.apps.update (error cases)', (done) ->
# existing app with undefined
uid = '3ahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.update app.key, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
done()
# unexisting app
uid = '4ahouyahouyahouyahouyahou'
env.data.apps.update uid, {name: 'hey'}, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
done()
it 'Application key reset - env.data.apps.resetKey', (done) ->
uid = '5testestestestestesteste'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
uid = 'newkeynewkeynewkeynewkey'
env.data.apps.resetKey app.key, (err, result) ->
expect(result.key).toBe(uid)
expect(result.secret).toBe(uid)
done()
it 'Application removal - env.data.apps.remove (success case)', (done) ->
uid = 'applicationremovaltesttes'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.remove app.key, (err) ->
expect(err).toBeUndefined()
env.data.redis.keys 'a:' + app.id + '*', (err, keys) ->
expect(keys.length).toBe(0)
env.data.redis.hget 'a:keys', app.key, (err, id) ->
expect(err).toBe(null)
expect(id).toBe(null)
done()
it 'Application removal - env.data.apps.remove (error cases)', (done) ->
uid = 'inexistingapplicationtest'
env.data.apps.remove uid, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
done()
it 'Application domain update - env.data.apps.updateDomains (success case)', (done) ->
uid = 'appdomainupdatetestestest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.updateDomains app.key, ['domain1', 'domain2'], (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(2)
expect(domains[0]).toBe('domain1')
expect(domains[1]).toBe('domain2')
done()
it 'Application domain update - env.data.apps.updateDomains (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.updateDomains uid, ['domain1', 'domain2'], (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appdomainupdateerrorstest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.updateDomains uid, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
next()
], () ->
done()
it 'Application domain add - env.data.apps.addDomain (success case)', (done) ->
uid = 'appdomainaddadderrorstest'
env.data.apps.create {name:'myapps'}, {id: 12}, (err, app) ->
env.data.apps.addDomain app.key, 'somedomain', (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(1)
expect(domains[0]).toBe('somedomain')
done()
it 'Application domain add - env.data.apps.addDomain (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.addDomain uid, 'domain1', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appdomainupdateerrorstest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.addDomain uid, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
next()
], () ->
done()
it 'Application domain retrieval - env.data.apps.getDomains (success case)', (done) ->
uid = 'appdomainretrievaltestestte'
env.data.apps.create {name: 'myapp'}, {id: 12}, (err, app) ->
env.data.apps.updateDomains app.key, ['domain1', 'domain2'], (err) ->
env.data.apps.getDomains app.key, (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(2)
expect(domains[0]).toBe('domain1')
expect(domains[1]).toBe('domain2')
done()
it 'Application domain retrieval - env.data.apps.getDomains (error cases)', (done) ->
uid = 'inexistingapplicationtest'
env.data.apps.getDomains uid, (err, domains) ->
expect(err).not.toBeNull()
expect(err.message).toBe('Unknown key')
done()
it 'Application domain removal - env.data.apps.remDomain (success case)', (done) ->
uid = 'appremovaltestesttestestte'
env.data.apps.create {name: 'myapp', domains: ['hello', 'world']}, {id: 12}, (err, app) ->
env.data.apps.remDomain app.key, 'hello', (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(1)
expect(domains[0]).toBe('world')
done()
it 'Application domain removal - env.data.apps.remDomain (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.remDomain uid, 'domain1', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appremovalestesttestestte'
env.data.apps.create {name: 'myapp', domains: ['hello', 'world']}, {id: 12}, (err, app) ->
env.data.apps.remDomain app.key, 'ho<KEY>ho', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Invalid format')
expect(err.body?.domain).toBe('hohoho is already non-valid')
done()
], () ->
done()
it 'Application backend set - env.data.apps.setBackend (success case)', (done) ->
uid = 'appbackendsettestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':backend:name', (err, name) ->
expect(err).toBeNull()
expect(name).toBe('backend')
env.data.redis.get 'a:' + app.id + ':backend:value', (err, value) ->
expect(err).toBeNull()
try
value = JSON.parse(value)
catch error
expect(error).toBeUndefined()
finally
expect(typeof value).toBe('object')
expect(value.somekey).toBe('somevalue')
done()
it 'Application backend retrieval - env.data.apps.getBackend (success case)', (done) ->
uid = 'appbackendgettestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
env.data.apps.getBackend app.key, (err, backend) ->
expect(err).toBeNull()
expect(typeof backend).toBe('object')
expect(backend.name).toBe('backend')
expect(typeof backend.value).toBe('object')
expect(backend.value.somekey).toBe('somevalue')
done()
it 'Application backend removal - env.data.apps.remBackend (success case)', (done) ->
uid = 'appbackendrmttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
env.data.apps.remBackend app.key, (err) ->
expect(err).toBeUndefined()
env.data.redis.mget ['a:' + app.id + ':backend:name', 'a:' + app.id + ':backend:value'], (err, result) ->
expect(err).toBeNull()
expect(result[0]).toBeNull()
expect(result[1]).toBeNull()
done()
it 'Application keyset add - env.data.apps.addKeyset (success case)', (done) ->
uid = 'appkeysetaddttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':k:someprovider', (err, data) ->
expect(err).toBeNull()
try
keyset = JSON.parse data
catch error
expect(error).toBeUndefined()
finally
expect(keyset.hello).toBe('world')
done()
it 'Application keysets retrieval - env.data.apps.getKeysets (success case)', (done) ->
uid = 'appkeysetsgetttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeysets app.key, (err, keysets) ->
expect(err).toBeNull()
expect(keysets.length).toBe(1)
expect(keysets[0]).toBe('someprovider')
done()
xit 'Application keyset removal - env.data.apps.remKeyset', (done) ->
done()
xit 'Application keyset retrieval with response type - env.data.apps.getKeysetWithResponseType', (done) ->
done()
it 'Application keyset retrieval with right response_types - env.data.apps.getKeysets (success case)', (done) ->
async.series [
(next) ->
uid = 'appkeysetgettttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('both')
next()
(next) ->
uid = 'appkeysetget2ttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'php', {}, (err) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('code')
next()
(next) ->
uid = 'appkeysetget3ttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'php', { client_side: true }, (err) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('both')
next()
], () ->
done()
xit 'Application domain verification - env.data.apps.checkDomain', (done) ->
done()
it 'Application owner retrieval - env.data.apps.getOwner', (done) ->
uid = 'appownerretrievaltestest'
env.data.apps.create {name: 'myapp'}, {id: 54}, (err, app) ->
env.data.apps.getOwner app.key, (err, user) ->
expect(err).toBeNull()
expect(typeof user).toBe('object')
expect(user.id).toBeDefined()
expect(user.id).toBe(54)
done()
it 'Application secret check - env.data.apps.checkSecret (success case)', (done) ->
uid = 'appsecretchecktestesteste'
env.data.apps.create {name: 'myapp'}, { id: 55 }, (err, app) ->
env.data.apps.checkSecret app.key, uid, (err, bool) ->
expect(err).toBeNull()
expect(bool).toBe(true)
done()
it 'Application secret check - env.data.apps.checkSecret (error cases)', (done) ->
uid = 'appsecretchecktestesteste'
uid2 = 'appsecretchecktestesseste'
env.data.apps.create {name: 'myapp'}, { id: 55 }, (err, app) ->
env.data.apps.checkSecret app.key, uid2, (err, bool) ->
expect(err).toBeNull()
expect(bool).toBe(false)
done()
| true | testConfig = require '../../test-config'
coreModule = require testConfig.project_root + '/src/core'
dataModule = require testConfig.project_root + '/src/data'
async = require 'async'
describe 'Data - apps module', () ->
env = {
mode: 'test'
}
uid = 'notexisting'
logs = []
beforeEach () ->
env = {
mode: 'test'
}
coreModule(env).initEnv()
coreModule(env).initConfig()
coreModule(env).initUtilities()
dataModule(env)
if ! env.data.apps.___modcreate
env.data.apps.___modcreate = true
oldCreate = env.data.apps.create
env.data.apps.create = (app, user, cb) ->
callback = (err, data) ->
return cb err if err
env.data.redis.sadd 'u:' + user.id + ':apps', data.id, ->
return cb err if err
cb err, data
oldCreate.apply(env.data.apps, [app, user, callback])
env.debug = () ->
logs.push(arguments)
env.data.generateUid = () ->
return uid
it 'Application creation - env.data.apps.create (success case)', (done) ->
expect(env.data.apps.create).toBeDefined()
uid = '-0b'
env.data.apps.create { name: 'myapp' }, { id: 1 }, (err, app) ->
expect(err).toBe(null)
expect(typeof app).toBe('object')
expect(typeof app.id).toBe('number')
expect(app.name).toBe('myapp')
expect(app.key).toBe('-PI:KEY:<KEY>END_PI')
env.data.redis.mget [
'a:' + app.id + ':name',
'a:' + app.id + ':key',
'a:' + app.id + ':secret',
'a:' + app.id + ':owner',
'a:' + app.id + ':date'
], (err, result) ->
expect(err).toBe(null)
expect(result[0]).toBe('myapp')
expect(result[1]).toBe('-0b')
expect(result[2]).toBe('-0b')
expect(result[3]).toBe('1')
expect(result[4]).toMatch(/^[0-9]+$/)
env.data.redis.hget 'a:keys', '-0b', (err, id) ->
expect(id).toBe(app.id)
done()
it 'Application creation - env.data.apps.create (error cases)', (done) ->
uid = '-0b'
async.series [
(next) ->
env.data.apps.create undefined, { id: 1 }, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('You must specify a name and at least one domain for your application.')
next()
(next) ->
env.data.apps.create {name:'myapp'}, undefined, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('The user must be defined and contain the field \'id\'')
next()
(next) ->
env.data.apps.create {name: undefined}, { id: 1 }, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('You must specify a name and at least one domain for your application.')
next()
(next) ->
env.data.apps.create {name:'myapp'}, {id: undefined}, (err, app) ->
expect(err).toBeDefined()
expect(app).toBeUndefined()
expect(err.message).toBe('The user must be defined and contain the field \'id\'')
next()
], (err) ->
done()
it 'Application retrieval by owner - env.data.apps.getByOwner (success case)', (done) ->
uid = '-1a'
env.data.apps.create {name:'myapp'}, { id: 5 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getByOwner 5, (err, apps) ->
expect(err).toBeNull()
app = apps[0]
expect(typeof app).toBe('object')
expect(app.name).toBe('myapp')
expect(app.key).toBe('-PI:KEY:<KEY>END_PI')
expect(app.secret).toBe('-PI:KEY:<KEY>END_PI')
expect(app.owner).toBe(5)
done()
it 'Application retrieval by owner - env.data.apps.getByOwner (error cases)', (done) ->
uid = '-1a'
env.data.apps.create {name:'myapp'}, { id: 6 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getByOwner 6, (err, apps) ->
expect(apps.length).toBe(1)
done()
it 'Application retrieval by id - env.data.apps.getById', (done) ->
uid = '-2a'
env.data.apps.create {name:'myapp'}, { id: 1 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.getById app.id, (err, app2) ->
expect(err).toBeNull()
expect(typeof app2).toBe('object')
expect(app2.name).toBe('myapp')
expect(app2.owner).toBe('1')
expect(app2.id).toBe(app.id)
expect(app2.key).toBe('-PI:KEY:<KEY>END_PI')
expect(app2.secret).toBe('-PI:KEY:<KEY>END_PI')
done()
it 'Application retrieval by key - env.data.apps.get (success case)', (done) ->
uid = 'qwertyuiop1234567890asd'
env.data.apps.create {name:'myapp'}, { id: 10 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.get app.key, (err, app2) ->
expect(err).toBeNull()
expect(typeof app2).toBe('object')
expect(app2.name).toBe('myapp')
expect(app2.owner).toBe('10')
expect(app2.id).toBe(app.id)
expect(app2.key).toBe(uid)
expect(app2.secret).toBe(uid)
done()
it 'Application key reset - env.data.apps.update (success case)', (done) ->
uid = 'yahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.update app.key, { name: 'anothername' }, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':name', (err, name) ->
expect(name).toBe('anothername')
done()
uid = '2ahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
expect(err).toBeNull()
env.data.apps.update app.key, { domains: ['somedomain'] }, (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(domains[0]).toBe('somedomain')
done()
it 'Application key reset - env.data.apps.update (error cases)', (done) ->
# existing app with undefined
uid = '3ahouyahouyahouyahouyahou'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.update app.key, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
done()
# unexisting app
uid = '4ahouyahouyahouyahouyahou'
env.data.apps.update uid, {name: 'hey'}, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
done()
it 'Application key reset - env.data.apps.resetKey', (done) ->
uid = '5testestestestestesteste'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
uid = 'newkeynewkeynewkeynewkey'
env.data.apps.resetKey app.key, (err, result) ->
expect(result.key).toBe(uid)
expect(result.secret).toBe(uid)
done()
it 'Application removal - env.data.apps.remove (success case)', (done) ->
uid = 'applicationremovaltesttes'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.remove app.key, (err) ->
expect(err).toBeUndefined()
env.data.redis.keys 'a:' + app.id + '*', (err, keys) ->
expect(keys.length).toBe(0)
env.data.redis.hget 'a:keys', app.key, (err, id) ->
expect(err).toBe(null)
expect(id).toBe(null)
done()
it 'Application removal - env.data.apps.remove (error cases)', (done) ->
uid = 'inexistingapplicationtest'
env.data.apps.remove uid, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
done()
it 'Application domain update - env.data.apps.updateDomains (success case)', (done) ->
uid = 'appdomainupdatetestestest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.updateDomains app.key, ['domain1', 'domain2'], (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(2)
expect(domains[0]).toBe('domain1')
expect(domains[1]).toBe('domain2')
done()
it 'Application domain update - env.data.apps.updateDomains (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.updateDomains uid, ['domain1', 'domain2'], (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appdomainupdateerrorstest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.updateDomains uid, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
next()
], () ->
done()
it 'Application domain add - env.data.apps.addDomain (success case)', (done) ->
uid = 'appdomainaddadderrorstest'
env.data.apps.create {name:'myapps'}, {id: 12}, (err, app) ->
env.data.apps.addDomain app.key, 'somedomain', (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(1)
expect(domains[0]).toBe('somedomain')
done()
it 'Application domain add - env.data.apps.addDomain (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.addDomain uid, 'domain1', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appdomainupdateerrorstest'
env.data.apps.create {name:'myapp'}, { id: 12 }, (err, app) ->
env.data.apps.addDomain uid, undefined, (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Bad parameters format')
next()
], () ->
done()
it 'Application domain retrieval - env.data.apps.getDomains (success case)', (done) ->
uid = 'appdomainretrievaltestestte'
env.data.apps.create {name: 'myapp'}, {id: 12}, (err, app) ->
env.data.apps.updateDomains app.key, ['domain1', 'domain2'], (err) ->
env.data.apps.getDomains app.key, (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(2)
expect(domains[0]).toBe('domain1')
expect(domains[1]).toBe('domain2')
done()
it 'Application domain retrieval - env.data.apps.getDomains (error cases)', (done) ->
uid = 'inexistingapplicationtest'
env.data.apps.getDomains uid, (err, domains) ->
expect(err).not.toBeNull()
expect(err.message).toBe('Unknown key')
done()
it 'Application domain removal - env.data.apps.remDomain (success case)', (done) ->
uid = 'appremovaltestesttestestte'
env.data.apps.create {name: 'myapp', domains: ['hello', 'world']}, {id: 12}, (err, app) ->
env.data.apps.remDomain app.key, 'hello', (err) ->
expect(err).toBeUndefined()
env.data.redis.smembers 'a:' + app.id + ':domains', (err, domains) ->
expect(err).toBeNull()
expect(domains.length).toBe(1)
expect(domains[0]).toBe('world')
done()
it 'Application domain removal - env.data.apps.remDomain (error cases)', (done) ->
async.series [
(next) ->
# unknown key
uid = 'inexistingapplicationtest'
env.data.apps.remDomain uid, 'domain1', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Unknown key')
next()
(next) ->
# wrong argument type
uid = 'appremovalestesttestestte'
env.data.apps.create {name: 'myapp', domains: ['hello', 'world']}, {id: 12}, (err, app) ->
env.data.apps.remDomain app.key, 'hoPI:KEY:<KEY>END_PIho', (err) ->
expect(err).toBeDefined()
expect(err.message).toBe('Invalid format')
expect(err.body?.domain).toBe('hohoho is already non-valid')
done()
], () ->
done()
it 'Application backend set - env.data.apps.setBackend (success case)', (done) ->
uid = 'appbackendsettestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':backend:name', (err, name) ->
expect(err).toBeNull()
expect(name).toBe('backend')
env.data.redis.get 'a:' + app.id + ':backend:value', (err, value) ->
expect(err).toBeNull()
try
value = JSON.parse(value)
catch error
expect(error).toBeUndefined()
finally
expect(typeof value).toBe('object')
expect(value.somekey).toBe('somevalue')
done()
it 'Application backend retrieval - env.data.apps.getBackend (success case)', (done) ->
uid = 'appbackendgettestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
env.data.apps.getBackend app.key, (err, backend) ->
expect(err).toBeNull()
expect(typeof backend).toBe('object')
expect(backend.name).toBe('backend')
expect(typeof backend.value).toBe('object')
expect(backend.value.somekey).toBe('somevalue')
done()
it 'Application backend removal - env.data.apps.remBackend (success case)', (done) ->
uid = 'appbackendrmttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'backend', {somekey: 'somevalue'}, (err) ->
env.data.apps.remBackend app.key, (err) ->
expect(err).toBeUndefined()
env.data.redis.mget ['a:' + app.id + ':backend:name', 'a:' + app.id + ':backend:value'], (err, result) ->
expect(err).toBeNull()
expect(result[0]).toBeNull()
expect(result[1]).toBeNull()
done()
it 'Application keyset add - env.data.apps.addKeyset (success case)', (done) ->
uid = 'appkeysetaddttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.redis.get 'a:' + app.id + ':k:someprovider', (err, data) ->
expect(err).toBeNull()
try
keyset = JSON.parse data
catch error
expect(error).toBeUndefined()
finally
expect(keyset.hello).toBe('world')
done()
it 'Application keysets retrieval - env.data.apps.getKeysets (success case)', (done) ->
uid = 'appkeysetsgetttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeysets app.key, (err, keysets) ->
expect(err).toBeNull()
expect(keysets.length).toBe(1)
expect(keysets[0]).toBe('someprovider')
done()
xit 'Application keyset removal - env.data.apps.remKeyset', (done) ->
done()
xit 'Application keyset retrieval with response type - env.data.apps.getKeysetWithResponseType', (done) ->
done()
it 'Application keyset retrieval with right response_types - env.data.apps.getKeysets (success case)', (done) ->
async.series [
(next) ->
uid = 'appkeysetgettttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('both')
next()
(next) ->
uid = 'appkeysetget2ttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'php', {}, (err) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('code')
next()
(next) ->
uid = 'appkeysetget3ttestesttes'
env.data.apps.create {name: 'myapp'}, {id: 13}, (err, app) ->
env.data.apps.setBackend app.key, 'php', { client_side: true }, (err) ->
env.data.apps.addKeyset app.key, 'someprovider', { parameters: { hello: 'world' } }, (err) ->
expect(err).toBeUndefined()
env.data.apps.getKeyset app.key, 'someprovider', (err, keyset) ->
expect(err).toBeNull()
expect(keyset.parameters).toBeDefined()
expect(keyset.parameters.hello).toBe('world')
expect(keyset.response_type).toBe('both')
next()
], () ->
done()
xit 'Application domain verification - env.data.apps.checkDomain', (done) ->
done()
it 'Application owner retrieval - env.data.apps.getOwner', (done) ->
uid = 'appownerretrievaltestest'
env.data.apps.create {name: 'myapp'}, {id: 54}, (err, app) ->
env.data.apps.getOwner app.key, (err, user) ->
expect(err).toBeNull()
expect(typeof user).toBe('object')
expect(user.id).toBeDefined()
expect(user.id).toBe(54)
done()
it 'Application secret check - env.data.apps.checkSecret (success case)', (done) ->
uid = 'appsecretchecktestesteste'
env.data.apps.create {name: 'myapp'}, { id: 55 }, (err, app) ->
env.data.apps.checkSecret app.key, uid, (err, bool) ->
expect(err).toBeNull()
expect(bool).toBe(true)
done()
it 'Application secret check - env.data.apps.checkSecret (error cases)', (done) ->
uid = 'appsecretchecktestesteste'
uid2 = 'appsecretchecktestesseste'
env.data.apps.create {name: 'myapp'}, { id: 55 }, (err, app) ->
env.data.apps.checkSecret app.key, uid2, (err, bool) ->
expect(err).toBeNull()
expect(bool).toBe(false)
done()
|
[
{
"context": ".tokenizeLine(\"@name '@name' @name's @name. @name, (@name) [@name]\")\n expect(tokens[0]).toEqual value: \"",
"end": 636,
"score": 0.8746283054351807,
"start": 630,
"tag": "USERNAME",
"value": "(@name"
},
{
"context": "]\n\n {tokens} = grammar.tokenizeLine(\"a username @1337_hubot with numbers, letters and underscores\")\n expec",
"end": 3746,
"score": 0.9832702279090881,
"start": 3735,
"tag": "USERNAME",
"value": "@1337_hubot"
},
{
"context": "]\n\n {tokens} = grammar.tokenizeLine(\"a username @1337-hubot with numbers, letters and hyphens\")\n expect(to",
"end": 4210,
"score": 0.9847206473350525,
"start": 4199,
"tag": "USERNAME",
"value": "@1337-hubot"
},
{
"context": " {tokens} = grammar.tokenizeLine(\"any email like you@domain.com shouldn't mistakenly be matched as a mention\")\n ",
"end": 5009,
"score": 0.9998754262924194,
"start": 4995,
"tag": "EMAIL",
"value": "you@domain.com"
},
{
"context": " expect(tokens[0]).toEqual value: \"any email like you@domain.com shouldn't mistakenly be matched as a mention\", sc",
"end": 5124,
"score": 0.9998804330825806,
"start": 5110,
"tag": "EMAIL",
"value": "you@domain.com"
},
{
"context": "\"source.gfm\"]\n\n {tokens} = grammar.tokenizeLine(\"@person's\")\n expect(tokens[0]).toEqual value: \"@\", sco",
"end": 5240,
"score": 0.894988477230072,
"start": 5233,
"tag": "USERNAME",
"value": "@person"
}
] | spec/pfm-new-spec.coffee | andraus/language-alt-pfm | 1 | describe "Pandoc Flavored Markdown grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-alt-pfm")
runs ->
grammar = atom.grammars.grammarForScopeName("source.gfm")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.gfm"
it "tokenizes mentions", ->
{tokens} = grammar.tokenizeLine("sentence with no space before@name ")
expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[17]).toEqual value: ") ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"@name"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name at the start of a line")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("any email like you@domain.com shouldn't mistakenly be matched as a mention")
expect(tokens[0]).toEqual value: "any email like you@domain.com shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person's")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person;")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
| 73297 | describe "Pandoc Flavored Markdown grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-alt-pfm")
runs ->
grammar = atom.grammars.grammarForScopeName("source.gfm")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.gfm"
it "tokenizes mentions", ->
{tokens} = grammar.tokenizeLine("sentence with no space before@name ")
expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[17]).toEqual value: ") ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"@name"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name at the start of a line")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("any email like <EMAIL> shouldn't mistakenly be matched as a mention")
expect(tokens[0]).toEqual value: "any email like <EMAIL> shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person's")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person;")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
| true | describe "Pandoc Flavored Markdown grammar", ->
grammar = null
beforeEach ->
waitsForPromise ->
atom.packages.activatePackage("language-alt-pfm")
runs ->
grammar = atom.grammars.grammarForScopeName("source.gfm")
it "parses the grammar", ->
expect(grammar).toBeDefined()
expect(grammar.scopeName).toBe "source.gfm"
it "tokenizes mentions", ->
{tokens} = grammar.tokenizeLine("sentence with no space before@name ")
expect(tokens[0]).toEqual value: "sentence with no space before@name ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name '@name' @name's @name. @name, (@name) [@name]")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " '", scopes: ["source.gfm"]
expect(tokens[3]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[4]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[5]).toEqual value: "' ", scopes: ["source.gfm"]
expect(tokens[6]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[7]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[8]).toEqual value: "'s ", scopes: ["source.gfm"]
expect(tokens[9]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[10]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[11]).toEqual value: ". ", scopes: ["source.gfm"]
expect(tokens[12]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[13]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[14]).toEqual value: ", (", scopes: ["source.gfm"]
expect(tokens[15]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[16]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[17]).toEqual value: ") ", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine('"@name"')
expect(tokens[0]).toEqual value: '"', scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: '"', scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name/ and an invalid symbol after")
expect(tokens[0]).toEqual value: "sentence with a space before @name/ and an invalid symbol after", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("sentence with a space before @name that continues")
expect(tokens[0]).toEqual value: "sentence with a space before ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " that continues", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("* @name at the start of an unordered list")
expect(tokens[0]).toEqual value: "*", scopes: ["source.gfm", "variable.unordered.list.gfm"]
expect(tokens[1]).toEqual value: " ", scopes: ["source.gfm"]
expect(tokens[2]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[3]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[4]).toEqual value: " at the start of an unordered list", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337_hubot with numbers, letters and underscores")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337_hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and underscores", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("a username @1337-hubot with numbers, letters and hyphens")
expect(tokens[0]).toEqual value: "a username ", scopes: ["source.gfm"]
expect(tokens[1]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[2]).toEqual value: "1337-hubot", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[3]).toEqual value: " with numbers, letters and hyphens", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@name at the start of a line")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "name", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: " at the start of a line", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("any email like PI:EMAIL:<EMAIL>END_PI shouldn't mistakenly be matched as a mention")
expect(tokens[0]).toEqual value: "any email like PI:EMAIL:<EMAIL>END_PI shouldn't mistakenly be matched as a mention", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person's")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: "'s", scopes: ["source.gfm"]
{tokens} = grammar.tokenizeLine("@person;")
expect(tokens[0]).toEqual value: "@", scopes: ["source.gfm", "variable.mention.gfm"]
expect(tokens[1]).toEqual value: "person", scopes: ["source.gfm", "string.username.gfm"]
expect(tokens[2]).toEqual value: ";", scopes: ["source.gfm"]
|
[
{
"context": "hSemantics\n attributes: [\n name: 'fachgebiet'\n filter: (v)-> f1:v\n ,\n ",
"end": 347,
"score": 0.8879376649856567,
"start": 337,
"tag": "NAME",
"value": "fachgebiet"
},
{
"context": "to the full-text search\", ->\n args = query:{q:\"bienen\"},type:'project'\n expect(semantics.apply args,",
"end": 794,
"score": 0.8181963562965393,
"start": 788,
"tag": "NAME",
"value": "bienen"
},
{
"context": "ing a filter expression\", ->\n args = query:{q:\"bienen\", fachgebiet: \"42,43\"},type:'project'\n query=s",
"end": 1942,
"score": 0.7356646060943604,
"start": 1936,
"tag": "NAME",
"value": "bienen"
}
] | spec/config-types/search-semantics-spec.coffee | lxfrdl/irma | 0 | describe "The Search Semantic", ->
SearchSemantics = require "../../src/config-types/search-semantics"
ConfigNode = require "../../src/config-node"
settings = undefined
semantics = undefined
beforeEach ->
settings =
types:project:
searchSemantics: new SearchSemantics
attributes: [
name: 'fachgebiet'
filter: (v)-> f1:v
,
field: 'q2'
query: true
,
field: 'q3'
boost: 1.5
query: true
,
field: 'q4'
query: true
]
semantics = settings.types.project.searchSemantics
xit "builds a query using the given parser, query semantics and the fields of all attributes that contribute to the full-text search", ->
args = query:{q:"bienen"},type:'project'
expect(semantics.apply args, settings).to.eql
ast: parsed: "bienen"
fields: ['q2','q3','q4']
query: bool:
must:
testQuery:
ast: parsed: "bienen"
opts: fields:
q2:1
q3:1.5
q4:1
filter: []
it "ommits the query part if given an empty query string", ->
args = query:{q:""}, type:'project'
expect(semantics.apply(args,settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "ommits the query part if given a query string containing only whitespace", ->
args = query:{q:" "},type:'project'
expect(semantics.apply(args, settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "ommits the query part if given something falsy instead of a query string", ->
args = query:{},type:'project'
expect(semantics.apply(args, settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "supports filtering by attributes for attributes providing a filter expression", ->
args = query:{q:"bienen", fachgebiet: "42,43"},type:'project'
query=semantics.apply args, settings
expect(query.query.bool.filter).to.eql [
f1:"42,43"
]
| 121294 | describe "The Search Semantic", ->
SearchSemantics = require "../../src/config-types/search-semantics"
ConfigNode = require "../../src/config-node"
settings = undefined
semantics = undefined
beforeEach ->
settings =
types:project:
searchSemantics: new SearchSemantics
attributes: [
name: '<NAME>'
filter: (v)-> f1:v
,
field: 'q2'
query: true
,
field: 'q3'
boost: 1.5
query: true
,
field: 'q4'
query: true
]
semantics = settings.types.project.searchSemantics
xit "builds a query using the given parser, query semantics and the fields of all attributes that contribute to the full-text search", ->
args = query:{q:"<NAME>"},type:'project'
expect(semantics.apply args, settings).to.eql
ast: parsed: "bienen"
fields: ['q2','q3','q4']
query: bool:
must:
testQuery:
ast: parsed: "bienen"
opts: fields:
q2:1
q3:1.5
q4:1
filter: []
it "ommits the query part if given an empty query string", ->
args = query:{q:""}, type:'project'
expect(semantics.apply(args,settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "ommits the query part if given a query string containing only whitespace", ->
args = query:{q:" "},type:'project'
expect(semantics.apply(args, settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "ommits the query part if given something falsy instead of a query string", ->
args = query:{},type:'project'
expect(semantics.apply(args, settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "supports filtering by attributes for attributes providing a filter expression", ->
args = query:{q:"<NAME>", fachgebiet: "42,43"},type:'project'
query=semantics.apply args, settings
expect(query.query.bool.filter).to.eql [
f1:"42,43"
]
| true | describe "The Search Semantic", ->
SearchSemantics = require "../../src/config-types/search-semantics"
ConfigNode = require "../../src/config-node"
settings = undefined
semantics = undefined
beforeEach ->
settings =
types:project:
searchSemantics: new SearchSemantics
attributes: [
name: 'PI:NAME:<NAME>END_PI'
filter: (v)-> f1:v
,
field: 'q2'
query: true
,
field: 'q3'
boost: 1.5
query: true
,
field: 'q4'
query: true
]
semantics = settings.types.project.searchSemantics
xit "builds a query using the given parser, query semantics and the fields of all attributes that contribute to the full-text search", ->
args = query:{q:"PI:NAME:<NAME>END_PI"},type:'project'
expect(semantics.apply args, settings).to.eql
ast: parsed: "bienen"
fields: ['q2','q3','q4']
query: bool:
must:
testQuery:
ast: parsed: "bienen"
opts: fields:
q2:1
q3:1.5
q4:1
filter: []
it "ommits the query part if given an empty query string", ->
args = query:{q:""}, type:'project'
expect(semantics.apply(args,settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "ommits the query part if given a query string containing only whitespace", ->
args = query:{q:" "},type:'project'
expect(semantics.apply(args, settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "ommits the query part if given something falsy instead of a query string", ->
args = query:{},type:'project'
expect(semantics.apply(args, settings)).to.eql
fields: ['q2','q3','q4']
query: bool:
filter: []
it "supports filtering by attributes for attributes providing a filter expression", ->
args = query:{q:"PI:NAME:<NAME>END_PI", fachgebiet: "42,43"},type:'project'
query=semantics.apply args, settings
expect(query.query.bool.filter).to.eql [
f1:"42,43"
]
|
[
{
"context": "ple.com/'\n customer:\n firstName: 'Dan'\n )\n\n customerParams =\n customer",
"end": 455,
"score": 0.9997889399528503,
"start": 452,
"tag": "NAME",
"value": "Dan"
},
{
"context": "rParams =\n customer:\n last_name: 'Smith'\n\n specHelper.simulateTrFormPost url, trData",
"end": 534,
"score": 0.9996670484542847,
"start": 529,
"tag": "NAME",
"value": "Smith"
},
{
"context": " assert.equal(response.customer.firstName, 'Dan')\n assert.equal(response.customer.lastNa",
"end": 839,
"score": 0.9998165369033813,
"start": 836,
"tag": "NAME",
"value": "Dan"
},
{
"context": " assert.equal(response.customer.lastName, 'Smith')\n\n done()\n\n it \"can include the cred",
"end": 899,
"score": 0.9996393918991089,
"start": 894,
"tag": "NAME",
"value": "Smith"
},
{
"context": "ple.com/'\n customer:\n firstName: 'Dan'\n creditCard:\n cardholderName",
"end": 1160,
"score": 0.9998447895050049,
"start": 1157,
"tag": "NAME",
"value": "Dan"
},
{
"context": " creditCard:\n cardholderName: 'Cardholder'\n billingAddress:\n street",
"end": 1223,
"score": 0.9937331676483154,
"start": 1213,
"tag": "NAME",
"value": "Cardholder"
},
{
"context": "rParams =\n customer:\n last_name: 'Smith'\n creditCard:\n number: '51051",
"end": 1375,
"score": 0.9996570348739624,
"start": 1370,
"tag": "NAME",
"value": "Smith"
},
{
"context": " assert.equal(response.customer.firstName, 'Dan')\n assert.equal(response.customer.credit",
"end": 1881,
"score": 0.999846875667572,
"start": 1878,
"tag": "NAME",
"value": "Dan"
},
{
"context": "response.customer.creditCards[0].cardholderName, 'Cardholder')\n assert.equal(response.customer.credit",
"end": 1967,
"score": 0.9899364709854126,
"start": 1957,
"tag": "NAME",
"value": "Cardholder"
},
{
"context": " assert.equal(response.customer.lastName, 'Smith')\n assert.equal(response.customer.credit",
"end": 2130,
"score": 0.9996705651283264,
"start": 2125,
"tag": "NAME",
"value": "Smith"
},
{
"context": "ne) ->\n customerParams =\n firstName: 'Old First Name'\n lastName: 'Old Last Name'\n\n specHel",
"end": 2634,
"score": 0.9920536875724792,
"start": 2620,
"tag": "NAME",
"value": "Old First Name"
},
{
"context": " firstName: 'Old First Name'\n lastName: 'Old Last Name'\n\n specHelper.defaultGateway.customer.create",
"end": 2668,
"score": 0.9847033619880676,
"start": 2655,
"tag": "NAME",
"value": "Old Last Name"
},
{
"context": "er.id\n customer:\n firstName: 'New First Name'\n )\n\n updateParams =\n cust",
"end": 2987,
"score": 0.9732102751731873,
"start": 2973,
"tag": "NAME",
"value": "New First Name"
},
{
"context": "rams =\n customer:\n lastName: 'New Last Name'\n\n specHelper.simulateTrFormPost url, trDa",
"end": 3079,
"score": 0.970446765422821,
"start": 3066,
"tag": "NAME",
"value": "New Last Name"
},
{
"context": " assert.equal(response.customer.firstName, 'New First Name')\n assert.equal(response.customer.last",
"end": 3403,
"score": 0.8859319686889648,
"start": 3389,
"tag": "NAME",
"value": "New First Name"
},
{
"context": " assert.equal(response.customer.lastName, 'New Last Name')\n\n done()\n\n describe \"transactionDat",
"end": 3473,
"score": 0.8879504799842834,
"start": 3460,
"tag": "NAME",
"value": "New Last Name"
},
{
"context": "efaultGateway.customer.create firstName: 'Customer First Name', (err, response) ->\n trData = specHelper.",
"end": 4560,
"score": 0.8003658056259155,
"start": 4550,
"tag": "NAME",
"value": "First Name"
},
{
"context": "response.customer.id\n cardholderName: 'Dan'\n )\n\n creditCardParams =\n ",
"end": 4815,
"score": 0.9997835755348206,
"start": 4812,
"tag": "NAME",
"value": "Dan"
},
{
"context": "assert.equal(response.creditCard.cardholderName, 'Dan')\n assert.equal(response.creditCard.ma",
"end": 5276,
"score": 0.999336838722229,
"start": 5273,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ne) ->\n customerParams =\n firstName: 'Customer First Name'\n creditCard:\n cardholderName: 'O",
"end": 5517,
"score": 0.856065571308136,
"start": 5498,
"tag": "NAME",
"value": "Customer First Name"
},
{
"context": "e'\n creditCard:\n cardholderName: 'Old Cardholder Name'\n number: '5105105105105100'\n e",
"end": 5585,
"score": 0.9742658734321594,
"start": 5566,
"tag": "NAME",
"value": "Old Cardholder Name"
},
{
"context": " creditCard:\n cardholderName: 'New Cardholder Name'\n )\n\n creditCardParams =\n ",
"end": 6017,
"score": 0.9485280513763428,
"start": 5998,
"tag": "NAME",
"value": "New Cardholder Name"
},
{
"context": "assert.equal(response.creditCard.cardholderName, 'New Cardholder Name')\n assert.equal(response.creditCard.ma",
"end": 6456,
"score": 0.9901885986328125,
"start": 6437,
"tag": "NAME",
"value": "New Cardholder Name"
}
] | spec/integration/braintree/transparent_redirect_gateway_spec.coffee | StreamCo/braintree_node | 0 | require('../../spec_helper')
{_} = require('underscore')
braintree = specHelper.braintree
describe "TransparentRedirectGateway", ->
url = specHelper.defaultGateway.transparentRedirect.url
describe "createCustomerData", ->
it "generates tr data for the customer", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.createCustomerData(
redirectUrl: 'http://www.example.com/'
customer:
firstName: 'Dan'
)
customerParams =
customer:
last_name: 'Smith'
specHelper.simulateTrFormPost url, trData, customerParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'Dan')
assert.equal(response.customer.lastName, 'Smith')
done()
it "can include the credit card and billing address", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.createCustomerData(
redirectUrl: 'http://www.example.com/'
customer:
firstName: 'Dan'
creditCard:
cardholderName: 'Cardholder'
billingAddress:
streetAddress: '123 E Fake St'
)
customerParams =
customer:
last_name: 'Smith'
creditCard:
number: '5105105105105100'
expirationMonth: '05'
expirationYear: '2017'
billingAddress:
extendedAddress: '5th Floor'
specHelper.simulateTrFormPost url, trData, customerParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'Dan')
assert.equal(response.customer.creditCards[0].cardholderName, 'Cardholder')
assert.equal(response.customer.creditCards[0].billingAddress.streetAddress, '123 E Fake St')
assert.equal(response.customer.lastName, 'Smith')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2017')
assert.equal(response.customer.creditCards[0].billingAddress.extendedAddress, '5th Floor')
done()
describe "updateCustomerData", ->
it "updates a customer", (done) ->
customerParams =
firstName: 'Old First Name'
lastName: 'Old Last Name'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.updateCustomerData(
redirectUrl: 'http://www.example.com/'
customerId: response.customer.id
customer:
firstName: 'New First Name'
)
updateParams =
customer:
lastName: 'New Last Name'
specHelper.simulateTrFormPost url, trData, updateParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'New First Name')
assert.equal(response.customer.lastName, 'New Last Name')
done()
describe "transactionData", ->
it "creates a transaction", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.transactionData(
redirectUrl: 'http://www.example.com/'
transaction:
amount: 50.00
type: 'sale'
)
transactionParams =
transaction:
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
specHelper.simulateTrFormPost url, trData, transactionParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'authorized')
assert.equal(response.transaction.amount, '50.00')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
done()
describe "createCreditCard", ->
it "creates a credit card", (done) ->
specHelper.defaultGateway.customer.create firstName: 'Customer First Name', (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.createCreditCardData(
redirectUrl: 'http://www.example.com/'
creditCard:
customerId: response.customer.id
cardholderName: 'Dan'
)
creditCardParams =
creditCard:
number: '5105105105105100'
expirationDate: '05/2017'
specHelper.simulateTrFormPost url, trData, creditCardParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.creditCard.cardholderName, 'Dan')
assert.equal(response.creditCard.maskedNumber, '510510******5100')
done()
describe "updateCreditCard", ->
it "updates a credit card", (done) ->
customerParams =
firstName: 'Customer First Name'
creditCard:
cardholderName: 'Old Cardholder Name'
number: '5105105105105100'
expirationDate: '05/2017'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.updateCreditCardData(
redirectUrl: 'http://www.example.com/'
paymentMethodToken: response.customer.creditCards[0].token
creditCard:
cardholderName: 'New Cardholder Name'
)
creditCardParams =
creditCard:
number: '4111111111111111'
specHelper.simulateTrFormPost url, trData, creditCardParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.creditCard.cardholderName, 'New Cardholder Name')
assert.equal(response.creditCard.maskedNumber, '411111******1111')
done()
describe "confirm", ->
it "handles invalid hashes", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'a=b&hash=invalid', (err, response) ->
assert.equal(err.type, braintree.errorTypes.invalidTransparentRedirectHashError)
done()
it "handles status 401", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=401&hash=none', (err, response) ->
assert.equal(err.type, braintree.errorTypes.authenticationError)
done()
it "handles status 403", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=403&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.authorizationError)
done()
it "handles status 426", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=426&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.upgradeRequired)
done()
it "handles status 500", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=500&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.serverError)
done()
it "handles status 503", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=503&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.downForMaintenanceError)
done()
| 192369 | require('../../spec_helper')
{_} = require('underscore')
braintree = specHelper.braintree
describe "TransparentRedirectGateway", ->
url = specHelper.defaultGateway.transparentRedirect.url
describe "createCustomerData", ->
it "generates tr data for the customer", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.createCustomerData(
redirectUrl: 'http://www.example.com/'
customer:
firstName: '<NAME>'
)
customerParams =
customer:
last_name: '<NAME>'
specHelper.simulateTrFormPost url, trData, customerParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME>')
done()
it "can include the credit card and billing address", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.createCustomerData(
redirectUrl: 'http://www.example.com/'
customer:
firstName: '<NAME>'
creditCard:
cardholderName: '<NAME>'
billingAddress:
streetAddress: '123 E Fake St'
)
customerParams =
customer:
last_name: '<NAME>'
creditCard:
number: '5105105105105100'
expirationMonth: '05'
expirationYear: '2017'
billingAddress:
extendedAddress: '5th Floor'
specHelper.simulateTrFormPost url, trData, customerParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.creditCards[0].cardholderName, '<NAME>')
assert.equal(response.customer.creditCards[0].billingAddress.streetAddress, '123 E Fake St')
assert.equal(response.customer.lastName, '<NAME>')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2017')
assert.equal(response.customer.creditCards[0].billingAddress.extendedAddress, '5th Floor')
done()
describe "updateCustomerData", ->
it "updates a customer", (done) ->
customerParams =
firstName: '<NAME>'
lastName: '<NAME>'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.updateCustomerData(
redirectUrl: 'http://www.example.com/'
customerId: response.customer.id
customer:
firstName: '<NAME>'
)
updateParams =
customer:
lastName: '<NAME>'
specHelper.simulateTrFormPost url, trData, updateParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, '<NAME>')
assert.equal(response.customer.lastName, '<NAME>')
done()
describe "transactionData", ->
it "creates a transaction", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.transactionData(
redirectUrl: 'http://www.example.com/'
transaction:
amount: 50.00
type: 'sale'
)
transactionParams =
transaction:
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
specHelper.simulateTrFormPost url, trData, transactionParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'authorized')
assert.equal(response.transaction.amount, '50.00')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
done()
describe "createCreditCard", ->
it "creates a credit card", (done) ->
specHelper.defaultGateway.customer.create firstName: 'Customer <NAME>', (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.createCreditCardData(
redirectUrl: 'http://www.example.com/'
creditCard:
customerId: response.customer.id
cardholderName: '<NAME>'
)
creditCardParams =
creditCard:
number: '5105105105105100'
expirationDate: '05/2017'
specHelper.simulateTrFormPost url, trData, creditCardParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.creditCard.cardholderName, '<NAME>')
assert.equal(response.creditCard.maskedNumber, '510510******5100')
done()
describe "updateCreditCard", ->
it "updates a credit card", (done) ->
customerParams =
firstName: '<NAME>'
creditCard:
cardholderName: '<NAME>'
number: '5105105105105100'
expirationDate: '05/2017'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.updateCreditCardData(
redirectUrl: 'http://www.example.com/'
paymentMethodToken: response.customer.creditCards[0].token
creditCard:
cardholderName: '<NAME>'
)
creditCardParams =
creditCard:
number: '4111111111111111'
specHelper.simulateTrFormPost url, trData, creditCardParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.creditCard.cardholderName, '<NAME>')
assert.equal(response.creditCard.maskedNumber, '411111******1111')
done()
describe "confirm", ->
it "handles invalid hashes", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'a=b&hash=invalid', (err, response) ->
assert.equal(err.type, braintree.errorTypes.invalidTransparentRedirectHashError)
done()
it "handles status 401", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=401&hash=none', (err, response) ->
assert.equal(err.type, braintree.errorTypes.authenticationError)
done()
it "handles status 403", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=403&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.authorizationError)
done()
it "handles status 426", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=426&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.upgradeRequired)
done()
it "handles status 500", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=500&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.serverError)
done()
it "handles status 503", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=503&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.downForMaintenanceError)
done()
| true | require('../../spec_helper')
{_} = require('underscore')
braintree = specHelper.braintree
describe "TransparentRedirectGateway", ->
url = specHelper.defaultGateway.transparentRedirect.url
describe "createCustomerData", ->
it "generates tr data for the customer", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.createCustomerData(
redirectUrl: 'http://www.example.com/'
customer:
firstName: 'PI:NAME:<NAME>END_PI'
)
customerParams =
customer:
last_name: 'PI:NAME:<NAME>END_PI'
specHelper.simulateTrFormPost url, trData, customerParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
done()
it "can include the credit card and billing address", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.createCustomerData(
redirectUrl: 'http://www.example.com/'
customer:
firstName: 'PI:NAME:<NAME>END_PI'
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
billingAddress:
streetAddress: '123 E Fake St'
)
customerParams =
customer:
last_name: 'PI:NAME:<NAME>END_PI'
creditCard:
number: '5105105105105100'
expirationMonth: '05'
expirationYear: '2017'
billingAddress:
extendedAddress: '5th Floor'
specHelper.simulateTrFormPost url, trData, customerParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.creditCards[0].cardholderName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.creditCards[0].billingAddress.streetAddress, '123 E Fake St')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.creditCards[0].maskedNumber, '510510******5100')
assert.equal(response.customer.creditCards[0].expirationMonth, '05')
assert.equal(response.customer.creditCards[0].expirationYear, '2017')
assert.equal(response.customer.creditCards[0].billingAddress.extendedAddress, '5th Floor')
done()
describe "updateCustomerData", ->
it "updates a customer", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
lastName: 'PI:NAME:<NAME>END_PI'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.updateCustomerData(
redirectUrl: 'http://www.example.com/'
customerId: response.customer.id
customer:
firstName: 'PI:NAME:<NAME>END_PI'
)
updateParams =
customer:
lastName: 'PI:NAME:<NAME>END_PI'
specHelper.simulateTrFormPost url, trData, updateParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.customer.firstName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.customer.lastName, 'PI:NAME:<NAME>END_PI')
done()
describe "transactionData", ->
it "creates a transaction", (done) ->
trData = specHelper.defaultGateway.transparentRedirect.transactionData(
redirectUrl: 'http://www.example.com/'
transaction:
amount: 50.00
type: 'sale'
)
transactionParams =
transaction:
creditCard:
number: '5105105105105100'
expirationDate: '05/2012'
specHelper.simulateTrFormPost url, trData, transactionParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.transaction.status, 'authorized')
assert.equal(response.transaction.amount, '50.00')
assert.equal(response.transaction.creditCard.maskedNumber, '510510******5100')
done()
describe "createCreditCard", ->
it "creates a credit card", (done) ->
specHelper.defaultGateway.customer.create firstName: 'Customer PI:NAME:<NAME>END_PI', (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.createCreditCardData(
redirectUrl: 'http://www.example.com/'
creditCard:
customerId: response.customer.id
cardholderName: 'PI:NAME:<NAME>END_PI'
)
creditCardParams =
creditCard:
number: '5105105105105100'
expirationDate: '05/2017'
specHelper.simulateTrFormPost url, trData, creditCardParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.creditCard.cardholderName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.creditCard.maskedNumber, '510510******5100')
done()
describe "updateCreditCard", ->
it "updates a credit card", (done) ->
customerParams =
firstName: 'PI:NAME:<NAME>END_PI'
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
number: '5105105105105100'
expirationDate: '05/2017'
specHelper.defaultGateway.customer.create customerParams, (err, response) ->
trData = specHelper.defaultGateway.transparentRedirect.updateCreditCardData(
redirectUrl: 'http://www.example.com/'
paymentMethodToken: response.customer.creditCards[0].token
creditCard:
cardholderName: 'PI:NAME:<NAME>END_PI'
)
creditCardParams =
creditCard:
number: '4111111111111111'
specHelper.simulateTrFormPost url, trData, creditCardParams, (err, response) ->
specHelper.defaultGateway.transparentRedirect.confirm response, (err, response) ->
assert.isNull(err)
assert.isTrue(response.success)
assert.equal(response.creditCard.cardholderName, 'PI:NAME:<NAME>END_PI')
assert.equal(response.creditCard.maskedNumber, '411111******1111')
done()
describe "confirm", ->
it "handles invalid hashes", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'a=b&hash=invalid', (err, response) ->
assert.equal(err.type, braintree.errorTypes.invalidTransparentRedirectHashError)
done()
it "handles status 401", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=401&hash=none', (err, response) ->
assert.equal(err.type, braintree.errorTypes.authenticationError)
done()
it "handles status 403", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=403&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.authorizationError)
done()
it "handles status 426", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=426&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.upgradeRequired)
done()
it "handles status 500", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=500&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.serverError)
done()
it "handles status 503", (done) ->
specHelper.defaultGateway.transparentRedirect.confirm 'http_status=503&hash=irrelevant', (err, response) ->
assert.equal(err.type, braintree.errorTypes.downForMaintenanceError)
done()
|
[
{
"context": "ry(institution: \"016\", transit: \"123\", account: \"123456789\")\n\n it \"returns the right error\", ->\n ",
"end": 4537,
"score": 0.7640320062637329,
"start": 4529,
"tag": "KEY",
"value": "23456789"
}
] | test/canadian_bank_account_test.coffee | wealthsimple/canadian-bank-account | 9 | describe "CanadianBankAccount", ->
factory = (attributes = {}) ->
new CanadianBankAccount({
institution: attributes.institution ? "001"
transit: attributes.transit ? "12345"
account: attributes.account ? "1234567"
})
describe "#hasValidations", ->
context "institution is known", ->
it "returns true", ->
expect(factory().hasValidations()).toBe(true)
context "institution is unknown", ->
it "returns false", ->
expect(factory(institution: "789").hasValidations()).toBe(false)
describe "#isAccountValid", ->
context "with a Scotiabank account", ->
it "returns true for valid account numbers", ->
expect(factory(institution: "002", account: "1234567").isAccountValid()).toBe(true)
expect(factory(institution: "002", account: "123456789123").isAccountValid()).toBe(true)
it "returns false for invalid account numbers", ->
expect(factory(institution: "002", account: "").isAccountValid()).toBe(false)
expect(factory(institution: "002", account: "123").isAccountValid()).toBe(false)
expect(factory(institution: "002", account: "12345678").isAccountValid()).toBe(false)
context "with a BMO account", ->
it "returns true for valid account number", ->
expect(factory(institution: "001", account: "1234567").isAccountValid()).toBe(true)
it "returns false for invalid account number", ->
expect(factory(institution: "001", account: "").isAccountValid()).toBe(false)
expect(factory(institution: "001", account: "123").isAccountValid()).toBe(false)
expect(factory(institution: "001", account: "12345678").isAccountValid()).toBe(false)
context "for an unknown institution", ->
it "returns true", ->
expect(factory(institution: "789").isAccountValid()).toBe(true)
describe "#isTransitValid", ->
context "with a BMO account", ->
it "returns true for any 5-digit string", ->
expect(factory(institution: "001", transit: "12345").isTransitValid()).toBe(true)
it "return false for any non-5-digit string", ->
expect(factory(institution: "001", transit: "").isTransitValid()).toBe(false)
expect(factory(institution: "001", transit: "123").isTransitValid()).toBe(false)
expect(factory(institution: "001", transit: "123456").isTransitValid()).toBe(false)
context "with an HSBC account", ->
it "returns true for any 5-digit string starting with 10", ->
expect(factory(institution: "016", transit: "10345").isTransitValid()).toBe(true)
it "returns false for any invalid transit number", ->
expect(factory(institution: "016", transit: "").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "10").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "12345").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "103456").isTransitValid()).toBe(false)
context "for an unknown institution", ->
it "uses the default transit regex", ->
expect(factory(institution: "789", transit: "123").isTransitValid()).toBe(false)
expect(factory(institution: "789", transit: "12345").isTransitValid()).toBe(true)
describe "errors", ->
context "all provided numbers are valid", ->
beforeEach ->
@subject = factory()
it "returns an empty array", ->
expect(@subject.accountErrors()).toEqual([])
expect(@subject.transitErrors()).toEqual([])
expect(@subject.errors()).toEqual([])
context "account number is invalid", ->
beforeEach ->
@subject = factory(account: "123")
it "returns the right error", ->
expect(@subject.accountErrors()).toEqual(["Bank of Montreal account number must be 7 digits long."])
expect(@subject.transitErrors()).toEqual([])
expect(@subject.errors()).toEqual(["Bank of Montreal account number must be 7 digits long."])
context "transit number is invalid for BMO", ->
beforeEach ->
@subject = factory(transit: "123")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["Transit number must be 5 digits long."])
expect(@subject.accountErrors()).toEqual([])
expect(@subject.errors()).toEqual(["Transit number must be 5 digits long."])
context "transit number is invalid for HSBC", ->
beforeEach ->
@subject = factory(institution: "016", transit: "123", account: "123456789")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["HSBC Bank of Canada transit number must begin with 10XXX."])
expect(@subject.accountErrors()).toEqual([])
expect(@subject.errors()).toEqual(["HSBC Bank of Canada transit number must begin with 10XXX."])
context "transit number is invalid for an unknown bank", ->
beforeEach ->
@subject = factory(institution: "123", transit: "123")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["Transit number must be 5 digits long."])
| 182925 | describe "CanadianBankAccount", ->
factory = (attributes = {}) ->
new CanadianBankAccount({
institution: attributes.institution ? "001"
transit: attributes.transit ? "12345"
account: attributes.account ? "1234567"
})
describe "#hasValidations", ->
context "institution is known", ->
it "returns true", ->
expect(factory().hasValidations()).toBe(true)
context "institution is unknown", ->
it "returns false", ->
expect(factory(institution: "789").hasValidations()).toBe(false)
describe "#isAccountValid", ->
context "with a Scotiabank account", ->
it "returns true for valid account numbers", ->
expect(factory(institution: "002", account: "1234567").isAccountValid()).toBe(true)
expect(factory(institution: "002", account: "123456789123").isAccountValid()).toBe(true)
it "returns false for invalid account numbers", ->
expect(factory(institution: "002", account: "").isAccountValid()).toBe(false)
expect(factory(institution: "002", account: "123").isAccountValid()).toBe(false)
expect(factory(institution: "002", account: "12345678").isAccountValid()).toBe(false)
context "with a BMO account", ->
it "returns true for valid account number", ->
expect(factory(institution: "001", account: "1234567").isAccountValid()).toBe(true)
it "returns false for invalid account number", ->
expect(factory(institution: "001", account: "").isAccountValid()).toBe(false)
expect(factory(institution: "001", account: "123").isAccountValid()).toBe(false)
expect(factory(institution: "001", account: "12345678").isAccountValid()).toBe(false)
context "for an unknown institution", ->
it "returns true", ->
expect(factory(institution: "789").isAccountValid()).toBe(true)
describe "#isTransitValid", ->
context "with a BMO account", ->
it "returns true for any 5-digit string", ->
expect(factory(institution: "001", transit: "12345").isTransitValid()).toBe(true)
it "return false for any non-5-digit string", ->
expect(factory(institution: "001", transit: "").isTransitValid()).toBe(false)
expect(factory(institution: "001", transit: "123").isTransitValid()).toBe(false)
expect(factory(institution: "001", transit: "123456").isTransitValid()).toBe(false)
context "with an HSBC account", ->
it "returns true for any 5-digit string starting with 10", ->
expect(factory(institution: "016", transit: "10345").isTransitValid()).toBe(true)
it "returns false for any invalid transit number", ->
expect(factory(institution: "016", transit: "").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "10").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "12345").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "103456").isTransitValid()).toBe(false)
context "for an unknown institution", ->
it "uses the default transit regex", ->
expect(factory(institution: "789", transit: "123").isTransitValid()).toBe(false)
expect(factory(institution: "789", transit: "12345").isTransitValid()).toBe(true)
describe "errors", ->
context "all provided numbers are valid", ->
beforeEach ->
@subject = factory()
it "returns an empty array", ->
expect(@subject.accountErrors()).toEqual([])
expect(@subject.transitErrors()).toEqual([])
expect(@subject.errors()).toEqual([])
context "account number is invalid", ->
beforeEach ->
@subject = factory(account: "123")
it "returns the right error", ->
expect(@subject.accountErrors()).toEqual(["Bank of Montreal account number must be 7 digits long."])
expect(@subject.transitErrors()).toEqual([])
expect(@subject.errors()).toEqual(["Bank of Montreal account number must be 7 digits long."])
context "transit number is invalid for BMO", ->
beforeEach ->
@subject = factory(transit: "123")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["Transit number must be 5 digits long."])
expect(@subject.accountErrors()).toEqual([])
expect(@subject.errors()).toEqual(["Transit number must be 5 digits long."])
context "transit number is invalid for HSBC", ->
beforeEach ->
@subject = factory(institution: "016", transit: "123", account: "1<KEY>")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["HSBC Bank of Canada transit number must begin with 10XXX."])
expect(@subject.accountErrors()).toEqual([])
expect(@subject.errors()).toEqual(["HSBC Bank of Canada transit number must begin with 10XXX."])
context "transit number is invalid for an unknown bank", ->
beforeEach ->
@subject = factory(institution: "123", transit: "123")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["Transit number must be 5 digits long."])
| true | describe "CanadianBankAccount", ->
factory = (attributes = {}) ->
new CanadianBankAccount({
institution: attributes.institution ? "001"
transit: attributes.transit ? "12345"
account: attributes.account ? "1234567"
})
describe "#hasValidations", ->
context "institution is known", ->
it "returns true", ->
expect(factory().hasValidations()).toBe(true)
context "institution is unknown", ->
it "returns false", ->
expect(factory(institution: "789").hasValidations()).toBe(false)
describe "#isAccountValid", ->
context "with a Scotiabank account", ->
it "returns true for valid account numbers", ->
expect(factory(institution: "002", account: "1234567").isAccountValid()).toBe(true)
expect(factory(institution: "002", account: "123456789123").isAccountValid()).toBe(true)
it "returns false for invalid account numbers", ->
expect(factory(institution: "002", account: "").isAccountValid()).toBe(false)
expect(factory(institution: "002", account: "123").isAccountValid()).toBe(false)
expect(factory(institution: "002", account: "12345678").isAccountValid()).toBe(false)
context "with a BMO account", ->
it "returns true for valid account number", ->
expect(factory(institution: "001", account: "1234567").isAccountValid()).toBe(true)
it "returns false for invalid account number", ->
expect(factory(institution: "001", account: "").isAccountValid()).toBe(false)
expect(factory(institution: "001", account: "123").isAccountValid()).toBe(false)
expect(factory(institution: "001", account: "12345678").isAccountValid()).toBe(false)
context "for an unknown institution", ->
it "returns true", ->
expect(factory(institution: "789").isAccountValid()).toBe(true)
describe "#isTransitValid", ->
context "with a BMO account", ->
it "returns true for any 5-digit string", ->
expect(factory(institution: "001", transit: "12345").isTransitValid()).toBe(true)
it "return false for any non-5-digit string", ->
expect(factory(institution: "001", transit: "").isTransitValid()).toBe(false)
expect(factory(institution: "001", transit: "123").isTransitValid()).toBe(false)
expect(factory(institution: "001", transit: "123456").isTransitValid()).toBe(false)
context "with an HSBC account", ->
it "returns true for any 5-digit string starting with 10", ->
expect(factory(institution: "016", transit: "10345").isTransitValid()).toBe(true)
it "returns false for any invalid transit number", ->
expect(factory(institution: "016", transit: "").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "10").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "12345").isTransitValid()).toBe(false)
expect(factory(institution: "016", transit: "103456").isTransitValid()).toBe(false)
context "for an unknown institution", ->
it "uses the default transit regex", ->
expect(factory(institution: "789", transit: "123").isTransitValid()).toBe(false)
expect(factory(institution: "789", transit: "12345").isTransitValid()).toBe(true)
describe "errors", ->
context "all provided numbers are valid", ->
beforeEach ->
@subject = factory()
it "returns an empty array", ->
expect(@subject.accountErrors()).toEqual([])
expect(@subject.transitErrors()).toEqual([])
expect(@subject.errors()).toEqual([])
context "account number is invalid", ->
beforeEach ->
@subject = factory(account: "123")
it "returns the right error", ->
expect(@subject.accountErrors()).toEqual(["Bank of Montreal account number must be 7 digits long."])
expect(@subject.transitErrors()).toEqual([])
expect(@subject.errors()).toEqual(["Bank of Montreal account number must be 7 digits long."])
context "transit number is invalid for BMO", ->
beforeEach ->
@subject = factory(transit: "123")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["Transit number must be 5 digits long."])
expect(@subject.accountErrors()).toEqual([])
expect(@subject.errors()).toEqual(["Transit number must be 5 digits long."])
context "transit number is invalid for HSBC", ->
beforeEach ->
@subject = factory(institution: "016", transit: "123", account: "1PI:KEY:<KEY>END_PI")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["HSBC Bank of Canada transit number must begin with 10XXX."])
expect(@subject.accountErrors()).toEqual([])
expect(@subject.errors()).toEqual(["HSBC Bank of Canada transit number must begin with 10XXX."])
context "transit number is invalid for an unknown bank", ->
beforeEach ->
@subject = factory(institution: "123", transit: "123")
it "returns the right error", ->
expect(@subject.transitErrors()).toEqual(["Transit number must be 5 digits long."])
|
[
{
"context": "orts = lemon.Component {\n package: 'wg'\n name: 'StarWine'\n class: 'webgradient'\n\n data: {\n positi",
"end": 177,
"score": 0.6472846865653992,
"start": 173,
"tag": "NAME",
"value": "Star"
},
{
"context": " = lemon.Component {\n package: 'wg'\n name: 'StarWine'\n class: 'webgradient'\n\n data: {\n position: ",
"end": 181,
"score": 0.9398624897003174,
"start": 177,
"tag": "USERNAME",
"value": "Wine"
}
] | StarWine.coffee | lemon/lemonjs-wg | 0 |
# dependencies
Path = require 'path'
# stylesheet
require Path.resolve(__dirname, 'gradient.css')
# component
module.exports = lemon.Component {
package: 'wg'
name: 'StarWine'
class: 'webgradient'
data: {
position: null
}
template: (data) ->
div class: data.position, style: "background-image: linear-gradient(to right, #b8cbb8 0%, #b8cbb8 0%, #b465da 0%, #cf6cc9 33%, #ee609c 66%, #ee609c 100%);"
} | 150793 |
# dependencies
Path = require 'path'
# stylesheet
require Path.resolve(__dirname, 'gradient.css')
# component
module.exports = lemon.Component {
package: 'wg'
name: '<NAME>Wine'
class: 'webgradient'
data: {
position: null
}
template: (data) ->
div class: data.position, style: "background-image: linear-gradient(to right, #b8cbb8 0%, #b8cbb8 0%, #b465da 0%, #cf6cc9 33%, #ee609c 66%, #ee609c 100%);"
} | true |
# dependencies
Path = require 'path'
# stylesheet
require Path.resolve(__dirname, 'gradient.css')
# component
module.exports = lemon.Component {
package: 'wg'
name: 'PI:NAME:<NAME>END_PIWine'
class: 'webgradient'
data: {
position: null
}
template: (data) ->
div class: data.position, style: "background-image: linear-gradient(to right, #b8cbb8 0%, #b8cbb8 0%, #b465da 0%, #cf6cc9 33%, #ee609c 66%, #ee609c 100%);"
} |
[
{
"context": " @deviceModel.findVerified query: query, password: password, deviceFindCallback\n\nmodule.exports = SessionCont",
"end": 1246,
"score": 0.9980790615081787,
"start": 1238,
"tag": "PASSWORD",
"value": "password"
}
] | app/controllers/session-controller.coffee | iotrentil/rentil-authenticator-email-password | 0 | debug = require('debug')('meshblu-authenticator-email-password:sessions-controller')
url = require 'url'
class SessionController
constructor: ({@meshbluHttp, @deviceModel}) ->
create: (request, response) =>
{email,password,callbackUrl} = request.body
query = {}
email = email.toLowerCase()
query[@deviceModel.authenticatorUuid + '.id'] = email
deviceFindCallback = (error, foundDevice) =>
debug 'device find error', error if error?
debug 'device find', foundDevice
return response.status(401).send error?.message unless foundDevice
debug 'about to generateAndStoreToken', foundDevice.uuid
@meshbluHttp.generateAndStoreToken foundDevice.uuid, (error, device) =>
return response.sendError error if error?
return response.status(201).send(device:device) unless callbackUrl?
uriParams = url.parse callbackUrl, true
delete uriParams.search
uriParams.query ?= {}
uriParams.query.uuid = device.uuid
uriParams.query.token = device.token
uri = decodeURIComponent url.format(uriParams)
response.status(201).location(uri).send(device: device, callbackUrl: uri)
@deviceModel.findVerified query: query, password: password, deviceFindCallback
module.exports = SessionController
| 8258 | debug = require('debug')('meshblu-authenticator-email-password:sessions-controller')
url = require 'url'
class SessionController
constructor: ({@meshbluHttp, @deviceModel}) ->
create: (request, response) =>
{email,password,callbackUrl} = request.body
query = {}
email = email.toLowerCase()
query[@deviceModel.authenticatorUuid + '.id'] = email
deviceFindCallback = (error, foundDevice) =>
debug 'device find error', error if error?
debug 'device find', foundDevice
return response.status(401).send error?.message unless foundDevice
debug 'about to generateAndStoreToken', foundDevice.uuid
@meshbluHttp.generateAndStoreToken foundDevice.uuid, (error, device) =>
return response.sendError error if error?
return response.status(201).send(device:device) unless callbackUrl?
uriParams = url.parse callbackUrl, true
delete uriParams.search
uriParams.query ?= {}
uriParams.query.uuid = device.uuid
uriParams.query.token = device.token
uri = decodeURIComponent url.format(uriParams)
response.status(201).location(uri).send(device: device, callbackUrl: uri)
@deviceModel.findVerified query: query, password: <PASSWORD>, deviceFindCallback
module.exports = SessionController
| true | debug = require('debug')('meshblu-authenticator-email-password:sessions-controller')
url = require 'url'
class SessionController
constructor: ({@meshbluHttp, @deviceModel}) ->
create: (request, response) =>
{email,password,callbackUrl} = request.body
query = {}
email = email.toLowerCase()
query[@deviceModel.authenticatorUuid + '.id'] = email
deviceFindCallback = (error, foundDevice) =>
debug 'device find error', error if error?
debug 'device find', foundDevice
return response.status(401).send error?.message unless foundDevice
debug 'about to generateAndStoreToken', foundDevice.uuid
@meshbluHttp.generateAndStoreToken foundDevice.uuid, (error, device) =>
return response.sendError error if error?
return response.status(201).send(device:device) unless callbackUrl?
uriParams = url.parse callbackUrl, true
delete uriParams.search
uriParams.query ?= {}
uriParams.query.uuid = device.uuid
uriParams.query.token = device.token
uri = decodeURIComponent url.format(uriParams)
response.status(201).location(uri).send(device: device, callbackUrl: uri)
@deviceModel.findVerified query: query, password: PI:PASSWORD:<PASSWORD>END_PI, deviceFindCallback
module.exports = SessionController
|
[
{
"context": "\n\n@module joukou-api/persona/circle/routes\n@author Isaac Johnston <isaac.johnston@joukou.com>\n###\nCircleModel = re",
"end": 819,
"score": 0.999883770942688,
"start": 805,
"tag": "NAME",
"value": "Isaac Johnston"
},
{
"context": "api/persona/circle/routes\n@author Isaac Johnston <isaac.johnston@joukou.com>\n###\nCircleModel = require( '../../circle/model'",
"end": 846,
"score": 0.9999337196350098,
"start": 821,
"tag": "EMAIL",
"value": "isaac.johnston@joukou.com"
}
] | src/persona/circle/routes.coffee | joukou/joukou-api | 0 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/persona/circle/Model|Circle} APIs provide information
about the available Circles. In future the ability to create, sell and buy
Circles will be added.
@module joukou-api/persona/circle/routes
@author Isaac Johnston <isaac.johnston@joukou.com>
###
CircleModel = require( '../../circle/model' )
_ = require( 'lodash' )
CircleRoutes = require( '../../circle/routes' )
authz = require( '../../authz' )
authn = require( '../../authn' )
module.exports = self =
registerRoutes: ( server ) ->
server.get( '/persona/:personaKey/circle', authn.authenticate, self.index )
server.get( '/persona/:personaKey/circle/:key', authn.authenticate, self.retrieve )
return
retrieve: (req, res, next) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
CircleRoutes.retrieve(req, res, next)
)
.fail(next)
index: ( req, res, next ) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
CircleModel.retrieveByPersona(req.params.personaKey)
.then((circles) ->
representation = {}
if req.accepts('application/hal+json')
representation["_embedded"] = {
"joukou:circle": _.map(circles, (circle) ->
value = circle.getValue()
value.key = circle.getKey()
value._links =
self:
href: "/persona/#{req.params.personaKey}/circle/#{circle.getKey()}"
'joukou:persona':
href: "/persona/#{req.params.personaKey}"
return value
)
}
else
representation.circles = _.map(circles, (circle) ->
return circle.getValue()
)
res.send(200, representation)
)
)
.fail(next)
| 52528 | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/persona/circle/Model|Circle} APIs provide information
about the available Circles. In future the ability to create, sell and buy
Circles will be added.
@module joukou-api/persona/circle/routes
@author <NAME> <<EMAIL>>
###
CircleModel = require( '../../circle/model' )
_ = require( 'lodash' )
CircleRoutes = require( '../../circle/routes' )
authz = require( '../../authz' )
authn = require( '../../authn' )
module.exports = self =
registerRoutes: ( server ) ->
server.get( '/persona/:personaKey/circle', authn.authenticate, self.index )
server.get( '/persona/:personaKey/circle/:key', authn.authenticate, self.retrieve )
return
retrieve: (req, res, next) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
CircleRoutes.retrieve(req, res, next)
)
.fail(next)
index: ( req, res, next ) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
CircleModel.retrieveByPersona(req.params.personaKey)
.then((circles) ->
representation = {}
if req.accepts('application/hal+json')
representation["_embedded"] = {
"joukou:circle": _.map(circles, (circle) ->
value = circle.getValue()
value.key = circle.getKey()
value._links =
self:
href: "/persona/#{req.params.personaKey}/circle/#{circle.getKey()}"
'joukou:persona':
href: "/persona/#{req.params.personaKey}"
return value
)
}
else
representation.circles = _.map(circles, (circle) ->
return circle.getValue()
)
res.send(200, representation)
)
)
.fail(next)
| true | "use strict"
###*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
###*
{@link module:joukou-api/persona/circle/Model|Circle} APIs provide information
about the available Circles. In future the ability to create, sell and buy
Circles will be added.
@module joukou-api/persona/circle/routes
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
CircleModel = require( '../../circle/model' )
_ = require( 'lodash' )
CircleRoutes = require( '../../circle/routes' )
authz = require( '../../authz' )
authn = require( '../../authn' )
module.exports = self =
registerRoutes: ( server ) ->
server.get( '/persona/:personaKey/circle', authn.authenticate, self.index )
server.get( '/persona/:personaKey/circle/:key', authn.authenticate, self.retrieve )
return
retrieve: (req, res, next) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
CircleRoutes.retrieve(req, res, next)
)
.fail(next)
index: ( req, res, next ) ->
authz.hasPersona(req.user, req.params.personaKey)
.then( ( persona ) ->
CircleModel.retrieveByPersona(req.params.personaKey)
.then((circles) ->
representation = {}
if req.accepts('application/hal+json')
representation["_embedded"] = {
"joukou:circle": _.map(circles, (circle) ->
value = circle.getValue()
value.key = circle.getKey()
value._links =
self:
href: "/persona/#{req.params.personaKey}/circle/#{circle.getKey()}"
'joukou:persona':
href: "/persona/#{req.params.personaKey}"
return value
)
}
else
representation.circles = _.map(circles, (circle) ->
return circle.getValue()
)
res.send(200, representation)
)
)
.fail(next)
|
[
{
"context": ")\n\n###*\n@module joukou-fbpp/protocols/base\n@author Fabian Cook <fabian.cook@joukou.com>\n###\n\nclass BaseProtocol\n",
"end": 790,
"score": 0.9998464584350586,
"start": 779,
"tag": "NAME",
"value": "Fabian Cook"
},
{
"context": "e joukou-fbpp/protocols/base\n@author Fabian Cook <fabian.cook@joukou.com>\n###\n\nclass BaseProtocol\n protocol: null\n filte",
"end": 814,
"score": 0.9999223947525024,
"start": 792,
"tag": "EMAIL",
"value": "fabian.cook@joukou.com"
}
] | src/protocols/base/index.coffee | joukou/joukou-flow | 0 | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
_ = require( 'lodash' )
Q = require( 'q' )
schemajs = require( 'schemajs' )
CommandResponse = require( '../../runtime/command-response' )
###*
@module joukou-fbpp/protocols/base
@author Fabian Cook <fabian.cook@joukou.com>
###
class BaseProtocol
protocol: null
filterCommands: null
commands: null
constructor: ( @protocol, @context ) ->
@filterCommands = [ ]
@commands = { }
getCommandKeys: ->
return _.keys( @commands )
getHandler: ( command ) ->
if typeof command isnt 'string'
return
return @commands[ command.toLowerCase( ) ]
addCommandSchemas: ( @commandSchemas ) ->
@commandSchemasLower ?= {}
for key, value of @commandSchemas
if not @commandSchemas.hasOwnProperty( key )
continue
@commandSchemasLower[ key.toLowerCase() ] = value
_resolvePromise: ( data ) ->
deferred = Q.defer()
if (
not data? or
not data.then? or
not data.fail?
)
return Q.resolve( data )
data
.then( deferred.resolve )
.fail( deferred.reject )
return deferred.promise
send: ( command, payload = undefined ) ->
response = null
if command instanceof CommandResponse
response = command
else
response = new CommandResponse(
command.toLowerCase( ),
payload
)
unless response.hasProtocol( )
response.setProtocol( @protocol )
return @context.send( response )
sendAll: ( command, payload ) ->
if not @context or @context.socket
return
@context.sendAll({
protocol: @protocol
command: command.toLowerCase(),
payload: payload
})
receive: ( command, payload ) ->
deferred = Q.defer()
handler = @commands[ command ]
if not handler
return Q.reject( )
try
promise = handler( payload, @context )
promise = @_resolvePromise( promise )
promise
.then( ( data ) =>
# Resolve command response here to ensure it has a protocol
if data not instanceof CommandResponse
data = new CommandResponse(
command,
if data? then data else payload,
@protocol
)
else if not data.hasProtocol( )
# Don't set if there already is a protocol
data.setProtocol( @protocol )
return data
)
.then( deferred.resolve )
.fail( deferred.reject )
catch e
return Q.reject( e )
return deferred.promise
command: ( name, command, route, methods ) ->
if not _.isArray( methods )
methods = [ methods ]
handler = ( payload, context ) =>
## Schema validation
unless handler.hasSchema( )
return command.call( @, payload, context )
form = handler.validate(
payload
)
if not form.valid
return Q.reject(
form.errors
)
return command.call( @, form.data, context )
handler.command = command
handler.route = route
handler.methods = methods
handler.getSchema = ->
return @commandSchemasLower?[ name.toLowerCase( ) ]
handler.hasSchema = ->
return !!handler.getSchema( )
handler.validate = ( payload ) ->
if handler.$schema
return handler.$schema.validate(
payload
)
schema = @commandSchemasLower?[ name.toLowerCase( ) ]
if not schema
return {
valid: true,
data: payload
errors: []
}
handler.$schema = schemajs.create(
schema
)
return handler.$schema.validate(
payload
)
@commands[ name.toLowerCase( ) ] = handler
if @[ name ]
@[ name ] = handler
reject: ( error ) ->
return Q.reject(
error or new Error( "Unknown" )
)
module.exports = BaseProtocol | 97097 | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
_ = require( 'lodash' )
Q = require( 'q' )
schemajs = require( 'schemajs' )
CommandResponse = require( '../../runtime/command-response' )
###*
@module joukou-fbpp/protocols/base
@author <NAME> <<EMAIL>>
###
class BaseProtocol
protocol: null
filterCommands: null
commands: null
constructor: ( @protocol, @context ) ->
@filterCommands = [ ]
@commands = { }
getCommandKeys: ->
return _.keys( @commands )
getHandler: ( command ) ->
if typeof command isnt 'string'
return
return @commands[ command.toLowerCase( ) ]
addCommandSchemas: ( @commandSchemas ) ->
@commandSchemasLower ?= {}
for key, value of @commandSchemas
if not @commandSchemas.hasOwnProperty( key )
continue
@commandSchemasLower[ key.toLowerCase() ] = value
_resolvePromise: ( data ) ->
deferred = Q.defer()
if (
not data? or
not data.then? or
not data.fail?
)
return Q.resolve( data )
data
.then( deferred.resolve )
.fail( deferred.reject )
return deferred.promise
send: ( command, payload = undefined ) ->
response = null
if command instanceof CommandResponse
response = command
else
response = new CommandResponse(
command.toLowerCase( ),
payload
)
unless response.hasProtocol( )
response.setProtocol( @protocol )
return @context.send( response )
sendAll: ( command, payload ) ->
if not @context or @context.socket
return
@context.sendAll({
protocol: @protocol
command: command.toLowerCase(),
payload: payload
})
receive: ( command, payload ) ->
deferred = Q.defer()
handler = @commands[ command ]
if not handler
return Q.reject( )
try
promise = handler( payload, @context )
promise = @_resolvePromise( promise )
promise
.then( ( data ) =>
# Resolve command response here to ensure it has a protocol
if data not instanceof CommandResponse
data = new CommandResponse(
command,
if data? then data else payload,
@protocol
)
else if not data.hasProtocol( )
# Don't set if there already is a protocol
data.setProtocol( @protocol )
return data
)
.then( deferred.resolve )
.fail( deferred.reject )
catch e
return Q.reject( e )
return deferred.promise
command: ( name, command, route, methods ) ->
if not _.isArray( methods )
methods = [ methods ]
handler = ( payload, context ) =>
## Schema validation
unless handler.hasSchema( )
return command.call( @, payload, context )
form = handler.validate(
payload
)
if not form.valid
return Q.reject(
form.errors
)
return command.call( @, form.data, context )
handler.command = command
handler.route = route
handler.methods = methods
handler.getSchema = ->
return @commandSchemasLower?[ name.toLowerCase( ) ]
handler.hasSchema = ->
return !!handler.getSchema( )
handler.validate = ( payload ) ->
if handler.$schema
return handler.$schema.validate(
payload
)
schema = @commandSchemasLower?[ name.toLowerCase( ) ]
if not schema
return {
valid: true,
data: payload
errors: []
}
handler.$schema = schemajs.create(
schema
)
return handler.$schema.validate(
payload
)
@commands[ name.toLowerCase( ) ] = handler
if @[ name ]
@[ name ] = handler
reject: ( error ) ->
return Q.reject(
error or new Error( "Unknown" )
)
module.exports = BaseProtocol | true | ###
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
###
_ = require( 'lodash' )
Q = require( 'q' )
schemajs = require( 'schemajs' )
CommandResponse = require( '../../runtime/command-response' )
###*
@module joukou-fbpp/protocols/base
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
###
class BaseProtocol
protocol: null
filterCommands: null
commands: null
constructor: ( @protocol, @context ) ->
@filterCommands = [ ]
@commands = { }
getCommandKeys: ->
return _.keys( @commands )
getHandler: ( command ) ->
if typeof command isnt 'string'
return
return @commands[ command.toLowerCase( ) ]
addCommandSchemas: ( @commandSchemas ) ->
@commandSchemasLower ?= {}
for key, value of @commandSchemas
if not @commandSchemas.hasOwnProperty( key )
continue
@commandSchemasLower[ key.toLowerCase() ] = value
_resolvePromise: ( data ) ->
deferred = Q.defer()
if (
not data? or
not data.then? or
not data.fail?
)
return Q.resolve( data )
data
.then( deferred.resolve )
.fail( deferred.reject )
return deferred.promise
send: ( command, payload = undefined ) ->
response = null
if command instanceof CommandResponse
response = command
else
response = new CommandResponse(
command.toLowerCase( ),
payload
)
unless response.hasProtocol( )
response.setProtocol( @protocol )
return @context.send( response )
sendAll: ( command, payload ) ->
if not @context or @context.socket
return
@context.sendAll({
protocol: @protocol
command: command.toLowerCase(),
payload: payload
})
receive: ( command, payload ) ->
deferred = Q.defer()
handler = @commands[ command ]
if not handler
return Q.reject( )
try
promise = handler( payload, @context )
promise = @_resolvePromise( promise )
promise
.then( ( data ) =>
# Resolve command response here to ensure it has a protocol
if data not instanceof CommandResponse
data = new CommandResponse(
command,
if data? then data else payload,
@protocol
)
else if not data.hasProtocol( )
# Don't set if there already is a protocol
data.setProtocol( @protocol )
return data
)
.then( deferred.resolve )
.fail( deferred.reject )
catch e
return Q.reject( e )
return deferred.promise
command: ( name, command, route, methods ) ->
if not _.isArray( methods )
methods = [ methods ]
handler = ( payload, context ) =>
## Schema validation
unless handler.hasSchema( )
return command.call( @, payload, context )
form = handler.validate(
payload
)
if not form.valid
return Q.reject(
form.errors
)
return command.call( @, form.data, context )
handler.command = command
handler.route = route
handler.methods = methods
handler.getSchema = ->
return @commandSchemasLower?[ name.toLowerCase( ) ]
handler.hasSchema = ->
return !!handler.getSchema( )
handler.validate = ( payload ) ->
if handler.$schema
return handler.$schema.validate(
payload
)
schema = @commandSchemasLower?[ name.toLowerCase( ) ]
if not schema
return {
valid: true,
data: payload
errors: []
}
handler.$schema = schemajs.create(
schema
)
return handler.$schema.validate(
payload
)
@commands[ name.toLowerCase( ) ] = handler
if @[ name ]
@[ name ] = handler
reject: ( error ) ->
return Q.reject(
error or new Error( "Unknown" )
)
module.exports = BaseProtocol |
[
{
"context": "g scoring algorithm.\n *\n * Copyright (C) 2009-2011 Joshaven Potter <yourtech@gmail.com>\n * Copyright (C) 2010-2011 Y",
"end": 121,
"score": 0.9999073147773743,
"start": 106,
"tag": "NAME",
"value": "Joshaven Potter"
},
{
"context": "m.\n *\n * Copyright (C) 2009-2011 Joshaven Potter <yourtech@gmail.com>\n * Copyright (C) 2010-2011 Yesudeep Mangalapilly",
"end": 141,
"score": 0.9999286532402039,
"start": 123,
"tag": "EMAIL",
"value": "yourtech@gmail.com"
},
{
"context": "er <yourtech@gmail.com>\n * Copyright (C) 2010-2011 Yesudeep Mangalapilly <yesudeep@gmail.com>\n * MIT license: http://www.o",
"end": 191,
"score": 0.9998992681503296,
"start": 170,
"tag": "NAME",
"value": "Yesudeep Mangalapilly"
},
{
"context": " * Copyright (C) 2010-2011 Yesudeep Mangalapilly <yesudeep@gmail.com>\n * MIT license: http://www.opensource.org/licens",
"end": 211,
"score": 0.9999293088912964,
"start": 193,
"tag": "EMAIL",
"value": "yesudeep@gmail.com"
},
{
"context": "censes/mit-license.php\n ###\n\n# Special thanks to Lachie Cox and Quicksilver for inspiration.\n#\n# Compilation ",
"end": 316,
"score": 0.8518228530883789,
"start": 307,
"tag": "NAME",
"value": "achie Cox"
}
] | assets/vendor/string_score.coffee | garrett/cockpit-project.github.io | 85 | ---
---
###!
* string_score.js: Quicksilver-like string scoring algorithm.
*
* Copyright (C) 2009-2011 Joshaven Potter <yourtech@gmail.com>
* Copyright (C) 2010-2011 Yesudeep Mangalapilly <yesudeep@gmail.com>
* MIT license: http://www.opensource.org/licenses/mit-license.php
###
# Special thanks to Lachie Cox and Quicksilver for inspiration.
#
# Compilation notes:
#
# 1. Compile with the `-b -c` flags to the coffee-script compiler
# `String.prototype.score`
# ------------------------
String::score = (abbreviation) ->
# **Size optimization notes**:
# Declaring `string` before checking for an exact match
# does not affect the speed and reduces size because `this`
# occurs only once in the code as a result.
string = this
# Perfect match if the string equals the abbreviation.
return 1.0 if string == abbreviation
# Initializing variables.
string_length = string.length
total_character_score = 0
# Awarded only if the string and the abbreviation have a common prefix.
should_award_common_prefix_bonus = 0 #no
#### Sum character scores
# Add up scores for each character in the abbreviation.
for c, i in abbreviation
# Find the index of current character (case-insensitive) in remaining part of string.
index_c_lowercase = string.indexOf c.toLowerCase()
index_c_uppercase = string.indexOf c.toUpperCase()
min_index = Math.min index_c_lowercase, index_c_uppercase
index_in_string = if min_index > -1 then min_index else Math.max index_c_lowercase, index_c_uppercase
#### Identical strings
# Bail out if current character is not found (case-insensitive) in remaining part of string.
#
# **Possible size optimization**:
# Replace `index_in_string == -1` with `index_in_string < 0`
# which has fewer characters and should have identical performance.
return 0 if index_in_string == -1
# Set base score for current character.
character_score = 0.1
#### Case-match bonus
# If the current abbreviation character has the same case
# as that of the character in the string, we add a bonus.
#
# **Optimization notes**:
# `charAt` was replaced with an index lookup here because
# the latter results in smaller and faster code without
# breaking any tests.
if string[index_in_string] == c
character_score += 0.1
#### Consecutive character match and common prefix bonuses
# Increase the score when each consecutive character of
# the abbreviation matches the first character of the
# remaining string.
#
# **Size optimization disabled (truthiness shortened)**:
# It produces smaller code but is slower.
#
# if !index_in_string
if index_in_string == 0
character_score += 0.8
# String and abbreviation have common prefix, so award bonus.
#
# **Size optimization disabled (truthiness shortened)**:
# It produces smaller code but is slower.
#
# if !i
if i == 0
should_award_common_prefix_bonus = 1 #yes
#### Acronym bonus
# Typing the first character of an acronym is as
# though you preceded it with two perfect character
# matches.
#
# **Size optimization disabled**:
# `string.charAt(index)` wasn't replaced with `string[index]`
# in this case even though the latter results in smaller
# code (when minified) because the former is faster, and
# the gain out of replacing it is negligible.
if string.charAt(index_in_string - 1) == ' '
character_score += 0.8 # * Math.min(index_in_string, 5) # Cap bonus at 0.4 * 5
# Left trim the matched part of the string
# (forces sequential matching).
string = string.substring(index_in_string + 1, string_length)
# Add to total character score.
total_character_score += character_score
# **Feature disabled**:
# Uncomment the following to weigh smaller words higher.
#
# return total_character_score / string_length
abbreviation_length = abbreviation.length
abbreviation_score = total_character_score / abbreviation_length
#### Reduce penalty for longer strings
# **Optimization notes (code inlined)**:
#
# percentage_of_matched_string = abbreviation_length / string_length
# word_score = abbreviation_score * percentage_of_matched_string
# final_score = (word_score + abbreviation_score) / 2
final_score = ((abbreviation_score * (abbreviation_length / string_length)) + abbreviation_score) / 2
#### Award common prefix bonus
if should_award_common_prefix_bonus and (final_score + 0.1 < 1)
final_score += 0.1
return final_score
| 138850 | ---
---
###!
* string_score.js: Quicksilver-like string scoring algorithm.
*
* Copyright (C) 2009-2011 <NAME> <<EMAIL>>
* Copyright (C) 2010-2011 <NAME> <<EMAIL>>
* MIT license: http://www.opensource.org/licenses/mit-license.php
###
# Special thanks to L<NAME> and Quicksilver for inspiration.
#
# Compilation notes:
#
# 1. Compile with the `-b -c` flags to the coffee-script compiler
# `String.prototype.score`
# ------------------------
String::score = (abbreviation) ->
# **Size optimization notes**:
# Declaring `string` before checking for an exact match
# does not affect the speed and reduces size because `this`
# occurs only once in the code as a result.
string = this
# Perfect match if the string equals the abbreviation.
return 1.0 if string == abbreviation
# Initializing variables.
string_length = string.length
total_character_score = 0
# Awarded only if the string and the abbreviation have a common prefix.
should_award_common_prefix_bonus = 0 #no
#### Sum character scores
# Add up scores for each character in the abbreviation.
for c, i in abbreviation
# Find the index of current character (case-insensitive) in remaining part of string.
index_c_lowercase = string.indexOf c.toLowerCase()
index_c_uppercase = string.indexOf c.toUpperCase()
min_index = Math.min index_c_lowercase, index_c_uppercase
index_in_string = if min_index > -1 then min_index else Math.max index_c_lowercase, index_c_uppercase
#### Identical strings
# Bail out if current character is not found (case-insensitive) in remaining part of string.
#
# **Possible size optimization**:
# Replace `index_in_string == -1` with `index_in_string < 0`
# which has fewer characters and should have identical performance.
return 0 if index_in_string == -1
# Set base score for current character.
character_score = 0.1
#### Case-match bonus
# If the current abbreviation character has the same case
# as that of the character in the string, we add a bonus.
#
# **Optimization notes**:
# `charAt` was replaced with an index lookup here because
# the latter results in smaller and faster code without
# breaking any tests.
if string[index_in_string] == c
character_score += 0.1
#### Consecutive character match and common prefix bonuses
# Increase the score when each consecutive character of
# the abbreviation matches the first character of the
# remaining string.
#
# **Size optimization disabled (truthiness shortened)**:
# It produces smaller code but is slower.
#
# if !index_in_string
if index_in_string == 0
character_score += 0.8
# String and abbreviation have common prefix, so award bonus.
#
# **Size optimization disabled (truthiness shortened)**:
# It produces smaller code but is slower.
#
# if !i
if i == 0
should_award_common_prefix_bonus = 1 #yes
#### Acronym bonus
# Typing the first character of an acronym is as
# though you preceded it with two perfect character
# matches.
#
# **Size optimization disabled**:
# `string.charAt(index)` wasn't replaced with `string[index]`
# in this case even though the latter results in smaller
# code (when minified) because the former is faster, and
# the gain out of replacing it is negligible.
if string.charAt(index_in_string - 1) == ' '
character_score += 0.8 # * Math.min(index_in_string, 5) # Cap bonus at 0.4 * 5
# Left trim the matched part of the string
# (forces sequential matching).
string = string.substring(index_in_string + 1, string_length)
# Add to total character score.
total_character_score += character_score
# **Feature disabled**:
# Uncomment the following to weigh smaller words higher.
#
# return total_character_score / string_length
abbreviation_length = abbreviation.length
abbreviation_score = total_character_score / abbreviation_length
#### Reduce penalty for longer strings
# **Optimization notes (code inlined)**:
#
# percentage_of_matched_string = abbreviation_length / string_length
# word_score = abbreviation_score * percentage_of_matched_string
# final_score = (word_score + abbreviation_score) / 2
final_score = ((abbreviation_score * (abbreviation_length / string_length)) + abbreviation_score) / 2
#### Award common prefix bonus
if should_award_common_prefix_bonus and (final_score + 0.1 < 1)
final_score += 0.1
return final_score
| true | ---
---
###!
* string_score.js: Quicksilver-like string scoring algorithm.
*
* Copyright (C) 2009-2011 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* Copyright (C) 2010-2011 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
* MIT license: http://www.opensource.org/licenses/mit-license.php
###
# Special thanks to LPI:NAME:<NAME>END_PI and Quicksilver for inspiration.
#
# Compilation notes:
#
# 1. Compile with the `-b -c` flags to the coffee-script compiler
# `String.prototype.score`
# ------------------------
String::score = (abbreviation) ->
# **Size optimization notes**:
# Declaring `string` before checking for an exact match
# does not affect the speed and reduces size because `this`
# occurs only once in the code as a result.
string = this
# Perfect match if the string equals the abbreviation.
return 1.0 if string == abbreviation
# Initializing variables.
string_length = string.length
total_character_score = 0
# Awarded only if the string and the abbreviation have a common prefix.
should_award_common_prefix_bonus = 0 #no
#### Sum character scores
# Add up scores for each character in the abbreviation.
for c, i in abbreviation
# Find the index of current character (case-insensitive) in remaining part of string.
index_c_lowercase = string.indexOf c.toLowerCase()
index_c_uppercase = string.indexOf c.toUpperCase()
min_index = Math.min index_c_lowercase, index_c_uppercase
index_in_string = if min_index > -1 then min_index else Math.max index_c_lowercase, index_c_uppercase
#### Identical strings
# Bail out if current character is not found (case-insensitive) in remaining part of string.
#
# **Possible size optimization**:
# Replace `index_in_string == -1` with `index_in_string < 0`
# which has fewer characters and should have identical performance.
return 0 if index_in_string == -1
# Set base score for current character.
character_score = 0.1
#### Case-match bonus
# If the current abbreviation character has the same case
# as that of the character in the string, we add a bonus.
#
# **Optimization notes**:
# `charAt` was replaced with an index lookup here because
# the latter results in smaller and faster code without
# breaking any tests.
if string[index_in_string] == c
character_score += 0.1
#### Consecutive character match and common prefix bonuses
# Increase the score when each consecutive character of
# the abbreviation matches the first character of the
# remaining string.
#
# **Size optimization disabled (truthiness shortened)**:
# It produces smaller code but is slower.
#
# if !index_in_string
if index_in_string == 0
character_score += 0.8
# String and abbreviation have common prefix, so award bonus.
#
# **Size optimization disabled (truthiness shortened)**:
# It produces smaller code but is slower.
#
# if !i
if i == 0
should_award_common_prefix_bonus = 1 #yes
#### Acronym bonus
# Typing the first character of an acronym is as
# though you preceded it with two perfect character
# matches.
#
# **Size optimization disabled**:
# `string.charAt(index)` wasn't replaced with `string[index]`
# in this case even though the latter results in smaller
# code (when minified) because the former is faster, and
# the gain out of replacing it is negligible.
if string.charAt(index_in_string - 1) == ' '
character_score += 0.8 # * Math.min(index_in_string, 5) # Cap bonus at 0.4 * 5
# Left trim the matched part of the string
# (forces sequential matching).
string = string.substring(index_in_string + 1, string_length)
# Add to total character score.
total_character_score += character_score
# **Feature disabled**:
# Uncomment the following to weigh smaller words higher.
#
# return total_character_score / string_length
abbreviation_length = abbreviation.length
abbreviation_score = total_character_score / abbreviation_length
#### Reduce penalty for longer strings
# **Optimization notes (code inlined)**:
#
# percentage_of_matched_string = abbreviation_length / string_length
# word_score = abbreviation_score * percentage_of_matched_string
# final_score = (word_score + abbreviation_score) / 2
final_score = ((abbreviation_score * (abbreviation_length / string_length)) + abbreviation_score) / 2
#### Award common prefix bonus
if should_award_common_prefix_bonus and (final_score + 0.1 < 1)
final_score += 0.1
return final_score
|
[
{
"context": "\"\"\n\n if prop?\n key = \"#{prefix}[#{key}]\" if prefix?\n s.push @default_seri",
"end": 1776,
"score": 0.7541777491569519,
"start": 1774,
"tag": "KEY",
"value": "#{"
},
{
"context": " if prop?\n key = \"#{prefix}[#{key}]\" if prefix?\n s.push @default_serialize",
"end": 1779,
"score": 0.825883686542511,
"start": 1779,
"tag": "KEY",
"value": ""
}
] | lib/routes.js.coffee | jondavidjohn/js-routes | 0 | ###
File generated by js-routes GEM_VERSION
Based on Rails RAILS_VERSION routes of APP_CLASS
###
root = (exports ? this)
ParameterMissing = (message, fileName, lineNumber) ->
instance = new Error(message, fileName, lineNumber)
if Object.setPrototypeOf
Object.setPrototypeOf instance, Object.getPrototypeOf(this)
else
instance.__proto__ = this.__proto__
if Error.captureStackTrace
Error.captureStackTrace instance, ParameterMissing
instance
ParameterMissing.prototype = Object.create(Error.prototype, constructor:
value: Error
enumerable: false
writable: true
configurable: true
)
if Object.setPrototypeOf
Object.setPrototypeOf(ParameterMissing, Error)
else
ParameterMissing.__proto__ = Error
NodeTypes = NODE_TYPES
DeprecatedGlobbingBehavior = DEPRECATED_GLOBBING_BEHAVIOR
SpecialOptionsKey = SPECIAL_OPTIONS_KEY
UriEncoderSegmentRegex = /[^a-zA-Z0-9\-\._~!\$&'\(\)\*\+,;=:@]/g # this is ActionDispatch::Journey::Router::Utils::UriEncoder::SEGMENT
ReservedOptions = [
'anchor'
'trailing_slash'
'subdomain'
'host'
'port'
'protocol'
]
Utils =
configuration:
prefix: PREFIX
default_url_options: DEFAULT_URL_OPTIONS
special_options_key: SPECIAL_OPTIONS_KEY
serializer: SERIALIZER
default_serializer: (object, prefix = null) ->
return "" unless object?
if !prefix and !(@get_object_type(object) is "object")
throw new Error("Url parameters should be a javascript hash")
s = []
switch @get_object_type(object)
when "array"
for element, i in object
s.push @default_serializer(element, prefix + "[]")
when "object"
for own key, prop of object
if !prop? and prefix?
prop = ""
if prop?
key = "#{prefix}[#{key}]" if prefix?
s.push @default_serializer(prop, key)
else
if object?
s.push "#{encodeURIComponent(prefix.toString())}=#{encodeURIComponent(object.toString())}"
return "" unless s.length
s.join("&")
serialize: (object) ->
custom_serializer = @configuration.serializer
if custom_serializer? and @get_object_type(custom_serializer) is "function"
custom_serializer(object)
else
@default_serializer(object)
clean_path: (path) ->
path = path.split("://")
last_index = path.length - 1
path[last_index] = path[last_index].replace(/\/+/g, "/")
path.join "://"
extract_options: (number_of_params, args) ->
last_el = args[args.length - 1]
if (args.length > number_of_params and last_el == undefined) or (last_el? and "object" is @get_object_type(last_el) and !@looks_like_serialized_model(last_el))
options = args.pop() || {}
delete options[@configuration.special_options_key]
options
else
{}
looks_like_serialized_model: (object) ->
!object[@configuration.special_options_key] and ("id" of object or "to_param" of object)
path_identifier: (object) ->
return "0" if object is 0
# null, undefined, false or ''
return "" unless object
property = object
if @get_object_type(object) is "object"
if "to_param" of object
throw new ParameterMissing("Route parameter missing: to_param") unless object.to_param?
property = object.to_param
else if "id" of object
throw new ParameterMissing("Route parameter missing: id") unless object.id?
property = object.id
else
property = object
property = property.call(object) if @get_object_type(property) is "function"
property.toString()
clone: (obj) ->
return obj if !obj? or "object" isnt @get_object_type(obj)
copy = obj.constructor()
copy[key] = attr for own key, attr of obj
copy
merge: (xs...) ->
tap = (o, fn) -> fn(o); o
if xs?.length > 0
tap {}, (m) -> m[k] = v for k, v of x for x in xs
normalize_options: (parts, required_parts, default_options, actual_parameters) ->
options = @extract_options(parts.length, actual_parameters)
if actual_parameters.length > parts.length
throw new Error("Too many parameters provided for path")
use_all_parts = actual_parameters.length > required_parts.length
parts_options = {}
for own key of options
use_all_parts = true
if @indexOf(parts, key) >= 0
parts_options[key] = value
options = @merge(@configuration.default_url_options, default_options, options)
result = {}
url_parameters = {}
result['url_parameters'] = url_parameters
for own key, value of options
if @indexOf(ReservedOptions, key) >= 0
result[key] = value
else
url_parameters[key] = value
route_parts = if use_all_parts then parts else required_parts
i = 0
for part in route_parts when i < actual_parameters.length
unless parts_options.hasOwnProperty(part)
url_parameters[part] = actual_parameters[i]
++i
result
build_route: (parts, required_parts, default_options, route, full_url, args) ->
args = Array::slice.call(args)
options = @normalize_options(parts, required_parts, default_options, args)
parameters = options['url_parameters']
# path
result = "#{@get_prefix()}#{@visit(route, parameters)}"
url = Utils.clean_path(result)
# set trailing_slash
url = url.replace(/(.*?)[\/]?$/, "$1/") if options['trailing_slash'] is true
# set additional url params
if (url_params = @serialize(parameters)).length
url += "?#{url_params}"
# set anchor
url += if options.anchor then "##{options.anchor}" else ""
if full_url
url = @route_url(options) + url
url
#
# This function is JavaScript impelementation of the
# Journey::Visitors::Formatter that builds route by given parameters
# from route binary tree.
# Binary tree is serialized in the following way:
# [node type, left node, right node ]
#
# @param {Boolean} optional Marks the currently visited branch as optional.
# If set to `true`, this method will not throw when encountering
# a missing parameter (used in recursive calls).
#
visit: (route, parameters, optional = false) ->
[type, left, right] = route
switch type
when NodeTypes.GROUP
@visit left, parameters, true
when NodeTypes.STAR
@visit_globbing left, parameters, true
when NodeTypes.LITERAL, NodeTypes.SLASH, NodeTypes.DOT
left
when NodeTypes.CAT
left_part = @visit(left, parameters, optional)
right_part = @visit(right, parameters, optional)
if optional and ((@is_optional_node(left[0]) and not left_part) or
((@is_optional_node(right[0])) and not right_part))
return ""
"#{left_part}#{right_part}"
when NodeTypes.SYMBOL
value = parameters[left]
delete parameters[left]
if value?
return @encode_segment(@path_identifier(value))
if optional
"" # missing parameter
else
throw new ParameterMissing("Route parameter missing: #{left}")
#
# I don't know what is this node type
# Please send your PR if you do
#
# when NodeTypes.OR:
else
throw new Error("Unknown Rails node type")
encode_segment: (segment) -> segment.replace(UriEncoderSegmentRegex, (str) -> encodeURIComponent(str))
is_optional_node: (node) -> @indexOf([NodeTypes.STAR, NodeTypes.SYMBOL, NodeTypes.CAT], node) >= 0
#
# This method build spec for route
#
build_path_spec: (route, wildcard=false) ->
[type, left, right] = route
switch type
when NodeTypes.GROUP
"(#{@build_path_spec(left)})"
when NodeTypes.CAT
"#{@build_path_spec(left)}#{@build_path_spec(right)}"
when NodeTypes.STAR
@build_path_spec(left, true)
when NodeTypes.SYMBOL
if wildcard is true
"#{if left[0] is '*' then '' else '*'}#{left}"
else
":#{left}"
when NodeTypes.SLASH, NodeTypes.DOT, NodeTypes.LITERAL
left
# Not sure about this one
# when NodeTypes.OR
else throw new Error("Unknown Rails node type")
#
# This method convert value for globbing in right value for rails route
#
visit_globbing: (route, parameters, optional) ->
[type, left, right] = route
value = parameters[left]
delete parameters[left]
return @visit(route, parameters, optional) unless value?
value = switch @get_object_type(value)
when "array"
value.join("/")
else
value
if DeprecatedGlobbingBehavior
@path_identifier(value)
else
encodeURI(@path_identifier(value))
#
# This method check and return prefix from options
#
get_prefix: ->
prefix = @configuration.prefix
prefix = (if prefix.match("/$") then prefix else "#{prefix}/") if prefix isnt ""
prefix
#
# route function: create route path function and add spec to it
#
route: (parts_table, default_options, route_spec, full_url) ->
required_parts = []
parts = []
for [part, required] in parts_table
parts.push(part)
required_parts.push(part) if required
path_fn = -> Utils.build_route(
parts, required_parts, default_options, route_spec, full_url, arguments
)
path_fn.required_params = required_parts
path_fn.toString = -> Utils.build_path_spec(route_spec)
path_fn
route_url: (route_defaults) ->
return route_defaults if typeof route_defaults == 'string'
hostname = route_defaults.host || Utils.current_host()
return '' unless hostname
subdomain = if route_defaults.subdomain then route_defaults.subdomain + '.' else ''
protocol = route_defaults.protocol || Utils.current_protocol()
port = route_defaults.port || (Utils.current_port() unless route_defaults.host)
port = if port then ":#{port}" else ''
protocol + "://" + subdomain + hostname + port
has_location: -> window?.location?
current_host: ->
if @has_location() then window.location.hostname else null
current_protocol: () ->
if @has_location() && window.location.protocol != ''
# location.protocol includes the colon character
window.location.protocol.replace(/:$/, '')
else
'http'
current_port: () ->
if @has_location() && window.location.port != ''
window.location.port
else
''
#
# This is helper method to define object type.
# The typeof operator is probably the biggest design flaw of JavaScript, simply because it's basically completely broken.
#
# Value Class Type
# -------------------------------------
# "foo" String string
# new String("foo") String object
# 1.2 Number number
# new Number(1.2) Number object
# true Boolean boolean
# new Boolean(true) Boolean object
# new Date() Date object
# new Error() Error object
# [1,2,3] Array object
# new Array(1, 2, 3) Array object
# new Function("") Function function
# /abc/g RegExp object
# new RegExp("meow") RegExp object
# {} Object object
# new Object() Object object
#
# What is why I use Object.prototype.toString() to know better type of variable. Or use jQuery.type, if it available.
# _classToTypeCache used for perfomance cache of types map (underscore at the beginning mean private method - of course it doesn't realy private).
#
_classToTypeCache: null
_classToType: ->
return @_classToTypeCache if @_classToTypeCache?
@_classToTypeCache = {}
for name in "Boolean Number String Function Array Date RegExp Object Error".split(" ")
@_classToTypeCache["[object #{name}]"] = name.toLowerCase()
@_classToTypeCache
get_object_type: (obj) ->
return root.jQuery.type(obj) if root.jQuery and root.jQuery.type?
return "#{obj}" unless obj?
(if typeof obj is "object" or typeof obj is "function" then @_classToType()[Object::toString.call(obj)] or "object" else typeof obj)
# indexOf helper
indexOf: (array, element) -> if Array::indexOf then array.indexOf(element) else @indexOfImplementation(array, element)
indexOfImplementation: (array, element) ->
result = -1
(result = i for el, i in array when el is element)
result
namespace: (root, namespace, routes) ->
parts = if namespace then namespace.split(".") else []
return routes if parts.length == 0
for part, index in parts
if index < parts.length - 1
root = (root[part] or= {})
else
return root[part] = routes
configure: (new_config) ->
@configuration = @merge(@configuration, new_config)
config: ->
@clone(@configuration)
make: ->
routes = ROUTES
routes.configure = (config) -> Utils.configure(config)
routes.config = -> Utils.config()
routes.default_serializer = (object, prefix) ->
Utils.default_serializer(object, prefix)
# Browser globals
Utils.namespace(root, NAMESPACE, routes)
Object.assign({default: routes}, routes)
result = Utils.make()
# Set up Routes appropriately for the environment.
if typeof define is "function" and define.amd
# AMD
define [], -> result
return result
| 102879 | ###
File generated by js-routes GEM_VERSION
Based on Rails RAILS_VERSION routes of APP_CLASS
###
root = (exports ? this)
ParameterMissing = (message, fileName, lineNumber) ->
instance = new Error(message, fileName, lineNumber)
if Object.setPrototypeOf
Object.setPrototypeOf instance, Object.getPrototypeOf(this)
else
instance.__proto__ = this.__proto__
if Error.captureStackTrace
Error.captureStackTrace instance, ParameterMissing
instance
ParameterMissing.prototype = Object.create(Error.prototype, constructor:
value: Error
enumerable: false
writable: true
configurable: true
)
if Object.setPrototypeOf
Object.setPrototypeOf(ParameterMissing, Error)
else
ParameterMissing.__proto__ = Error
NodeTypes = NODE_TYPES
DeprecatedGlobbingBehavior = DEPRECATED_GLOBBING_BEHAVIOR
SpecialOptionsKey = SPECIAL_OPTIONS_KEY
UriEncoderSegmentRegex = /[^a-zA-Z0-9\-\._~!\$&'\(\)\*\+,;=:@]/g # this is ActionDispatch::Journey::Router::Utils::UriEncoder::SEGMENT
ReservedOptions = [
'anchor'
'trailing_slash'
'subdomain'
'host'
'port'
'protocol'
]
Utils =
configuration:
prefix: PREFIX
default_url_options: DEFAULT_URL_OPTIONS
special_options_key: SPECIAL_OPTIONS_KEY
serializer: SERIALIZER
default_serializer: (object, prefix = null) ->
return "" unless object?
if !prefix and !(@get_object_type(object) is "object")
throw new Error("Url parameters should be a javascript hash")
s = []
switch @get_object_type(object)
when "array"
for element, i in object
s.push @default_serializer(element, prefix + "[]")
when "object"
for own key, prop of object
if !prop? and prefix?
prop = ""
if prop?
key = "#{prefix}[<KEY>key<KEY>}]" if prefix?
s.push @default_serializer(prop, key)
else
if object?
s.push "#{encodeURIComponent(prefix.toString())}=#{encodeURIComponent(object.toString())}"
return "" unless s.length
s.join("&")
serialize: (object) ->
custom_serializer = @configuration.serializer
if custom_serializer? and @get_object_type(custom_serializer) is "function"
custom_serializer(object)
else
@default_serializer(object)
clean_path: (path) ->
path = path.split("://")
last_index = path.length - 1
path[last_index] = path[last_index].replace(/\/+/g, "/")
path.join "://"
extract_options: (number_of_params, args) ->
last_el = args[args.length - 1]
if (args.length > number_of_params and last_el == undefined) or (last_el? and "object" is @get_object_type(last_el) and !@looks_like_serialized_model(last_el))
options = args.pop() || {}
delete options[@configuration.special_options_key]
options
else
{}
looks_like_serialized_model: (object) ->
!object[@configuration.special_options_key] and ("id" of object or "to_param" of object)
path_identifier: (object) ->
return "0" if object is 0
# null, undefined, false or ''
return "" unless object
property = object
if @get_object_type(object) is "object"
if "to_param" of object
throw new ParameterMissing("Route parameter missing: to_param") unless object.to_param?
property = object.to_param
else if "id" of object
throw new ParameterMissing("Route parameter missing: id") unless object.id?
property = object.id
else
property = object
property = property.call(object) if @get_object_type(property) is "function"
property.toString()
clone: (obj) ->
return obj if !obj? or "object" isnt @get_object_type(obj)
copy = obj.constructor()
copy[key] = attr for own key, attr of obj
copy
merge: (xs...) ->
tap = (o, fn) -> fn(o); o
if xs?.length > 0
tap {}, (m) -> m[k] = v for k, v of x for x in xs
normalize_options: (parts, required_parts, default_options, actual_parameters) ->
options = @extract_options(parts.length, actual_parameters)
if actual_parameters.length > parts.length
throw new Error("Too many parameters provided for path")
use_all_parts = actual_parameters.length > required_parts.length
parts_options = {}
for own key of options
use_all_parts = true
if @indexOf(parts, key) >= 0
parts_options[key] = value
options = @merge(@configuration.default_url_options, default_options, options)
result = {}
url_parameters = {}
result['url_parameters'] = url_parameters
for own key, value of options
if @indexOf(ReservedOptions, key) >= 0
result[key] = value
else
url_parameters[key] = value
route_parts = if use_all_parts then parts else required_parts
i = 0
for part in route_parts when i < actual_parameters.length
unless parts_options.hasOwnProperty(part)
url_parameters[part] = actual_parameters[i]
++i
result
build_route: (parts, required_parts, default_options, route, full_url, args) ->
args = Array::slice.call(args)
options = @normalize_options(parts, required_parts, default_options, args)
parameters = options['url_parameters']
# path
result = "#{@get_prefix()}#{@visit(route, parameters)}"
url = Utils.clean_path(result)
# set trailing_slash
url = url.replace(/(.*?)[\/]?$/, "$1/") if options['trailing_slash'] is true
# set additional url params
if (url_params = @serialize(parameters)).length
url += "?#{url_params}"
# set anchor
url += if options.anchor then "##{options.anchor}" else ""
if full_url
url = @route_url(options) + url
url
#
# This function is JavaScript impelementation of the
# Journey::Visitors::Formatter that builds route by given parameters
# from route binary tree.
# Binary tree is serialized in the following way:
# [node type, left node, right node ]
#
# @param {Boolean} optional Marks the currently visited branch as optional.
# If set to `true`, this method will not throw when encountering
# a missing parameter (used in recursive calls).
#
visit: (route, parameters, optional = false) ->
[type, left, right] = route
switch type
when NodeTypes.GROUP
@visit left, parameters, true
when NodeTypes.STAR
@visit_globbing left, parameters, true
when NodeTypes.LITERAL, NodeTypes.SLASH, NodeTypes.DOT
left
when NodeTypes.CAT
left_part = @visit(left, parameters, optional)
right_part = @visit(right, parameters, optional)
if optional and ((@is_optional_node(left[0]) and not left_part) or
((@is_optional_node(right[0])) and not right_part))
return ""
"#{left_part}#{right_part}"
when NodeTypes.SYMBOL
value = parameters[left]
delete parameters[left]
if value?
return @encode_segment(@path_identifier(value))
if optional
"" # missing parameter
else
throw new ParameterMissing("Route parameter missing: #{left}")
#
# I don't know what is this node type
# Please send your PR if you do
#
# when NodeTypes.OR:
else
throw new Error("Unknown Rails node type")
encode_segment: (segment) -> segment.replace(UriEncoderSegmentRegex, (str) -> encodeURIComponent(str))
is_optional_node: (node) -> @indexOf([NodeTypes.STAR, NodeTypes.SYMBOL, NodeTypes.CAT], node) >= 0
#
# This method build spec for route
#
build_path_spec: (route, wildcard=false) ->
[type, left, right] = route
switch type
when NodeTypes.GROUP
"(#{@build_path_spec(left)})"
when NodeTypes.CAT
"#{@build_path_spec(left)}#{@build_path_spec(right)}"
when NodeTypes.STAR
@build_path_spec(left, true)
when NodeTypes.SYMBOL
if wildcard is true
"#{if left[0] is '*' then '' else '*'}#{left}"
else
":#{left}"
when NodeTypes.SLASH, NodeTypes.DOT, NodeTypes.LITERAL
left
# Not sure about this one
# when NodeTypes.OR
else throw new Error("Unknown Rails node type")
#
# This method convert value for globbing in right value for rails route
#
visit_globbing: (route, parameters, optional) ->
[type, left, right] = route
value = parameters[left]
delete parameters[left]
return @visit(route, parameters, optional) unless value?
value = switch @get_object_type(value)
when "array"
value.join("/")
else
value
if DeprecatedGlobbingBehavior
@path_identifier(value)
else
encodeURI(@path_identifier(value))
#
# This method check and return prefix from options
#
get_prefix: ->
prefix = @configuration.prefix
prefix = (if prefix.match("/$") then prefix else "#{prefix}/") if prefix isnt ""
prefix
#
# route function: create route path function and add spec to it
#
route: (parts_table, default_options, route_spec, full_url) ->
required_parts = []
parts = []
for [part, required] in parts_table
parts.push(part)
required_parts.push(part) if required
path_fn = -> Utils.build_route(
parts, required_parts, default_options, route_spec, full_url, arguments
)
path_fn.required_params = required_parts
path_fn.toString = -> Utils.build_path_spec(route_spec)
path_fn
route_url: (route_defaults) ->
return route_defaults if typeof route_defaults == 'string'
hostname = route_defaults.host || Utils.current_host()
return '' unless hostname
subdomain = if route_defaults.subdomain then route_defaults.subdomain + '.' else ''
protocol = route_defaults.protocol || Utils.current_protocol()
port = route_defaults.port || (Utils.current_port() unless route_defaults.host)
port = if port then ":#{port}" else ''
protocol + "://" + subdomain + hostname + port
has_location: -> window?.location?
current_host: ->
if @has_location() then window.location.hostname else null
current_protocol: () ->
if @has_location() && window.location.protocol != ''
# location.protocol includes the colon character
window.location.protocol.replace(/:$/, '')
else
'http'
current_port: () ->
if @has_location() && window.location.port != ''
window.location.port
else
''
#
# This is helper method to define object type.
# The typeof operator is probably the biggest design flaw of JavaScript, simply because it's basically completely broken.
#
# Value Class Type
# -------------------------------------
# "foo" String string
# new String("foo") String object
# 1.2 Number number
# new Number(1.2) Number object
# true Boolean boolean
# new Boolean(true) Boolean object
# new Date() Date object
# new Error() Error object
# [1,2,3] Array object
# new Array(1, 2, 3) Array object
# new Function("") Function function
# /abc/g RegExp object
# new RegExp("meow") RegExp object
# {} Object object
# new Object() Object object
#
# What is why I use Object.prototype.toString() to know better type of variable. Or use jQuery.type, if it available.
# _classToTypeCache used for perfomance cache of types map (underscore at the beginning mean private method - of course it doesn't realy private).
#
_classToTypeCache: null
_classToType: ->
return @_classToTypeCache if @_classToTypeCache?
@_classToTypeCache = {}
for name in "Boolean Number String Function Array Date RegExp Object Error".split(" ")
@_classToTypeCache["[object #{name}]"] = name.toLowerCase()
@_classToTypeCache
get_object_type: (obj) ->
return root.jQuery.type(obj) if root.jQuery and root.jQuery.type?
return "#{obj}" unless obj?
(if typeof obj is "object" or typeof obj is "function" then @_classToType()[Object::toString.call(obj)] or "object" else typeof obj)
# indexOf helper
indexOf: (array, element) -> if Array::indexOf then array.indexOf(element) else @indexOfImplementation(array, element)
indexOfImplementation: (array, element) ->
result = -1
(result = i for el, i in array when el is element)
result
namespace: (root, namespace, routes) ->
parts = if namespace then namespace.split(".") else []
return routes if parts.length == 0
for part, index in parts
if index < parts.length - 1
root = (root[part] or= {})
else
return root[part] = routes
configure: (new_config) ->
@configuration = @merge(@configuration, new_config)
config: ->
@clone(@configuration)
make: ->
routes = ROUTES
routes.configure = (config) -> Utils.configure(config)
routes.config = -> Utils.config()
routes.default_serializer = (object, prefix) ->
Utils.default_serializer(object, prefix)
# Browser globals
Utils.namespace(root, NAMESPACE, routes)
Object.assign({default: routes}, routes)
result = Utils.make()
# Set up Routes appropriately for the environment.
if typeof define is "function" and define.amd
# AMD
define [], -> result
return result
| true | ###
File generated by js-routes GEM_VERSION
Based on Rails RAILS_VERSION routes of APP_CLASS
###
root = (exports ? this)
ParameterMissing = (message, fileName, lineNumber) ->
instance = new Error(message, fileName, lineNumber)
if Object.setPrototypeOf
Object.setPrototypeOf instance, Object.getPrototypeOf(this)
else
instance.__proto__ = this.__proto__
if Error.captureStackTrace
Error.captureStackTrace instance, ParameterMissing
instance
ParameterMissing.prototype = Object.create(Error.prototype, constructor:
value: Error
enumerable: false
writable: true
configurable: true
)
if Object.setPrototypeOf
Object.setPrototypeOf(ParameterMissing, Error)
else
ParameterMissing.__proto__ = Error
NodeTypes = NODE_TYPES
DeprecatedGlobbingBehavior = DEPRECATED_GLOBBING_BEHAVIOR
SpecialOptionsKey = SPECIAL_OPTIONS_KEY
UriEncoderSegmentRegex = /[^a-zA-Z0-9\-\._~!\$&'\(\)\*\+,;=:@]/g # this is ActionDispatch::Journey::Router::Utils::UriEncoder::SEGMENT
ReservedOptions = [
'anchor'
'trailing_slash'
'subdomain'
'host'
'port'
'protocol'
]
Utils =
configuration:
prefix: PREFIX
default_url_options: DEFAULT_URL_OPTIONS
special_options_key: SPECIAL_OPTIONS_KEY
serializer: SERIALIZER
default_serializer: (object, prefix = null) ->
return "" unless object?
if !prefix and !(@get_object_type(object) is "object")
throw new Error("Url parameters should be a javascript hash")
s = []
switch @get_object_type(object)
when "array"
for element, i in object
s.push @default_serializer(element, prefix + "[]")
when "object"
for own key, prop of object
if !prop? and prefix?
prop = ""
if prop?
key = "#{prefix}[PI:KEY:<KEY>END_PIkeyPI:KEY:<KEY>END_PI}]" if prefix?
s.push @default_serializer(prop, key)
else
if object?
s.push "#{encodeURIComponent(prefix.toString())}=#{encodeURIComponent(object.toString())}"
return "" unless s.length
s.join("&")
serialize: (object) ->
custom_serializer = @configuration.serializer
if custom_serializer? and @get_object_type(custom_serializer) is "function"
custom_serializer(object)
else
@default_serializer(object)
clean_path: (path) ->
path = path.split("://")
last_index = path.length - 1
path[last_index] = path[last_index].replace(/\/+/g, "/")
path.join "://"
extract_options: (number_of_params, args) ->
last_el = args[args.length - 1]
if (args.length > number_of_params and last_el == undefined) or (last_el? and "object" is @get_object_type(last_el) and !@looks_like_serialized_model(last_el))
options = args.pop() || {}
delete options[@configuration.special_options_key]
options
else
{}
looks_like_serialized_model: (object) ->
!object[@configuration.special_options_key] and ("id" of object or "to_param" of object)
path_identifier: (object) ->
return "0" if object is 0
# null, undefined, false or ''
return "" unless object
property = object
if @get_object_type(object) is "object"
if "to_param" of object
throw new ParameterMissing("Route parameter missing: to_param") unless object.to_param?
property = object.to_param
else if "id" of object
throw new ParameterMissing("Route parameter missing: id") unless object.id?
property = object.id
else
property = object
property = property.call(object) if @get_object_type(property) is "function"
property.toString()
clone: (obj) ->
return obj if !obj? or "object" isnt @get_object_type(obj)
copy = obj.constructor()
copy[key] = attr for own key, attr of obj
copy
merge: (xs...) ->
tap = (o, fn) -> fn(o); o
if xs?.length > 0
tap {}, (m) -> m[k] = v for k, v of x for x in xs
normalize_options: (parts, required_parts, default_options, actual_parameters) ->
options = @extract_options(parts.length, actual_parameters)
if actual_parameters.length > parts.length
throw new Error("Too many parameters provided for path")
use_all_parts = actual_parameters.length > required_parts.length
parts_options = {}
for own key of options
use_all_parts = true
if @indexOf(parts, key) >= 0
parts_options[key] = value
options = @merge(@configuration.default_url_options, default_options, options)
result = {}
url_parameters = {}
result['url_parameters'] = url_parameters
for own key, value of options
if @indexOf(ReservedOptions, key) >= 0
result[key] = value
else
url_parameters[key] = value
route_parts = if use_all_parts then parts else required_parts
i = 0
for part in route_parts when i < actual_parameters.length
unless parts_options.hasOwnProperty(part)
url_parameters[part] = actual_parameters[i]
++i
result
build_route: (parts, required_parts, default_options, route, full_url, args) ->
args = Array::slice.call(args)
options = @normalize_options(parts, required_parts, default_options, args)
parameters = options['url_parameters']
# path
result = "#{@get_prefix()}#{@visit(route, parameters)}"
url = Utils.clean_path(result)
# set trailing_slash
url = url.replace(/(.*?)[\/]?$/, "$1/") if options['trailing_slash'] is true
# set additional url params
if (url_params = @serialize(parameters)).length
url += "?#{url_params}"
# set anchor
url += if options.anchor then "##{options.anchor}" else ""
if full_url
url = @route_url(options) + url
url
#
# This function is JavaScript impelementation of the
# Journey::Visitors::Formatter that builds route by given parameters
# from route binary tree.
# Binary tree is serialized in the following way:
# [node type, left node, right node ]
#
# @param {Boolean} optional Marks the currently visited branch as optional.
# If set to `true`, this method will not throw when encountering
# a missing parameter (used in recursive calls).
#
visit: (route, parameters, optional = false) ->
[type, left, right] = route
switch type
when NodeTypes.GROUP
@visit left, parameters, true
when NodeTypes.STAR
@visit_globbing left, parameters, true
when NodeTypes.LITERAL, NodeTypes.SLASH, NodeTypes.DOT
left
when NodeTypes.CAT
left_part = @visit(left, parameters, optional)
right_part = @visit(right, parameters, optional)
if optional and ((@is_optional_node(left[0]) and not left_part) or
((@is_optional_node(right[0])) and not right_part))
return ""
"#{left_part}#{right_part}"
when NodeTypes.SYMBOL
value = parameters[left]
delete parameters[left]
if value?
return @encode_segment(@path_identifier(value))
if optional
"" # missing parameter
else
throw new ParameterMissing("Route parameter missing: #{left}")
#
# I don't know what is this node type
# Please send your PR if you do
#
# when NodeTypes.OR:
else
throw new Error("Unknown Rails node type")
encode_segment: (segment) -> segment.replace(UriEncoderSegmentRegex, (str) -> encodeURIComponent(str))
is_optional_node: (node) -> @indexOf([NodeTypes.STAR, NodeTypes.SYMBOL, NodeTypes.CAT], node) >= 0
#
# This method build spec for route
#
build_path_spec: (route, wildcard=false) ->
[type, left, right] = route
switch type
when NodeTypes.GROUP
"(#{@build_path_spec(left)})"
when NodeTypes.CAT
"#{@build_path_spec(left)}#{@build_path_spec(right)}"
when NodeTypes.STAR
@build_path_spec(left, true)
when NodeTypes.SYMBOL
if wildcard is true
"#{if left[0] is '*' then '' else '*'}#{left}"
else
":#{left}"
when NodeTypes.SLASH, NodeTypes.DOT, NodeTypes.LITERAL
left
# Not sure about this one
# when NodeTypes.OR
else throw new Error("Unknown Rails node type")
#
# This method convert value for globbing in right value for rails route
#
visit_globbing: (route, parameters, optional) ->
[type, left, right] = route
value = parameters[left]
delete parameters[left]
return @visit(route, parameters, optional) unless value?
value = switch @get_object_type(value)
when "array"
value.join("/")
else
value
if DeprecatedGlobbingBehavior
@path_identifier(value)
else
encodeURI(@path_identifier(value))
#
# This method check and return prefix from options
#
get_prefix: ->
prefix = @configuration.prefix
prefix = (if prefix.match("/$") then prefix else "#{prefix}/") if prefix isnt ""
prefix
#
# route function: create route path function and add spec to it
#
route: (parts_table, default_options, route_spec, full_url) ->
required_parts = []
parts = []
for [part, required] in parts_table
parts.push(part)
required_parts.push(part) if required
path_fn = -> Utils.build_route(
parts, required_parts, default_options, route_spec, full_url, arguments
)
path_fn.required_params = required_parts
path_fn.toString = -> Utils.build_path_spec(route_spec)
path_fn
route_url: (route_defaults) ->
return route_defaults if typeof route_defaults == 'string'
hostname = route_defaults.host || Utils.current_host()
return '' unless hostname
subdomain = if route_defaults.subdomain then route_defaults.subdomain + '.' else ''
protocol = route_defaults.protocol || Utils.current_protocol()
port = route_defaults.port || (Utils.current_port() unless route_defaults.host)
port = if port then ":#{port}" else ''
protocol + "://" + subdomain + hostname + port
has_location: -> window?.location?
current_host: ->
if @has_location() then window.location.hostname else null
current_protocol: () ->
if @has_location() && window.location.protocol != ''
# location.protocol includes the colon character
window.location.protocol.replace(/:$/, '')
else
'http'
current_port: () ->
if @has_location() && window.location.port != ''
window.location.port
else
''
#
# This is helper method to define object type.
# The typeof operator is probably the biggest design flaw of JavaScript, simply because it's basically completely broken.
#
# Value Class Type
# -------------------------------------
# "foo" String string
# new String("foo") String object
# 1.2 Number number
# new Number(1.2) Number object
# true Boolean boolean
# new Boolean(true) Boolean object
# new Date() Date object
# new Error() Error object
# [1,2,3] Array object
# new Array(1, 2, 3) Array object
# new Function("") Function function
# /abc/g RegExp object
# new RegExp("meow") RegExp object
# {} Object object
# new Object() Object object
#
# What is why I use Object.prototype.toString() to know better type of variable. Or use jQuery.type, if it available.
# _classToTypeCache used for perfomance cache of types map (underscore at the beginning mean private method - of course it doesn't realy private).
#
_classToTypeCache: null
_classToType: ->
return @_classToTypeCache if @_classToTypeCache?
@_classToTypeCache = {}
for name in "Boolean Number String Function Array Date RegExp Object Error".split(" ")
@_classToTypeCache["[object #{name}]"] = name.toLowerCase()
@_classToTypeCache
get_object_type: (obj) ->
return root.jQuery.type(obj) if root.jQuery and root.jQuery.type?
return "#{obj}" unless obj?
(if typeof obj is "object" or typeof obj is "function" then @_classToType()[Object::toString.call(obj)] or "object" else typeof obj)
# indexOf helper
indexOf: (array, element) -> if Array::indexOf then array.indexOf(element) else @indexOfImplementation(array, element)
indexOfImplementation: (array, element) ->
result = -1
(result = i for el, i in array when el is element)
result
namespace: (root, namespace, routes) ->
parts = if namespace then namespace.split(".") else []
return routes if parts.length == 0
for part, index in parts
if index < parts.length - 1
root = (root[part] or= {})
else
return root[part] = routes
configure: (new_config) ->
@configuration = @merge(@configuration, new_config)
config: ->
@clone(@configuration)
make: ->
routes = ROUTES
routes.configure = (config) -> Utils.configure(config)
routes.config = -> Utils.config()
routes.default_serializer = (object, prefix) ->
Utils.default_serializer(object, prefix)
# Browser globals
Utils.namespace(root, NAMESPACE, routes)
Object.assign({default: routes}, routes)
result = Utils.make()
# Set up Routes appropriately for the environment.
if typeof define is "function" and define.amd
# AMD
define [], -> result
return result
|
[
{
"context": "#!/usr/bin/coffee\n\n###\n#@author rankun203@gmail.com\n#List files with specified extension in a specifi",
"end": 51,
"score": 0.9999111890792847,
"start": 32,
"tag": "EMAIL",
"value": "rankun203@gmail.com"
}
] | node2/node5filteredLs.coffee | rankun203/ModernWebStudy | 0 | #!/usr/bin/coffee
###
#@author rankun203@gmail.com
#List files with specified extension in a specified folder.
###
fs = require 'fs'
folder = process.argv[2] ? './'
ext = process.argv[3] ? '.*'
reg = new RegExp "^.*\.#{ext}$"
fs.readdir folder, (err, files) ->
throw err if err
for file in files
console.log file if file.match reg
| 147586 | #!/usr/bin/coffee
###
#@author <EMAIL>
#List files with specified extension in a specified folder.
###
fs = require 'fs'
folder = process.argv[2] ? './'
ext = process.argv[3] ? '.*'
reg = new RegExp "^.*\.#{ext}$"
fs.readdir folder, (err, files) ->
throw err if err
for file in files
console.log file if file.match reg
| true | #!/usr/bin/coffee
###
#@author PI:EMAIL:<EMAIL>END_PI
#List files with specified extension in a specified folder.
###
fs = require 'fs'
folder = process.argv[2] ? './'
ext = process.argv[3] ? '.*'
reg = new RegExp "^.*\.#{ext}$"
fs.readdir folder, (err, files) ->
throw err if err
for file in files
console.log file if file.match reg
|
[
{
"context": "empUser: (callback) ->\n userData = {username: 'temp', password: '2'}\n cookies = null\n req = req",
"end": 254,
"score": 0.9939067959785461,
"start": 250,
"tag": "USERNAME",
"value": "temp"
},
{
"context": ") ->\n userData = {username: 'temp', password: '2'}\n cookies = null\n req = request.get('/auth",
"end": 269,
"score": 0.9994352459907532,
"start": 268,
"tag": "PASSWORD",
"value": "2"
}
] | lobby/test/int/helpers.coffee | towerstorm/game | 11 | app = require('../../lib/app.coffee')
request = require("supertest")(app)
_ = require 'lodash'
querystring = require 'querystring'
tdb = require('database')
User = tdb.models.User
helpers =
createTempUser: (callback) ->
userData = {username: 'temp', password: '2'}
cookies = null
req = request.get('/auth/temp')
req.send(userData)
.expect('Content-Type', /json/)
.expect(200).end (err, res) ->
if err then return callback(err)
cookies = res.headers['set-cookie'].pop().split(';')[0];
userInfo = JSON.parse(res.text)
userInfo = _.merge(userInfo, {cookies})
callback(null, userInfo)
createRegisteredUser: (callback) ->
@createTempUser (err, userInfo) ->
if err then return callback(err)
User.findById userInfo.id, (err, user) ->
if err then return callback(err)
user.set('role', 'user')
user.save (err, result) ->
if err then return callback(err)
return callback(null, userInfo)
getUserInfo: (userCookies, callback) ->
req = request.get('/user/')
req.cookies = userCookies
req.expect(200).end (err, res) ->
if err then return callback(err)
userInfo = JSON.parse(res.text)
return callback(null, userInfo)
addFriend: (userInfo, friendInfo, callback) ->
req = request.get('/user/friends/add/' + friendInfo.username)
req.cookies = userInfo.cookies
req.expect(200).end (err, res) ->
if err then return callback(err)
req = request.get('/user/friends/accept/' + userInfo.id)
req.cookies = friendInfo.cookies
req.expect(200).end (err, res) ->
if err then return callback(err)
callback(null, true)
createLobby: (hostCookies, params, callback) ->
if arguments.length == 2
callback = params
params = {}
req = request.get('/lobby/create/?' + querystring.stringify(params))
req.cookies = hostCookies
req.expect(200)
.end (err, res) ->
if err then return callback(err)
lobbyInfo = JSON.parse(res.text)
return callback(null, lobbyInfo)
createQueuer: (hostCookies, params, callback) ->
if arguments.length == 2
callback = params
params = {}
req = request.get('/lobby/create/?' + querystring.stringify(params))
req.cookies = hostCookies
req.expect(200)
.end (err, res) ->
if err then return callback(err)
lobbyInfo = JSON.parse(res.text)
return callback(null, lobbyInfo)
module.exports = helpers | 186912 | app = require('../../lib/app.coffee')
request = require("supertest")(app)
_ = require 'lodash'
querystring = require 'querystring'
tdb = require('database')
User = tdb.models.User
helpers =
createTempUser: (callback) ->
userData = {username: 'temp', password: '<PASSWORD>'}
cookies = null
req = request.get('/auth/temp')
req.send(userData)
.expect('Content-Type', /json/)
.expect(200).end (err, res) ->
if err then return callback(err)
cookies = res.headers['set-cookie'].pop().split(';')[0];
userInfo = JSON.parse(res.text)
userInfo = _.merge(userInfo, {cookies})
callback(null, userInfo)
createRegisteredUser: (callback) ->
@createTempUser (err, userInfo) ->
if err then return callback(err)
User.findById userInfo.id, (err, user) ->
if err then return callback(err)
user.set('role', 'user')
user.save (err, result) ->
if err then return callback(err)
return callback(null, userInfo)
getUserInfo: (userCookies, callback) ->
req = request.get('/user/')
req.cookies = userCookies
req.expect(200).end (err, res) ->
if err then return callback(err)
userInfo = JSON.parse(res.text)
return callback(null, userInfo)
addFriend: (userInfo, friendInfo, callback) ->
req = request.get('/user/friends/add/' + friendInfo.username)
req.cookies = userInfo.cookies
req.expect(200).end (err, res) ->
if err then return callback(err)
req = request.get('/user/friends/accept/' + userInfo.id)
req.cookies = friendInfo.cookies
req.expect(200).end (err, res) ->
if err then return callback(err)
callback(null, true)
createLobby: (hostCookies, params, callback) ->
if arguments.length == 2
callback = params
params = {}
req = request.get('/lobby/create/?' + querystring.stringify(params))
req.cookies = hostCookies
req.expect(200)
.end (err, res) ->
if err then return callback(err)
lobbyInfo = JSON.parse(res.text)
return callback(null, lobbyInfo)
createQueuer: (hostCookies, params, callback) ->
if arguments.length == 2
callback = params
params = {}
req = request.get('/lobby/create/?' + querystring.stringify(params))
req.cookies = hostCookies
req.expect(200)
.end (err, res) ->
if err then return callback(err)
lobbyInfo = JSON.parse(res.text)
return callback(null, lobbyInfo)
module.exports = helpers | true | app = require('../../lib/app.coffee')
request = require("supertest")(app)
_ = require 'lodash'
querystring = require 'querystring'
tdb = require('database')
User = tdb.models.User
helpers =
createTempUser: (callback) ->
userData = {username: 'temp', password: 'PI:PASSWORD:<PASSWORD>END_PI'}
cookies = null
req = request.get('/auth/temp')
req.send(userData)
.expect('Content-Type', /json/)
.expect(200).end (err, res) ->
if err then return callback(err)
cookies = res.headers['set-cookie'].pop().split(';')[0];
userInfo = JSON.parse(res.text)
userInfo = _.merge(userInfo, {cookies})
callback(null, userInfo)
createRegisteredUser: (callback) ->
@createTempUser (err, userInfo) ->
if err then return callback(err)
User.findById userInfo.id, (err, user) ->
if err then return callback(err)
user.set('role', 'user')
user.save (err, result) ->
if err then return callback(err)
return callback(null, userInfo)
getUserInfo: (userCookies, callback) ->
req = request.get('/user/')
req.cookies = userCookies
req.expect(200).end (err, res) ->
if err then return callback(err)
userInfo = JSON.parse(res.text)
return callback(null, userInfo)
addFriend: (userInfo, friendInfo, callback) ->
req = request.get('/user/friends/add/' + friendInfo.username)
req.cookies = userInfo.cookies
req.expect(200).end (err, res) ->
if err then return callback(err)
req = request.get('/user/friends/accept/' + userInfo.id)
req.cookies = friendInfo.cookies
req.expect(200).end (err, res) ->
if err then return callback(err)
callback(null, true)
createLobby: (hostCookies, params, callback) ->
if arguments.length == 2
callback = params
params = {}
req = request.get('/lobby/create/?' + querystring.stringify(params))
req.cookies = hostCookies
req.expect(200)
.end (err, res) ->
if err then return callback(err)
lobbyInfo = JSON.parse(res.text)
return callback(null, lobbyInfo)
createQueuer: (hostCookies, params, callback) ->
if arguments.length == 2
callback = params
params = {}
req = request.get('/lobby/create/?' + querystring.stringify(params))
req.cookies = hostCookies
req.expect(200)
.end (err, res) ->
if err then return callback(err)
lobbyInfo = JSON.parse(res.text)
return callback(null, lobbyInfo)
module.exports = helpers |
[
{
"context": "g.createPost\n\t\t\t\ttitle\t: \texpected\n\t\t\t\tauthor \t:\t'Mehfuz Hossain'\n\t\t\t\tbody\t:\t'Empty body'\n\t\t\tpromise.then (result)",
"end": 288,
"score": 0.9999035000801086,
"start": 274,
"tag": "NAME",
"value": "Mehfuz Hossain"
},
{
"context": "g.createPost\n\t\t\t\ttitle\t: \texpected\n\t\t\t\tauthor \t:\t'Mehfuz Hossain'\n\t\t\t\tbody\t:\t'Empty body'\n\t\t\t\tpublish :\tfalse\n\t\t\tp",
"end": 801,
"score": 0.9999014735221863,
"start": 787,
"tag": "NAME",
"value": "Mehfuz Hossain"
},
{
"context": "createPost\n\t\t\t\ttitle\t\t: \texpected\n\t\t\t\tauthor \t\t:\t'Mehfuz Hossain'\n\t\t\t\tbody\t\t:\t'Empty body'\n\t\t\t\tpermaLink \t: \t'1900",
"end": 1311,
"score": 0.9999045133590698,
"start": 1297,
"tag": "NAME",
"value": "Mehfuz Hossain"
}
] | applications/lighter/test/blog.coffee | nodebenchmark/benchmarks | 13 | require 'should'
helper = (require '../helper')()
blog = (require __dirname + '/init').blog
describe 'Blog', ->
describe 'find post', ->
expected = 'test post'
_id = ''
beforeEach (done)->
promise = blog.createPost
title : expected
author : 'Mehfuz Hossain'
body : 'Empty body'
promise.then (result) =>
_id = result._id
done()
it 'should return expected for permaLink', (done)->
promise = blog.findPost helper.linkify('test post')
promise.then (data) ->
data.post.title.should.equal expected
done()
afterEach (done)->
blog.deletePost _id, ()->
done()
describe 'list post', ->
expected = 'test post'
id = ''
beforeEach (done)->
promise = blog.createPost
title : expected
author : 'Mehfuz Hossain'
body : 'Empty body'
publish : false
promise.then (result) =>
id = result._id
done()
it 'should skip draft posts', (done)->
promise= blog.find ''
promise.then (data) ->
for post in data.posts
post._id.should.not.equal id
done()
afterEach (done)->
blog.deletePost id, ()->
done()
describe 'update post', ->
id = ''
expected = 'test post'
beforeEach (done)->
promise = blog.createPost
title : expected
author : 'Mehfuz Hossain'
body : 'Empty body'
permaLink : '1900/01/test'
promise.then (result) =>
id = result._id
done()
it 'should not update permaLink when title is same', (done)->
body = 'updated'
promise = blog.updatePost
id : id
title : expected
body : body
promise.then (result)=>
result.permaLink.should.equal '1900/01/test'
result.body.should.equal body
done()
it 'should update the permalink when title is different', (done)->
promise = blog.updatePost
id : id
title : 'updated post'
body : 'nothing'
promise.then (result)=>
result.permaLink.should.equal helper.linkify('updated post')
done()
afterEach (done)->
blog.deletePost id, ()->
done()
| 217128 | require 'should'
helper = (require '../helper')()
blog = (require __dirname + '/init').blog
describe 'Blog', ->
describe 'find post', ->
expected = 'test post'
_id = ''
beforeEach (done)->
promise = blog.createPost
title : expected
author : '<NAME>'
body : 'Empty body'
promise.then (result) =>
_id = result._id
done()
it 'should return expected for permaLink', (done)->
promise = blog.findPost helper.linkify('test post')
promise.then (data) ->
data.post.title.should.equal expected
done()
afterEach (done)->
blog.deletePost _id, ()->
done()
describe 'list post', ->
expected = 'test post'
id = ''
beforeEach (done)->
promise = blog.createPost
title : expected
author : '<NAME>'
body : 'Empty body'
publish : false
promise.then (result) =>
id = result._id
done()
it 'should skip draft posts', (done)->
promise= blog.find ''
promise.then (data) ->
for post in data.posts
post._id.should.not.equal id
done()
afterEach (done)->
blog.deletePost id, ()->
done()
describe 'update post', ->
id = ''
expected = 'test post'
beforeEach (done)->
promise = blog.createPost
title : expected
author : '<NAME>'
body : 'Empty body'
permaLink : '1900/01/test'
promise.then (result) =>
id = result._id
done()
it 'should not update permaLink when title is same', (done)->
body = 'updated'
promise = blog.updatePost
id : id
title : expected
body : body
promise.then (result)=>
result.permaLink.should.equal '1900/01/test'
result.body.should.equal body
done()
it 'should update the permalink when title is different', (done)->
promise = blog.updatePost
id : id
title : 'updated post'
body : 'nothing'
promise.then (result)=>
result.permaLink.should.equal helper.linkify('updated post')
done()
afterEach (done)->
blog.deletePost id, ()->
done()
| true | require 'should'
helper = (require '../helper')()
blog = (require __dirname + '/init').blog
describe 'Blog', ->
describe 'find post', ->
expected = 'test post'
_id = ''
beforeEach (done)->
promise = blog.createPost
title : expected
author : 'PI:NAME:<NAME>END_PI'
body : 'Empty body'
promise.then (result) =>
_id = result._id
done()
it 'should return expected for permaLink', (done)->
promise = blog.findPost helper.linkify('test post')
promise.then (data) ->
data.post.title.should.equal expected
done()
afterEach (done)->
blog.deletePost _id, ()->
done()
describe 'list post', ->
expected = 'test post'
id = ''
beforeEach (done)->
promise = blog.createPost
title : expected
author : 'PI:NAME:<NAME>END_PI'
body : 'Empty body'
publish : false
promise.then (result) =>
id = result._id
done()
it 'should skip draft posts', (done)->
promise= blog.find ''
promise.then (data) ->
for post in data.posts
post._id.should.not.equal id
done()
afterEach (done)->
blog.deletePost id, ()->
done()
describe 'update post', ->
id = ''
expected = 'test post'
beforeEach (done)->
promise = blog.createPost
title : expected
author : 'PI:NAME:<NAME>END_PI'
body : 'Empty body'
permaLink : '1900/01/test'
promise.then (result) =>
id = result._id
done()
it 'should not update permaLink when title is same', (done)->
body = 'updated'
promise = blog.updatePost
id : id
title : expected
body : body
promise.then (result)=>
result.permaLink.should.equal '1900/01/test'
result.body.should.equal body
done()
it 'should update the permalink when title is different', (done)->
promise = blog.updatePost
id : id
title : 'updated post'
body : 'nothing'
promise.then (result)=>
result.permaLink.should.equal helper.linkify('updated post')
done()
afterEach (done)->
blog.deletePost id, ()->
done()
|
[
{
"context": "itten', kittySchema\n\nfluffy = new Kitten({ name: 'fluffy' });\n# fluffy.speak()\nx = new Kitten null\n# x.sp",
"end": 494,
"score": 0.9952356219291687,
"start": 488,
"tag": "NAME",
"value": "fluffy"
},
{
"context": "if err?\n # p kittens\n\n\n# Kitten.find({ name: /^Fluff/ }, callback)\n# Kitten.find { name: /^Fluff/ }, p",
"end": 733,
"score": 0.9860857725143433,
"start": 728,
"tag": "NAME",
"value": "Fluff"
},
{
"context": "me: /^Fluff/ }, callback)\n# Kitten.find { name: /^Fluff/ }, p \"callback\"\n\n\n\ntimelogSchema = mongoose.Sche",
"end": 777,
"score": 0.5586409568786621,
"start": 772,
"tag": "NAME",
"value": "Fluff"
},
{
"context": "oli', timelogSchema\n\ntimelog = new Timelog {name:\"zhaoli\"}\ntimelog.tag = \"new time record\"\ntimelog.start_t",
"end": 990,
"score": 0.8788135051727295,
"start": 984,
"tag": "USERNAME",
"value": "zhaoli"
}
] | js/npm_try/mongoose/mongoose_try.coffee | zhaoace/codecraft | 0 | {exec} = require 'child_process'
p = (x) -> console.log x
mongoose = require 'mongoose'
mongoose.connect 'mongodb://localhost/test'
db = mongoose.connection
db.on "error", console.error.bind console, 'connection error:'
kittySchema = mongoose.Schema {name: String }
kittySchema.methods.speak = () ->
greeting = if this.name then "Meow name is " + this.name else "I don't have a name"
p greeting
Kitten = mongoose.model 'Kitten', kittySchema
fluffy = new Kitten({ name: 'fluffy' });
# fluffy.speak()
x = new Kitten null
# x.speak()
fluffy.save (err, fluffy) ->
return p err if err?
# fluffy.speak()
Kitten.find (err, kittens) ->
return p err if err?
# p kittens
# Kitten.find({ name: /^Fluff/ }, callback)
# Kitten.find { name: /^Fluff/ }, p "callback"
timelogSchema = mongoose.Schema { name: String , tag: String, start_time: Date, end_time: Date }
p "xxx"
Timelog = mongoose.model 'zhaoli', timelogSchema
timelog = new Timelog {name:"zhaoli"}
timelog.tag = "new time record"
timelog.start_time = new Date()
timelog.end_time = new Date()
p timelog
timelog.save (err) ->
Timelog.find (err, timelogs) ->
db.close()
p timelogs
p timelogs.lenth
# db.tianyc04.insert({mark:1, mark_time:new Date()})
| 128037 | {exec} = require 'child_process'
p = (x) -> console.log x
mongoose = require 'mongoose'
mongoose.connect 'mongodb://localhost/test'
db = mongoose.connection
db.on "error", console.error.bind console, 'connection error:'
kittySchema = mongoose.Schema {name: String }
kittySchema.methods.speak = () ->
greeting = if this.name then "Meow name is " + this.name else "I don't have a name"
p greeting
Kitten = mongoose.model 'Kitten', kittySchema
fluffy = new Kitten({ name: '<NAME>' });
# fluffy.speak()
x = new Kitten null
# x.speak()
fluffy.save (err, fluffy) ->
return p err if err?
# fluffy.speak()
Kitten.find (err, kittens) ->
return p err if err?
# p kittens
# Kitten.find({ name: /^<NAME>/ }, callback)
# Kitten.find { name: /^<NAME>/ }, p "callback"
timelogSchema = mongoose.Schema { name: String , tag: String, start_time: Date, end_time: Date }
p "xxx"
Timelog = mongoose.model 'zhaoli', timelogSchema
timelog = new Timelog {name:"zhaoli"}
timelog.tag = "new time record"
timelog.start_time = new Date()
timelog.end_time = new Date()
p timelog
timelog.save (err) ->
Timelog.find (err, timelogs) ->
db.close()
p timelogs
p timelogs.lenth
# db.tianyc04.insert({mark:1, mark_time:new Date()})
| true | {exec} = require 'child_process'
p = (x) -> console.log x
mongoose = require 'mongoose'
mongoose.connect 'mongodb://localhost/test'
db = mongoose.connection
db.on "error", console.error.bind console, 'connection error:'
kittySchema = mongoose.Schema {name: String }
kittySchema.methods.speak = () ->
greeting = if this.name then "Meow name is " + this.name else "I don't have a name"
p greeting
Kitten = mongoose.model 'Kitten', kittySchema
fluffy = new Kitten({ name: 'PI:NAME:<NAME>END_PI' });
# fluffy.speak()
x = new Kitten null
# x.speak()
fluffy.save (err, fluffy) ->
return p err if err?
# fluffy.speak()
Kitten.find (err, kittens) ->
return p err if err?
# p kittens
# Kitten.find({ name: /^PI:NAME:<NAME>END_PI/ }, callback)
# Kitten.find { name: /^PI:NAME:<NAME>END_PI/ }, p "callback"
timelogSchema = mongoose.Schema { name: String , tag: String, start_time: Date, end_time: Date }
p "xxx"
Timelog = mongoose.model 'zhaoli', timelogSchema
timelog = new Timelog {name:"zhaoli"}
timelog.tag = "new time record"
timelog.start_time = new Date()
timelog.end_time = new Date()
p timelog
timelog.save (err) ->
Timelog.find (err, timelogs) ->
db.close()
p timelogs
p timelogs.lenth
# db.tianyc04.insert({mark:1, mark_time:new Date()})
|
[
{
"context": "ricContractsSettings = (company, type) ->\n\tkey = \"contracts#{type}\"\n\tsettings = company.flags[key]\n\tif not settings\n\t",
"end": 5726,
"score": 0.9478248953819275,
"start": 5709,
"tag": "KEY",
"value": "contracts#{type}\""
},
{
"context": "\tminT : 0.3\n\t\t}\n\t\t{\n\t\t\tid : \"Showreel\"\n\t\t\tname : \"Showreel\".localize()\n\t\t\tminD : 0.6\n\t\t}\n\t]\nUI.__olgGene",
"end": 20722,
"score": 0.6772029399871826,
"start": 20718,
"tag": "NAME",
"value": "Show"
},
{
"context": "salary/1e3) * 1e3\n\t\tnewApplicants.push {\n\t\t\tname : name,\n\t\t\tqualityFactor : q,\n\t\t\ttechnologyFactor : t / ",
"end": 22996,
"score": 0.9975956082344055,
"start": 22992,
"tag": "NAME",
"value": "name"
}
] | coffee/api/patches.coffee | Spartan322/Spartan-Dev-Project | 1 | ###
Functions which require patches
###
SDP.GDT.addTraining = (item) ->
item = item.toInput() if SDP.GDT.Training? and item instanceof SDP.GDT.Training
item.pointsCost = 0 unless item.pointsCost?
if Checks.checkPropertiesPresent(item, ['id', 'name', 'pointsCost', 'duration', 'category', 'categoryDisplayName']) and Checks.checkUniqueness(item, 'id', Training.getAllTraining())
Training.moddedTraining(item)
return
SDP.GDT.addPublisher = (item) ->
item = item.toInput() if SDP.GDT.Publisher? and item instanceof SDP.GDT.Publisher
return if not Checks.checkUniqueness(item, 'id', Companies.getAllCompanies())
if Checks.checkPropertiesPresent(item, ['id', 'name']) and Checks.checkUniqueness(item, 'id', ProjectContracts.getAllPublishers())
ProjectContracts.moddedPublishers.push(item)
return
SDP.GDT.addContract = (item) ->
item = item.toInput() if SDP.GDT.Contract? and item instanceof SDP.GDT.Contract
if Checks.checkPropertiesPresent(item, ['name', 'description', 'dF', 'tF'])
ProjectContracts.moddedContracts.push(item)
return
SDP.GDT.addReviewer = (item) ->
if item.constructor is String then item = {id: item.replace(/\s/g,""), name: item}
item = item.toInput() if SDP.GDT.Reviewer? and item instanceof SDP.GDT.Reviewer
if Checks.checkPropertiesPresent(item, ['id', 'name']) and Checks.checkUniqueness(item, 'id', Reviews.getAllReviewers())
Reviews.moddedReviewers.push(item)
return
SDP.GDT.addReviewMessage = (item) ->
if item.constructor is String then item = {message: item, isRandom: true}
if item.message or item.getMessage
Reviews.moddedMessages.push(item)
return
SDP.GDT.addApplicantFunctor = (item) ->
if Checks.checkPropertiesPresent(item, ['apply', 'forMale']) and typeof apply is "function"
JobApplicants.moddedAlgorithims.push(item)
return
SDP.GDT.addFamousFunctor = (item) ->
if Checks.checkPropertiesPresent(item, ['apply', 'forMale']) and typeof apply is "function"
JobApplicants.moddedFamous.push(item)
return
###
#
# Patches: improves game modularbility and performance and kills bugs
# Should force patches on mod load
#
###
SDP.GDT.Internal = {}
SDP.GDT.Internal.notificationsToTrigger = []
###
Triggers all notifications in the case they couldn't be triggered before (ie: before the GameManager.company.notification existed
###
GDT.on(GDT.eventKeys.saves.loaded, ->
GameManager.company.notifications.push(i) for i in SDP.GDT.Internal.notificationsToTrigger
SDP.GDT.Internal.notificationsToTrigger = [])
GDT.on(GDT.eventKeys.saves.newGame, ->
GameManager.company.notifications.push(i) for i in SDP.GDT.Internal.notificationsToTrigger
SDP.GDT.Internal.notificationsToTrigger = [])
###
Allows new platforms to incorporate different images based on the date
###
Platforms._oldGetPlatformImage = Platforms.getPlatformImage
Platforms.getPlatformImage = (platform, week) ->
if platform.id is 'PC' then return Platforms._oldGetPlatformImage(platform, week)
if not platform.imageDates? or not platform.baseIconUri? then return platform.iconUri
baseUri = platform.baseIconUri
image = null
if week and platform.imageDates.constructor is Array
image = "{0}/{1}-{2}.png".format(baseUri, platform.id, String(i+1)) for date, i in platform.imageDates when General.getWeekFromDateString(date) <= week and i isnt 0
image = "{0}/{1}.png".format(baseUri, platform.id) unless image?
return image
###
Forces getAllTraining to include modded training
###
Training._oldGetAllTraining = Training.getAllTraining
Training.moddedTraining = []
Training.getAllTraining = ->
trainings = Training._oldGetAllTraining()
for modT in Training.moddedTraining when modT.id? and modT.isTraining # provide more expected behavior
trainings.push(modT)
return
###
Adds features to the publisher contracts which determine how they act
Also allows low chance for platform company to randomly give a publisher contract
###
ProjectContracts.createPublisher = (item, id) ->
if item.constructor is String then item = {name: item}
if id? then item.id = id
if not item.id? and item.name? then item.id = name.replace(/\s/g,"")
item
ProjectContracts.vanillaPublishers = [
ProjectContracts.createPublisher("Active Visionaries")
ProjectContracts.createPublisher("Electronic Mass Productions", "ea")
ProjectContracts.createPublisher("Rockville Softworks")
ProjectContracts.createPublisher("Blue Bit Games")
ProjectContracts.createPublisher("CapeCom")
ProjectContracts.createPublisher("Codemeisters")
ProjectContracts.createPublisher("Deep Platinum")
ProjectContracts.createPublisher("Infro Games")
ProjectContracts.createPublisher("LoWood Productions")
ProjectContracts.createPublisher("TGQ")
ProjectContracts.createPublisher("\u00dcberSoft")
]
ProjectContracts.moddedPublishers = []
ProjectContracts.publisherContracts.__oldGetContract = ProjectContracts.publisherContracts.getContract
ProjectContracts.getAllPublishers = ->
results = ProjectContracts.vanillaPublishers.filter (val) -> val.id?
results.push(ProjectContracts.moddedPublishers.filter (val) -> val.id?)
results
ProjectContracts.getAvailablePublishers = (company) ->
week = Math.floor(company.currentWeek)
ProjectContracts.getAllPublishers().filter((val) ->
return (not val.startWeek? or week > General.getWeekFromDateString(val.startWeek, val.ignoreGameLengthModifier)) and (not val.retireWeek? or val.retireWeek is '260/12/4' or week < General.getWeekFromDateString(val.retireWeek, val.ignoreGameLengthModifier))
)
ProjectContracts.getPublishingCompanies = (company) ->
c = Companies.getAllCompanies(company).filter (val) -> val.notPublisher? and not val.notPublisher
c.forEach (val) -> val.isCompany = true
c
SDP.GDT.Internal.getGenericContractsSettings = (company, type) ->
key = "contracts#{type}"
settings = company.flags[key]
if not settings
settings = {id: key}
company.flags[key] = settings
settings
SDP.GDT.Internal.generatePublisherContracts = (company, settings, maxNumber) ->
contracts = []
seed = settings.seed
random = new MersenneTwister(SDP.Util.getSeed(settings))
if settings.seed isnt seed
settings.topic = undefined
settings.researchedTopics = undefined
settings.excludes = undefined
settings.platforms = undefined
if not settings.topics or not settings.researchedTopics or not settings.platforms
topics = company.topics.slice()
topics.addRange(General.getTopicsAvailableForResearch(company))
settings.topics = topics.map (t) -> t.id
researchedTopics = company.topics.map (t) -> t.id
settings.researchedTopics = researchedTopics
platforms = Platforms.getPlatformsOnMarket(company).filter (p) -> not p.isCustom and Platforms.doesPlatformSupportGameSize(p, "medium")
settings.platforms = platforms.map (p) -> p.id
settings.excludes = []
lastGame = company.gameLog.last()
settings.excludes.push {genre: lastGame.genre.id, topic: lastGame.topic.id} if lastGame
else
topics = settings.topics.map (id) -> Topics.topics.first (t) -> t.id is id
researchedTopics = settings.researchedTopics.map (id) -> Topics.topics.first (t) -> t.id is id
allPlatforms = Platforms.getPlatforms(company, true)
platforms = settings.platforms.map (id) -> allPlatforms.first (p) -> p.id is id
excludes = settings.excludes.slice()
count = SDP.Util.getRandomInt(random, maxNumber)
count = Math.max(1, count) if settings.intialSettings
sizes = ["medium"]
sizes.push("large","large","large") if company.canDevelopLargeGames()
audiences = SDP.Enum.Audience.toArray()
publishers = ProjectContracts.getAvailablePublishers(company)
publishers.push(ProjectContracts.getPublishingCompanies(company))
sizeBasePay = { medium:15e4, large:15e5/2 }
for i in [0...count]
if platform and (platform.company and random.random() <= 0.2)
publisher = publishers.find((val) -> val.toString() is platform.company)
else if random.random() <= 0.1
publisher = publishers.pickRandom(random) # Adds a low chance for random platform company contracts
else publisher = publishers.filter((val) -> not val.isCompany?()).pickRandom(random)
diffculty = 0
genre = undefined
topic = undefined
if random.random() <= 0.7
genre = if publisher.getGenre? then publisher.getGenre(random) else General.getAvailableGenres(company).pickRandom(random)
diffculty += 0.1
if random.random() <= 0.7
loop
if random.random() <= 0.7
topic = if publisher.getTopic? then publisher.getTopic(random, topics.except(researchedTopics)) else topics.except(researchedTopics).pickRandom(random)
else
topic = if publisher.getTopic? then publisher.getTopic(random, topics) else topics.pickRandom(random)
break if topic?
break unless excludes.some (e) -> (not genre? or e.genre is genre.id) and e.topic is topic.id
difficulty += 0.1 if topic?
excludes.push({genre: genre?.id, topic: topic?.id}) if genre or topic
platform = undefined
if random.random() <= 0.7
platform = if publisher.getPlatform? then publisher.getPlatform(random, platforms) else platform = platforms.pickRandom(random)
audience = undefined
if company.canSetTargetAudience() and random.random() <= 0.2
audience = if publisher.getAudience? then publisher.getAudience(random) else audience = audiences.pickRandom(random)
diffculty += 0.8 * random.random()
minScore = 4 + Math.floor(5 * diffculty)
loop
size = sizes.pickRandom(random)
break unless platform? and not Platforms.doesPlatformSupportGameSize(platform, size)
basePay = sizeBasePay[size]
pay = basePay * (minScore/10)
pay /= 5e3
pay = Math.max(1, Math.floor(pay)) * 5e3
penalty = pay * 1.2 + pay * 1.8 * random.random()
penalty /= 5e3
penalty = Math.floor(penalty) * 5e3
royaltyRate = Math.floor(7 + 8 * difficulty) / 100
name = "#{if topic then topic.name else 'Any Topic'.localize()} / #{if genre then genre.name else 'Any Genre'.localize()}"
if not platform or Platforms.getPlatformsOnMarket(company).first((p) -> p.id is platform.id)
pubName = if publisher.getName? then publisher.getName() else publisher.toString()
contracts.push {
id: "publisherContracts"
refNumber: Math.floor(Math.random() * 65535)
type: "gameContract"
name: name
description: "Publisher: {0}".localize().format(pubName)
publisher: pubName
topic: if topic then topic.id else topic
genre: if genre then genre.id else genre
platform: if platform then platform.id else undefined
gameSize: size
gameAudience: audience
minScore: minScore
payment: pay
penalty: penalty
royaltyRate: royaltyRate
}
else count++
contracts
ProjectContracts.publisherContracts.getContract = (company) ->
SDP.GDT.Internal.generatePublisherContracts(company, SDP.GDT.Internal.getGenericContractsSettings(company, "publisher"), 5).filter (c) -> not c.skip
###
Allows adding of standard contract work
###
ProjectContracts.moddedContracts = []
ProjectContracts.getAvailableModContractsOf = (company, size) ->
contracts = []
for c in ProjectContracts.moddedContracts when not c.isAvailable? or (c.isAvailable? and c.isAvailable(company))
contracts.push(c) if c.size is size
contracts
ProjectContracts.genericContracts.__oldGetContract = ProjectContracts.genericContracts.getContract
ProjectContracts.genericContracts.getContract = (company) ->
settings = SDP.GDT.Internal.getGenericContractsSettings(company, "small")
seed = SDP.Util.getSeed(settings)
random = new MersenneTwister(seed)
genCon = SDP.GDT.Internal.generateContracts
resultContracts = []
contracts = ProjectContracts.genericContracts.__oldGetContract(company)
contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "small"), 4)
if company.flags.mediumContractsEnabled
settings = SDP.GDT.Internal.getGenericContractsSettings(company, "medium")
contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "medium"), 3)
if company.flags.largeContractsEnabled
settings = SDP.GDT.Internal.getGenericContractsSettings(company, "large")
contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "large"), 2)
return contracts.shuffle(random).filter (c) -> not c.skip
SDP.GDT.Internal.generateContracts = (company, settings, sourceSet, size, maxNumber) ->
seed = SDP.Util.getSeed(settings)
random = new MersenneTwister(seed)
contracts = []
set = sourceSet.slice()
count = SDP.Util.getRandomInt(random, maxNumber)
count = Math.max(1, count) if settings.intialSettings
for i in [0...count] when set.length > 0
item = set.pickRandom(random)
set.remove(item)
contract = SDP.GDT.Internal.generateSpecificContract(company, item, size, random)
contract.id = "genericContracts"
contract.index = i
contract.skip = true if settings.contractsDone and settings.contractsDone.indexOf(i) isnt -1
contracts.push(contract)
contracts
SDP.GDT.Internal.generateSpecificContract = (company, template, size, random) ->
r = random.random()
r += random.random() if random.random() > 0.8
minPoints = 11
minPoints = 30 if size is "medium"
minPoints = 100 if size is "large"
minPoints += 6 if minPoints is 12 and company.staff.length > 2
factor = company.getCurrentDate().year / 25
minPoints += minPoints * factor
points = minPoints + minPoints * r
pointPart = points / (template.dF + template.tF)
d = pointPart * template.dF
t = pointPart * template.tF
d += d * 0.2 * random.random() * random.randomSign()
t += t * 0.2 * random.random() * random.randomSign()
d = Math.floor(d)
t = Math.floor(t)
pay = points * 1e3
pay /= 1e3
pay = Math.floor(pay) * 1e3
weeks = Math.floor(3 + 7 * random.random())
weeks = Math.floor(3 + 3 * random.random()) if size is "small"
penalty = pay * 0.2 + pay * 0.3 * random.random()
penalty /= 1e3
penalty = Math.floor(penalty) * 1e3
return {
name : template.name,
description : template.description
requiredD : d
requiredT : t
spawnedD : 0
spawnedT : 0
payment : pay
penalty : -penalty
weeksToFinish : weeks
rF : template.rF
isGeneric : true
size : size
}
###
Allows adding reviewer names to the reviewer list along with existing and retire dates
Allows adding review messages
###
Reviews.moddedReviewers = []
Reviews.moddedMessages = []
Reviews.vanillaReviewers = [
{id: 'StarGames', name: 'Star Games'}
{id: 'InformedGamer', name: 'Informed Game'}
{id: 'GameHero', name: 'Game Hero'}
{id: 'AllGames', name: 'All Games'}
]
Reviews.getAllReviewers = ->
result = Reviews.vanillaReviewers.slice()
result.addRange(Reviews.moddedReviewers.slice())
result
Reviews.getAvailableReviewers = (company) ->
week = Math.floor(company.currentWeek)
Reviews.getAllReviewers().filter((val) ->
return (not val.startWeek? or week > General.getWeekFromDateString(val.startWeek, val.ignoreGameLengthModifier)) and (not val.retireWeek? or week < General.getWeekFromDateString(val.retireWeek, val.ignoreGameLengthModifier))
)
Reviews.getFourRandomReviewers = (company) ->
reviews = Reviews.getAvailableReviewers(company)
if reviews.length < 4 then throw "Reviewers are missing"
if reviews.length is 4 then return [reviews[0],reviews[1],reviews[2], reviews[3]]
random = company._mersenneTwister
first = reviews.pickRandom(random)
reviews = reviews.except(first)
second = reviews.pickRandom(random)
reviews = reviews.except(second)
third = reviews.pickRandom(random)
reviews = reviews.except(third)
forth = reviews.pickRandom(random)
company.randomCalled += 4
[first, second, third, forth]
Reviews.getModdedPositiveMessages = (game, score) ->
result = []
for m in Reviews.moddedMessages when m.isPositive and not m.isNegative
if m.getMessage?
result.push(m.getMessage(game, score))
else if m.message? then result.push(m.message)
result
Reviews.getModdedNegativeMessages = (game, score) ->
result = []
for m in Reviews.moddedMessages when m.isNegative and not m.isPositive
if m.getMessage?
result.push(m.getMessage(game, score))
else if m.message? then result.push(m.message)
result
Reviews.getModdedGenericMessages = (game, score) ->
result = []
for m in Reviews.moddedMessages when not m.isNegative and not m.isPositive
if m.getMessage?
result.push(m.getMessage(game, score))
else if m.message? then result.push(m.message)
result
Reviews.__oldGetGenericReviewMessage = Reviews.getGenericReviewMessage
Reviews.getGenericReviewMessage = (game, score) ->
if game.company.getRandom() <= 0.5 then Reviews.getModdedGenericMessages(game, score) else Reviews.__oldGetGenericReviewMessage(game, score)
Reviews.getReviews = (game, finalScore, positiveMessages, negativeMessages) ->
intScore = Math.floor(finalScore).clamp(1, 10)
if finalScore >= 9.5
intScore = 10
reviewers = Reviews.getFourRandomReviewers(game.company)
reviews = []
usedMessages = []
scores = []
variation = 1
positiveMessages.addRange(Reviews.getModdedPositiveMessages(game))
negativeMessages.addRange(Reviews.getModdedNegativeMessages (game))
for i in [0...4]
if intScore is 5 or intScore is 6
variation = if game.company.getRandom() < 0.05 then 2 else 1
scoreVariation = if Math.randomSign() is 1 then 0 else variation * Math.randomSign()
score = (intScore + scoreVariation).clamp(1, 10)
if score is 10 and (scores.length is 3 and scores.average() is 10)
if not game.flags.psEnabled
if Math.floor(finalScore) < 10 or game.company.getRandom() < 0.8
score--
else if Math.floor(finalScore) is 10 and game.company.getRandom() < 0.4
score++
message = undefined
loop
if game.company.getRandom() <= 0.2
if scoreVariation >= 0 and (score > 2 and positiveMessages.length isnt 0)
message = positiveMessages.pickRandom()
else
if (scoreVariation < 0 and (score < 6 and negativeMessages.length isnt 0))
message = negativeMessages.pickRandom()
else
message = undefined
if not message
message = Reviews.getGenericReviewMessage(game, score)
break unless usedMessages.weakIndexOf(message) isnt -1
usedMessages.push(message)
scores.push(score)
reviews.push {
score : score
message : message
reviewerName : reviewers[i].name
}
return reviews
###
Forces all games to contain the company
###
`Game = (function(superClass) {
var __extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }
__extend(Game, superClass);
function Game(company) {
this.id = GameManager.getGUID();
this.title = void 0;
this.genre = void 0;
this.topic = void 0;
this.platforms = [];
this.engine = void 0;
this.state = GameState.notStarted;
this.gameSize = "small";
this.targetAudience = "everyone";
this.missionLog = [];
this.salesCashLog = [];
this.featureLog = null;
this.score = 0;
this.reviews = [];
this.costs = 0;
this.hypePoints = 0;
this.technologyPoints = 0;
this.bugs = 0;
this.freeBugCount = 0;
this.designPoints = 0;
this.currentSalesCash = 0;
this.totalSalesCash = 0;
this.amountSold = 0;
this.releaseWeek = 0;
this.fansChangeTarget = 0;
this.fansChanged = 0;
this.initialSalesRank = 0;
this.currentSalesRank = 0;
this.topSalesRank = 0;
this.researchFactor = 1;
this.revenue = 0;
this.flags = {};
this.soldOut = false;
this.company = company;
if (company.conferenceHype) {
this.hypePoints = company.conferenceHype;
company.conferenceHype = Math.floor(company.conferenceHype / 3);
}
}
return Game;
})(Game)`
###
Allow adding famous people and adding custom applicant algorithims
###
JobApplicants.moddedFamous = []
JobApplicants.moddedAlgorithims = []
JobApplicants.getRandomMale = (random) ->
results = []
JobApplicants.moddedAlgorithims.forEach (val) ->
results.push(val.apply(random)) if val.forMale
results.pickRandom(random)
JobApplicants.getRandomFemale = (random) ->
results = []
JobApplicants.moddedAlgorithims.forEach (val) ->
results.push(val.apply(random)) if not val.forMale
results.pickRandom(random)
JobApplicants.getFamousMale = (tech, design, random) ->
results = []
JobApplicants.moddedFamous.forEach (val) ->
results.push(val.apply(random, tech, design)) if val.forMale
results.pickRandom(random)
JobApplicants.getFamousFemale = (tech, design, random) ->
results = []
JobApplicants.moddedFamous.forEach (val) ->
results.push(val.apply(random, tech, design)) if not val.forMale
results.pickRandom(random)
JobApplicants.searchTests =
[
{
id : "ComplexAlgorithms"
name : "Complex Algorithms".localize()
minT : 0.6
}
{
id : "GameDemo"
name : "Game Demo".localize()
minD : 0.3,
minT : 0.3
}
{
id : "Showreel"
name : "Showreel".localize()
minD : 0.6
}
]
UI.__olgGenerateJobApplicants = UI._generateJobApplicants
UI._generateJobApplicants = ->
oldApplicants = UI.__olgGenerateJobApplicants()
settings = GameManager.uiSettings["findStaffData"]
settings = {ratio : 0.1, tests : []} if not settings
settings.seed = Math.floor(GameManager.company.getRandom() * 65535) if not settings.seed
ratio = settings.ratio
test = JobApplicants.searchTests.first (t) -> t.id is settings.tests.first()
company = GameManager.company
random = new MersenneTwister(settings.seed)
newApplicants = []
count = Math.floor(2 + 3 * (ratio + 0.2).clamp(0, 1))
rerolls = 0
maxRerolls = 2
maxBonus = if company.currentLevel is 4 then 4 / 5 else 2 / 5
takenNames = GameManager.company.staff.map (s) -> s.name
for i in [0...count]
qBonusFactor = ratio / 3 + (1 - ratio / 3) * random.random()
maxBonus += 1 / 5 if random.random() >= 0.95
q = 1 / 5 + maxBonus * qBonusFactor
level = Math.floor(q * 5).clamp(1,5)
maxD = 1
minD = 0
if test
maxD -= test.minT if test.minT
if test.minD
minD = test.minD
maxD -= minD
baseValue = 200 * level
d = baseValue * minD + baseValue * maxD * random.random()
t = baseValue - d
rBonusFactor = random.random()
r = 1 / 5 + maxBonus * rBonusFactor
sBonusFactor = random.random()
s = 1 / 5 + maxBonus * sBonusFactor
goodRoll = sBonusFactor > 0.5 && (qBonusFactor > 0.5 && rBonusFactor > 0.5)
if not goodRoll and (rerolls < maxRerolls and random.random() <= (ratio + 0.1).clamp(0, 0.7))
i--
rerolls++
continue
rerolls = 0
isFamous = false
sex = "male"
loop
sex = "male"
if goodRoll
name = JobApplicants.getFamousMale(t, d, random) if (random.random() > 0.15)
else
name = JobApplicants.getFamousFemale(t, d, random)
sex = "female"
isFamous = true
else
name = JobApplicants.getRandomMale(random) if random.random() > 0.25
else
name = JobApplicants.getRandomFemale(random)
sex = "female"
isFamous = false
break unless takenNames.indexOf(name) != -1
takenNames.push(name)
salary = Character.BASE_SALARY_PER_LEVEL * level
salary += salary * 0.2 * random.random() * random.randomSign()
salary = Math.floor(salary/1e3) * 1e3
newApplicants.push {
name : name,
qualityFactor : q,
technologyFactor : t / 500,
designFactor : d / 500,
researchFactor : r,
speedFactor : s,
salary : salary,
isFamous : isFamous,
sex : sex
}
GDT.fire GameManager, GDT.eventKeys.gameplay.staffApplicantsGenerated, {
newApplicants : newApplicants
settings : settings
rng : random
}
applicants = []
for i in [0...count]
if random.random() >= 0.5
a = newApplicants.pickRandom(random)
applicants.push(a)
newApplicants.remove(a)
else
a = oldApplicants.pickRandom(random)
applicants.push(a)
oldApplicants.remove(a)
return applicants | 88869 | ###
Functions which require patches
###
SDP.GDT.addTraining = (item) ->
item = item.toInput() if SDP.GDT.Training? and item instanceof SDP.GDT.Training
item.pointsCost = 0 unless item.pointsCost?
if Checks.checkPropertiesPresent(item, ['id', 'name', 'pointsCost', 'duration', 'category', 'categoryDisplayName']) and Checks.checkUniqueness(item, 'id', Training.getAllTraining())
Training.moddedTraining(item)
return
SDP.GDT.addPublisher = (item) ->
item = item.toInput() if SDP.GDT.Publisher? and item instanceof SDP.GDT.Publisher
return if not Checks.checkUniqueness(item, 'id', Companies.getAllCompanies())
if Checks.checkPropertiesPresent(item, ['id', 'name']) and Checks.checkUniqueness(item, 'id', ProjectContracts.getAllPublishers())
ProjectContracts.moddedPublishers.push(item)
return
SDP.GDT.addContract = (item) ->
item = item.toInput() if SDP.GDT.Contract? and item instanceof SDP.GDT.Contract
if Checks.checkPropertiesPresent(item, ['name', 'description', 'dF', 'tF'])
ProjectContracts.moddedContracts.push(item)
return
SDP.GDT.addReviewer = (item) ->
if item.constructor is String then item = {id: item.replace(/\s/g,""), name: item}
item = item.toInput() if SDP.GDT.Reviewer? and item instanceof SDP.GDT.Reviewer
if Checks.checkPropertiesPresent(item, ['id', 'name']) and Checks.checkUniqueness(item, 'id', Reviews.getAllReviewers())
Reviews.moddedReviewers.push(item)
return
SDP.GDT.addReviewMessage = (item) ->
if item.constructor is String then item = {message: item, isRandom: true}
if item.message or item.getMessage
Reviews.moddedMessages.push(item)
return
SDP.GDT.addApplicantFunctor = (item) ->
if Checks.checkPropertiesPresent(item, ['apply', 'forMale']) and typeof apply is "function"
JobApplicants.moddedAlgorithims.push(item)
return
SDP.GDT.addFamousFunctor = (item) ->
if Checks.checkPropertiesPresent(item, ['apply', 'forMale']) and typeof apply is "function"
JobApplicants.moddedFamous.push(item)
return
###
#
# Patches: improves game modularbility and performance and kills bugs
# Should force patches on mod load
#
###
SDP.GDT.Internal = {}
SDP.GDT.Internal.notificationsToTrigger = []
###
Triggers all notifications in the case they couldn't be triggered before (ie: before the GameManager.company.notification existed
###
GDT.on(GDT.eventKeys.saves.loaded, ->
GameManager.company.notifications.push(i) for i in SDP.GDT.Internal.notificationsToTrigger
SDP.GDT.Internal.notificationsToTrigger = [])
GDT.on(GDT.eventKeys.saves.newGame, ->
GameManager.company.notifications.push(i) for i in SDP.GDT.Internal.notificationsToTrigger
SDP.GDT.Internal.notificationsToTrigger = [])
###
Allows new platforms to incorporate different images based on the date
###
Platforms._oldGetPlatformImage = Platforms.getPlatformImage
Platforms.getPlatformImage = (platform, week) ->
if platform.id is 'PC' then return Platforms._oldGetPlatformImage(platform, week)
if not platform.imageDates? or not platform.baseIconUri? then return platform.iconUri
baseUri = platform.baseIconUri
image = null
if week and platform.imageDates.constructor is Array
image = "{0}/{1}-{2}.png".format(baseUri, platform.id, String(i+1)) for date, i in platform.imageDates when General.getWeekFromDateString(date) <= week and i isnt 0
image = "{0}/{1}.png".format(baseUri, platform.id) unless image?
return image
###
Forces getAllTraining to include modded training
###
Training._oldGetAllTraining = Training.getAllTraining
Training.moddedTraining = []
Training.getAllTraining = ->
trainings = Training._oldGetAllTraining()
for modT in Training.moddedTraining when modT.id? and modT.isTraining # provide more expected behavior
trainings.push(modT)
return
###
Adds features to the publisher contracts which determine how they act
Also allows low chance for platform company to randomly give a publisher contract
###
ProjectContracts.createPublisher = (item, id) ->
if item.constructor is String then item = {name: item}
if id? then item.id = id
if not item.id? and item.name? then item.id = name.replace(/\s/g,"")
item
ProjectContracts.vanillaPublishers = [
ProjectContracts.createPublisher("Active Visionaries")
ProjectContracts.createPublisher("Electronic Mass Productions", "ea")
ProjectContracts.createPublisher("Rockville Softworks")
ProjectContracts.createPublisher("Blue Bit Games")
ProjectContracts.createPublisher("CapeCom")
ProjectContracts.createPublisher("Codemeisters")
ProjectContracts.createPublisher("Deep Platinum")
ProjectContracts.createPublisher("Infro Games")
ProjectContracts.createPublisher("LoWood Productions")
ProjectContracts.createPublisher("TGQ")
ProjectContracts.createPublisher("\u00dcberSoft")
]
ProjectContracts.moddedPublishers = []
ProjectContracts.publisherContracts.__oldGetContract = ProjectContracts.publisherContracts.getContract
ProjectContracts.getAllPublishers = ->
results = ProjectContracts.vanillaPublishers.filter (val) -> val.id?
results.push(ProjectContracts.moddedPublishers.filter (val) -> val.id?)
results
ProjectContracts.getAvailablePublishers = (company) ->
week = Math.floor(company.currentWeek)
ProjectContracts.getAllPublishers().filter((val) ->
return (not val.startWeek? or week > General.getWeekFromDateString(val.startWeek, val.ignoreGameLengthModifier)) and (not val.retireWeek? or val.retireWeek is '260/12/4' or week < General.getWeekFromDateString(val.retireWeek, val.ignoreGameLengthModifier))
)
ProjectContracts.getPublishingCompanies = (company) ->
c = Companies.getAllCompanies(company).filter (val) -> val.notPublisher? and not val.notPublisher
c.forEach (val) -> val.isCompany = true
c
SDP.GDT.Internal.getGenericContractsSettings = (company, type) ->
key = "<KEY>
settings = company.flags[key]
if not settings
settings = {id: key}
company.flags[key] = settings
settings
SDP.GDT.Internal.generatePublisherContracts = (company, settings, maxNumber) ->
contracts = []
seed = settings.seed
random = new MersenneTwister(SDP.Util.getSeed(settings))
if settings.seed isnt seed
settings.topic = undefined
settings.researchedTopics = undefined
settings.excludes = undefined
settings.platforms = undefined
if not settings.topics or not settings.researchedTopics or not settings.platforms
topics = company.topics.slice()
topics.addRange(General.getTopicsAvailableForResearch(company))
settings.topics = topics.map (t) -> t.id
researchedTopics = company.topics.map (t) -> t.id
settings.researchedTopics = researchedTopics
platforms = Platforms.getPlatformsOnMarket(company).filter (p) -> not p.isCustom and Platforms.doesPlatformSupportGameSize(p, "medium")
settings.platforms = platforms.map (p) -> p.id
settings.excludes = []
lastGame = company.gameLog.last()
settings.excludes.push {genre: lastGame.genre.id, topic: lastGame.topic.id} if lastGame
else
topics = settings.topics.map (id) -> Topics.topics.first (t) -> t.id is id
researchedTopics = settings.researchedTopics.map (id) -> Topics.topics.first (t) -> t.id is id
allPlatforms = Platforms.getPlatforms(company, true)
platforms = settings.platforms.map (id) -> allPlatforms.first (p) -> p.id is id
excludes = settings.excludes.slice()
count = SDP.Util.getRandomInt(random, maxNumber)
count = Math.max(1, count) if settings.intialSettings
sizes = ["medium"]
sizes.push("large","large","large") if company.canDevelopLargeGames()
audiences = SDP.Enum.Audience.toArray()
publishers = ProjectContracts.getAvailablePublishers(company)
publishers.push(ProjectContracts.getPublishingCompanies(company))
sizeBasePay = { medium:15e4, large:15e5/2 }
for i in [0...count]
if platform and (platform.company and random.random() <= 0.2)
publisher = publishers.find((val) -> val.toString() is platform.company)
else if random.random() <= 0.1
publisher = publishers.pickRandom(random) # Adds a low chance for random platform company contracts
else publisher = publishers.filter((val) -> not val.isCompany?()).pickRandom(random)
diffculty = 0
genre = undefined
topic = undefined
if random.random() <= 0.7
genre = if publisher.getGenre? then publisher.getGenre(random) else General.getAvailableGenres(company).pickRandom(random)
diffculty += 0.1
if random.random() <= 0.7
loop
if random.random() <= 0.7
topic = if publisher.getTopic? then publisher.getTopic(random, topics.except(researchedTopics)) else topics.except(researchedTopics).pickRandom(random)
else
topic = if publisher.getTopic? then publisher.getTopic(random, topics) else topics.pickRandom(random)
break if topic?
break unless excludes.some (e) -> (not genre? or e.genre is genre.id) and e.topic is topic.id
difficulty += 0.1 if topic?
excludes.push({genre: genre?.id, topic: topic?.id}) if genre or topic
platform = undefined
if random.random() <= 0.7
platform = if publisher.getPlatform? then publisher.getPlatform(random, platforms) else platform = platforms.pickRandom(random)
audience = undefined
if company.canSetTargetAudience() and random.random() <= 0.2
audience = if publisher.getAudience? then publisher.getAudience(random) else audience = audiences.pickRandom(random)
diffculty += 0.8 * random.random()
minScore = 4 + Math.floor(5 * diffculty)
loop
size = sizes.pickRandom(random)
break unless platform? and not Platforms.doesPlatformSupportGameSize(platform, size)
basePay = sizeBasePay[size]
pay = basePay * (minScore/10)
pay /= 5e3
pay = Math.max(1, Math.floor(pay)) * 5e3
penalty = pay * 1.2 + pay * 1.8 * random.random()
penalty /= 5e3
penalty = Math.floor(penalty) * 5e3
royaltyRate = Math.floor(7 + 8 * difficulty) / 100
name = "#{if topic then topic.name else 'Any Topic'.localize()} / #{if genre then genre.name else 'Any Genre'.localize()}"
if not platform or Platforms.getPlatformsOnMarket(company).first((p) -> p.id is platform.id)
pubName = if publisher.getName? then publisher.getName() else publisher.toString()
contracts.push {
id: "publisherContracts"
refNumber: Math.floor(Math.random() * 65535)
type: "gameContract"
name: name
description: "Publisher: {0}".localize().format(pubName)
publisher: pubName
topic: if topic then topic.id else topic
genre: if genre then genre.id else genre
platform: if platform then platform.id else undefined
gameSize: size
gameAudience: audience
minScore: minScore
payment: pay
penalty: penalty
royaltyRate: royaltyRate
}
else count++
contracts
ProjectContracts.publisherContracts.getContract = (company) ->
SDP.GDT.Internal.generatePublisherContracts(company, SDP.GDT.Internal.getGenericContractsSettings(company, "publisher"), 5).filter (c) -> not c.skip
###
Allows adding of standard contract work
###
ProjectContracts.moddedContracts = []
ProjectContracts.getAvailableModContractsOf = (company, size) ->
contracts = []
for c in ProjectContracts.moddedContracts when not c.isAvailable? or (c.isAvailable? and c.isAvailable(company))
contracts.push(c) if c.size is size
contracts
ProjectContracts.genericContracts.__oldGetContract = ProjectContracts.genericContracts.getContract
ProjectContracts.genericContracts.getContract = (company) ->
settings = SDP.GDT.Internal.getGenericContractsSettings(company, "small")
seed = SDP.Util.getSeed(settings)
random = new MersenneTwister(seed)
genCon = SDP.GDT.Internal.generateContracts
resultContracts = []
contracts = ProjectContracts.genericContracts.__oldGetContract(company)
contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "small"), 4)
if company.flags.mediumContractsEnabled
settings = SDP.GDT.Internal.getGenericContractsSettings(company, "medium")
contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "medium"), 3)
if company.flags.largeContractsEnabled
settings = SDP.GDT.Internal.getGenericContractsSettings(company, "large")
contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "large"), 2)
return contracts.shuffle(random).filter (c) -> not c.skip
SDP.GDT.Internal.generateContracts = (company, settings, sourceSet, size, maxNumber) ->
seed = SDP.Util.getSeed(settings)
random = new MersenneTwister(seed)
contracts = []
set = sourceSet.slice()
count = SDP.Util.getRandomInt(random, maxNumber)
count = Math.max(1, count) if settings.intialSettings
for i in [0...count] when set.length > 0
item = set.pickRandom(random)
set.remove(item)
contract = SDP.GDT.Internal.generateSpecificContract(company, item, size, random)
contract.id = "genericContracts"
contract.index = i
contract.skip = true if settings.contractsDone and settings.contractsDone.indexOf(i) isnt -1
contracts.push(contract)
contracts
SDP.GDT.Internal.generateSpecificContract = (company, template, size, random) ->
r = random.random()
r += random.random() if random.random() > 0.8
minPoints = 11
minPoints = 30 if size is "medium"
minPoints = 100 if size is "large"
minPoints += 6 if minPoints is 12 and company.staff.length > 2
factor = company.getCurrentDate().year / 25
minPoints += minPoints * factor
points = minPoints + minPoints * r
pointPart = points / (template.dF + template.tF)
d = pointPart * template.dF
t = pointPart * template.tF
d += d * 0.2 * random.random() * random.randomSign()
t += t * 0.2 * random.random() * random.randomSign()
d = Math.floor(d)
t = Math.floor(t)
pay = points * 1e3
pay /= 1e3
pay = Math.floor(pay) * 1e3
weeks = Math.floor(3 + 7 * random.random())
weeks = Math.floor(3 + 3 * random.random()) if size is "small"
penalty = pay * 0.2 + pay * 0.3 * random.random()
penalty /= 1e3
penalty = Math.floor(penalty) * 1e3
return {
name : template.name,
description : template.description
requiredD : d
requiredT : t
spawnedD : 0
spawnedT : 0
payment : pay
penalty : -penalty
weeksToFinish : weeks
rF : template.rF
isGeneric : true
size : size
}
###
Allows adding reviewer names to the reviewer list along with existing and retire dates
Allows adding review messages
###
Reviews.moddedReviewers = []
Reviews.moddedMessages = []
Reviews.vanillaReviewers = [
{id: 'StarGames', name: 'Star Games'}
{id: 'InformedGamer', name: 'Informed Game'}
{id: 'GameHero', name: 'Game Hero'}
{id: 'AllGames', name: 'All Games'}
]
Reviews.getAllReviewers = ->
result = Reviews.vanillaReviewers.slice()
result.addRange(Reviews.moddedReviewers.slice())
result
Reviews.getAvailableReviewers = (company) ->
week = Math.floor(company.currentWeek)
Reviews.getAllReviewers().filter((val) ->
return (not val.startWeek? or week > General.getWeekFromDateString(val.startWeek, val.ignoreGameLengthModifier)) and (not val.retireWeek? or week < General.getWeekFromDateString(val.retireWeek, val.ignoreGameLengthModifier))
)
Reviews.getFourRandomReviewers = (company) ->
reviews = Reviews.getAvailableReviewers(company)
if reviews.length < 4 then throw "Reviewers are missing"
if reviews.length is 4 then return [reviews[0],reviews[1],reviews[2], reviews[3]]
random = company._mersenneTwister
first = reviews.pickRandom(random)
reviews = reviews.except(first)
second = reviews.pickRandom(random)
reviews = reviews.except(second)
third = reviews.pickRandom(random)
reviews = reviews.except(third)
forth = reviews.pickRandom(random)
company.randomCalled += 4
[first, second, third, forth]
Reviews.getModdedPositiveMessages = (game, score) ->
result = []
for m in Reviews.moddedMessages when m.isPositive and not m.isNegative
if m.getMessage?
result.push(m.getMessage(game, score))
else if m.message? then result.push(m.message)
result
Reviews.getModdedNegativeMessages = (game, score) ->
result = []
for m in Reviews.moddedMessages when m.isNegative and not m.isPositive
if m.getMessage?
result.push(m.getMessage(game, score))
else if m.message? then result.push(m.message)
result
Reviews.getModdedGenericMessages = (game, score) ->
result = []
for m in Reviews.moddedMessages when not m.isNegative and not m.isPositive
if m.getMessage?
result.push(m.getMessage(game, score))
else if m.message? then result.push(m.message)
result
Reviews.__oldGetGenericReviewMessage = Reviews.getGenericReviewMessage
Reviews.getGenericReviewMessage = (game, score) ->
if game.company.getRandom() <= 0.5 then Reviews.getModdedGenericMessages(game, score) else Reviews.__oldGetGenericReviewMessage(game, score)
Reviews.getReviews = (game, finalScore, positiveMessages, negativeMessages) ->
intScore = Math.floor(finalScore).clamp(1, 10)
if finalScore >= 9.5
intScore = 10
reviewers = Reviews.getFourRandomReviewers(game.company)
reviews = []
usedMessages = []
scores = []
variation = 1
positiveMessages.addRange(Reviews.getModdedPositiveMessages(game))
negativeMessages.addRange(Reviews.getModdedNegativeMessages (game))
for i in [0...4]
if intScore is 5 or intScore is 6
variation = if game.company.getRandom() < 0.05 then 2 else 1
scoreVariation = if Math.randomSign() is 1 then 0 else variation * Math.randomSign()
score = (intScore + scoreVariation).clamp(1, 10)
if score is 10 and (scores.length is 3 and scores.average() is 10)
if not game.flags.psEnabled
if Math.floor(finalScore) < 10 or game.company.getRandom() < 0.8
score--
else if Math.floor(finalScore) is 10 and game.company.getRandom() < 0.4
score++
message = undefined
loop
if game.company.getRandom() <= 0.2
if scoreVariation >= 0 and (score > 2 and positiveMessages.length isnt 0)
message = positiveMessages.pickRandom()
else
if (scoreVariation < 0 and (score < 6 and negativeMessages.length isnt 0))
message = negativeMessages.pickRandom()
else
message = undefined
if not message
message = Reviews.getGenericReviewMessage(game, score)
break unless usedMessages.weakIndexOf(message) isnt -1
usedMessages.push(message)
scores.push(score)
reviews.push {
score : score
message : message
reviewerName : reviewers[i].name
}
return reviews
###
Forces all games to contain the company
###
`Game = (function(superClass) {
var __extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }
__extend(Game, superClass);
function Game(company) {
this.id = GameManager.getGUID();
this.title = void 0;
this.genre = void 0;
this.topic = void 0;
this.platforms = [];
this.engine = void 0;
this.state = GameState.notStarted;
this.gameSize = "small";
this.targetAudience = "everyone";
this.missionLog = [];
this.salesCashLog = [];
this.featureLog = null;
this.score = 0;
this.reviews = [];
this.costs = 0;
this.hypePoints = 0;
this.technologyPoints = 0;
this.bugs = 0;
this.freeBugCount = 0;
this.designPoints = 0;
this.currentSalesCash = 0;
this.totalSalesCash = 0;
this.amountSold = 0;
this.releaseWeek = 0;
this.fansChangeTarget = 0;
this.fansChanged = 0;
this.initialSalesRank = 0;
this.currentSalesRank = 0;
this.topSalesRank = 0;
this.researchFactor = 1;
this.revenue = 0;
this.flags = {};
this.soldOut = false;
this.company = company;
if (company.conferenceHype) {
this.hypePoints = company.conferenceHype;
company.conferenceHype = Math.floor(company.conferenceHype / 3);
}
}
return Game;
})(Game)`
###
Allow adding famous people and adding custom applicant algorithims
###
JobApplicants.moddedFamous = []
JobApplicants.moddedAlgorithims = []
JobApplicants.getRandomMale = (random) ->
results = []
JobApplicants.moddedAlgorithims.forEach (val) ->
results.push(val.apply(random)) if val.forMale
results.pickRandom(random)
JobApplicants.getRandomFemale = (random) ->
results = []
JobApplicants.moddedAlgorithims.forEach (val) ->
results.push(val.apply(random)) if not val.forMale
results.pickRandom(random)
JobApplicants.getFamousMale = (tech, design, random) ->
results = []
JobApplicants.moddedFamous.forEach (val) ->
results.push(val.apply(random, tech, design)) if val.forMale
results.pickRandom(random)
JobApplicants.getFamousFemale = (tech, design, random) ->
results = []
JobApplicants.moddedFamous.forEach (val) ->
results.push(val.apply(random, tech, design)) if not val.forMale
results.pickRandom(random)
JobApplicants.searchTests =
[
{
id : "ComplexAlgorithms"
name : "Complex Algorithms".localize()
minT : 0.6
}
{
id : "GameDemo"
name : "Game Demo".localize()
minD : 0.3,
minT : 0.3
}
{
id : "Showreel"
name : "<NAME>reel".localize()
minD : 0.6
}
]
UI.__olgGenerateJobApplicants = UI._generateJobApplicants
UI._generateJobApplicants = ->
oldApplicants = UI.__olgGenerateJobApplicants()
settings = GameManager.uiSettings["findStaffData"]
settings = {ratio : 0.1, tests : []} if not settings
settings.seed = Math.floor(GameManager.company.getRandom() * 65535) if not settings.seed
ratio = settings.ratio
test = JobApplicants.searchTests.first (t) -> t.id is settings.tests.first()
company = GameManager.company
random = new MersenneTwister(settings.seed)
newApplicants = []
count = Math.floor(2 + 3 * (ratio + 0.2).clamp(0, 1))
rerolls = 0
maxRerolls = 2
maxBonus = if company.currentLevel is 4 then 4 / 5 else 2 / 5
takenNames = GameManager.company.staff.map (s) -> s.name
for i in [0...count]
qBonusFactor = ratio / 3 + (1 - ratio / 3) * random.random()
maxBonus += 1 / 5 if random.random() >= 0.95
q = 1 / 5 + maxBonus * qBonusFactor
level = Math.floor(q * 5).clamp(1,5)
maxD = 1
minD = 0
if test
maxD -= test.minT if test.minT
if test.minD
minD = test.minD
maxD -= minD
baseValue = 200 * level
d = baseValue * minD + baseValue * maxD * random.random()
t = baseValue - d
rBonusFactor = random.random()
r = 1 / 5 + maxBonus * rBonusFactor
sBonusFactor = random.random()
s = 1 / 5 + maxBonus * sBonusFactor
goodRoll = sBonusFactor > 0.5 && (qBonusFactor > 0.5 && rBonusFactor > 0.5)
if not goodRoll and (rerolls < maxRerolls and random.random() <= (ratio + 0.1).clamp(0, 0.7))
i--
rerolls++
continue
rerolls = 0
isFamous = false
sex = "male"
loop
sex = "male"
if goodRoll
name = JobApplicants.getFamousMale(t, d, random) if (random.random() > 0.15)
else
name = JobApplicants.getFamousFemale(t, d, random)
sex = "female"
isFamous = true
else
name = JobApplicants.getRandomMale(random) if random.random() > 0.25
else
name = JobApplicants.getRandomFemale(random)
sex = "female"
isFamous = false
break unless takenNames.indexOf(name) != -1
takenNames.push(name)
salary = Character.BASE_SALARY_PER_LEVEL * level
salary += salary * 0.2 * random.random() * random.randomSign()
salary = Math.floor(salary/1e3) * 1e3
newApplicants.push {
name : <NAME>,
qualityFactor : q,
technologyFactor : t / 500,
designFactor : d / 500,
researchFactor : r,
speedFactor : s,
salary : salary,
isFamous : isFamous,
sex : sex
}
GDT.fire GameManager, GDT.eventKeys.gameplay.staffApplicantsGenerated, {
newApplicants : newApplicants
settings : settings
rng : random
}
applicants = []
for i in [0...count]
if random.random() >= 0.5
a = newApplicants.pickRandom(random)
applicants.push(a)
newApplicants.remove(a)
else
a = oldApplicants.pickRandom(random)
applicants.push(a)
oldApplicants.remove(a)
return applicants | true | ###
Functions which require patches
###
SDP.GDT.addTraining = (item) ->
item = item.toInput() if SDP.GDT.Training? and item instanceof SDP.GDT.Training
item.pointsCost = 0 unless item.pointsCost?
if Checks.checkPropertiesPresent(item, ['id', 'name', 'pointsCost', 'duration', 'category', 'categoryDisplayName']) and Checks.checkUniqueness(item, 'id', Training.getAllTraining())
Training.moddedTraining(item)
return
SDP.GDT.addPublisher = (item) ->
item = item.toInput() if SDP.GDT.Publisher? and item instanceof SDP.GDT.Publisher
return if not Checks.checkUniqueness(item, 'id', Companies.getAllCompanies())
if Checks.checkPropertiesPresent(item, ['id', 'name']) and Checks.checkUniqueness(item, 'id', ProjectContracts.getAllPublishers())
ProjectContracts.moddedPublishers.push(item)
return
SDP.GDT.addContract = (item) ->
item = item.toInput() if SDP.GDT.Contract? and item instanceof SDP.GDT.Contract
if Checks.checkPropertiesPresent(item, ['name', 'description', 'dF', 'tF'])
ProjectContracts.moddedContracts.push(item)
return
SDP.GDT.addReviewer = (item) ->
if item.constructor is String then item = {id: item.replace(/\s/g,""), name: item}
item = item.toInput() if SDP.GDT.Reviewer? and item instanceof SDP.GDT.Reviewer
if Checks.checkPropertiesPresent(item, ['id', 'name']) and Checks.checkUniqueness(item, 'id', Reviews.getAllReviewers())
Reviews.moddedReviewers.push(item)
return
SDP.GDT.addReviewMessage = (item) ->
if item.constructor is String then item = {message: item, isRandom: true}
if item.message or item.getMessage
Reviews.moddedMessages.push(item)
return
SDP.GDT.addApplicantFunctor = (item) ->
if Checks.checkPropertiesPresent(item, ['apply', 'forMale']) and typeof apply is "function"
JobApplicants.moddedAlgorithims.push(item)
return
SDP.GDT.addFamousFunctor = (item) ->
if Checks.checkPropertiesPresent(item, ['apply', 'forMale']) and typeof apply is "function"
JobApplicants.moddedFamous.push(item)
return
###
#
# Patches: improves game modularbility and performance and kills bugs
# Should force patches on mod load
#
###
SDP.GDT.Internal = {}
SDP.GDT.Internal.notificationsToTrigger = []
###
Triggers all notifications in the case they couldn't be triggered before (ie: before the GameManager.company.notification existed
###
GDT.on(GDT.eventKeys.saves.loaded, ->
GameManager.company.notifications.push(i) for i in SDP.GDT.Internal.notificationsToTrigger
SDP.GDT.Internal.notificationsToTrigger = [])
GDT.on(GDT.eventKeys.saves.newGame, ->
GameManager.company.notifications.push(i) for i in SDP.GDT.Internal.notificationsToTrigger
SDP.GDT.Internal.notificationsToTrigger = [])
###
Allows new platforms to incorporate different images based on the date
###
Platforms._oldGetPlatformImage = Platforms.getPlatformImage
Platforms.getPlatformImage = (platform, week) ->
if platform.id is 'PC' then return Platforms._oldGetPlatformImage(platform, week)
if not platform.imageDates? or not platform.baseIconUri? then return platform.iconUri
baseUri = platform.baseIconUri
image = null
if week and platform.imageDates.constructor is Array
image = "{0}/{1}-{2}.png".format(baseUri, platform.id, String(i+1)) for date, i in platform.imageDates when General.getWeekFromDateString(date) <= week and i isnt 0
image = "{0}/{1}.png".format(baseUri, platform.id) unless image?
return image
###
Forces getAllTraining to include modded training
###
Training._oldGetAllTraining = Training.getAllTraining
Training.moddedTraining = []
Training.getAllTraining = ->
trainings = Training._oldGetAllTraining()
for modT in Training.moddedTraining when modT.id? and modT.isTraining # provide more expected behavior
trainings.push(modT)
return
###
Adds features to the publisher contracts which determine how they act
Also allows low chance for platform company to randomly give a publisher contract
###
ProjectContracts.createPublisher = (item, id) ->
if item.constructor is String then item = {name: item}
if id? then item.id = id
if not item.id? and item.name? then item.id = name.replace(/\s/g,"")
item
# Publishers shipped with the base mod. Entries without an explicit id get one
# derived from their name (whitespace stripped) by createPublisher; EA keeps
# the short id "ea".
ProjectContracts.vanillaPublishers = [
  ProjectContracts.createPublisher("Active Visionaries")
  ProjectContracts.createPublisher("Electronic Mass Productions", "ea")
  ProjectContracts.createPublisher("Rockville Softworks")
  ProjectContracts.createPublisher("Blue Bit Games")
  ProjectContracts.createPublisher("CapeCom")
  ProjectContracts.createPublisher("Codemeisters")
  ProjectContracts.createPublisher("Deep Platinum")
  ProjectContracts.createPublisher("Infro Games")
  ProjectContracts.createPublisher("LoWood Productions")
  ProjectContracts.createPublisher("TGQ")
  ProjectContracts.createPublisher("\u00dcberSoft")
]
# Publishers registered by mods.
ProjectContracts.moddedPublishers = []
ProjectContracts.publisherContracts.__oldGetContract = ProjectContracts.publisherContracts.getContract
ProjectContracts.getAllPublishers = ->
  # All vanilla + modded publishers that have a usable id.
  results = ProjectContracts.vanillaPublishers.filter (val) -> val.id?
  # BUG FIX: push(...) appended the filtered ARRAY as a single element;
  # addRange (used throughout this file) appends its items individually.
  results.addRange(ProjectContracts.moddedPublishers.filter (val) -> val.id?)
  results
ProjectContracts.getAvailablePublishers = (company) ->
  # Publishers whose active window contains the company's current week.
  week = Math.floor(company.currentWeek)
  ProjectContracts.getAllPublishers().filter((val) ->
    # retireWeek '260/12/4' is treated as a "never retires" sentinel.
    return (not val.startWeek? or week > General.getWeekFromDateString(val.startWeek, val.ignoreGameLengthModifier)) and (not val.retireWeek? or val.retireWeek is '260/12/4' or week < General.getWeekFromDateString(val.retireWeek, val.ignoreGameLengthModifier))
  )
ProjectContracts.getPublishingCompanies = (company) ->
  # NOTE(review): the predicate keeps only companies that explicitly declare
  # notPublisher AND have it falsy — companies without the flag at all are
  # excluded. Looks like deliberate opt-in; confirm against Companies data.
  c = Companies.getAllCompanies(company).filter (val) -> val.notPublisher? and not val.notPublisher
  # Mark them so contract generation can tell companies from plain publishers.
  c.forEach (val) -> val.isCompany = true
  c
SDP.GDT.Internal.getGenericContractsSettings = (company, type) ->
  # Lazily create and cache the per-type contract settings bag on the
  # company's flags, keyed by a string derived from `type`.
  # NOTE(review): the key literal below was redacted in this copy of the file
  # ("PI:KEY:...END_PI"); presumably it interpolates `type` — restore from the
  # original source before shipping.
  key = "PI:KEY:<KEY>END_PI"
  settings = company.flags[key]
  if not settings
    settings = {id: key}
    company.flags[key] = settings
  settings
SDP.GDT.Internal.generatePublisherContracts = (company, settings, maxNumber) ->
  ###
  Generates up to maxNumber publisher game contracts, deterministically from
  the seed carried in `settings` so re-opening the UI shows the same offers.
  Cached topic/platform snapshots in `settings` are rebuilt when the seed
  changes.
  ###
  contracts = []
  seed = settings.seed
  random = new MersenneTwister(SDP.Util.getSeed(settings))
  if settings.seed isnt seed
    # Seed rolled over: drop cached snapshots so they are rebuilt below.
    # BUG FIX: previously cleared `settings.topic` (singular), leaving the
    # stale `settings.topics` cache in place.
    settings.topics = undefined
    settings.researchedTopics = undefined
    settings.excludes = undefined
    settings.platforms = undefined
  if not settings.topics or not settings.researchedTopics or not settings.platforms
    # Build fresh snapshots of topics/platforms and cache their ids.
    topics = company.topics.slice()
    topics.addRange(General.getTopicsAvailableForResearch(company))
    settings.topics = topics.map (t) -> t.id
    researchedTopics = company.topics.map (t) -> t.id
    settings.researchedTopics = researchedTopics
    platforms = Platforms.getPlatformsOnMarket(company).filter (p) -> not p.isCustom and Platforms.doesPlatformSupportGameSize(p, "medium")
    settings.platforms = platforms.map (p) -> p.id
    settings.excludes = []
    lastGame = company.gameLog.last()
    settings.excludes.push {genre: lastGame.genre.id, topic: lastGame.topic.id} if lastGame
  else
    # Rehydrate the cached id lists back into full objects.
    topics = settings.topics.map (id) -> Topics.topics.first (t) -> t.id is id
    researchedTopics = settings.researchedTopics.map (id) -> Topics.topics.first (t) -> t.id is id
    allPlatforms = Platforms.getPlatforms(company, true)
    platforms = settings.platforms.map (id) -> allPlatforms.first (p) -> p.id is id
  excludes = settings.excludes.slice()
  count = SDP.Util.getRandomInt(random, maxNumber)
  count = Math.max(1, count) if settings.intialSettings # (sic) flag name kept for save compatibility
  sizes = ["medium"]
  sizes.push("large","large","large") if company.canDevelopLargeGames()
  audiences = SDP.Enum.Audience.toArray()
  publishers = ProjectContracts.getAvailablePublishers(company)
  # BUG FIX: push() appended the companies ARRAY as one element; addRange
  # appends each company individually.
  publishers.addRange(ProjectContracts.getPublishingCompanies(company))
  sizeBasePay = { medium:15e4, large:15e5/2 }
  # BUG FIX: this was `for i in [0...count]` with `count++` in the discard
  # branch, but CoffeeScript range loops cache the bound, so no replacement
  # contract was ever generated. A while loop honors the growing count.
  i = 0
  while i < count
    # NOTE(review): `platform` still holds the PREVIOUS iteration's value here
    # (undefined on the first pass) — appears deliberate, biasing the next
    # contract toward the prior platform's owning company. Confirm.
    if platform and (platform.company and random.random() <= 0.2)
      publisher = publishers.find((val) -> val.toString() is platform.company)
    else if random.random() <= 0.1
      publisher = publishers.pickRandom(random) # Adds a low chance for random platform company contracts
    else publisher = publishers.filter((val) -> not val.isCompany?()).pickRandom(random)
    # Difficulty accumulates per constraint and drives minScore + royalties.
    difficulty = 0
    genre = undefined
    topic = undefined
    if random.random() <= 0.7
      genre = if publisher.getGenre? then publisher.getGenre(random) else General.getAvailableGenres(company).pickRandom(random)
      difficulty += 0.1
    if random.random() <= 0.7
      loop
        if random.random() <= 0.7
          # Prefer a topic the company has not researched yet.
          topic = if publisher.getTopic? then publisher.getTopic(random, topics.except(researchedTopics)) else topics.except(researchedTopics).pickRandom(random)
        else
          topic = if publisher.getTopic? then publisher.getTopic(random, topics) else topics.pickRandom(random)
          break if topic?
        # Re-pick when this genre/topic combination was recently offered.
        # ROBUSTNESS: also stop when no topic could be picked (empty pool)
        # instead of crashing on `topic.id`.
        break unless topic? and excludes.some (e) -> (not genre? or e.genre is genre.id) and e.topic is topic.id
      # BUG FIX: the accumulator was spelled `diffculty` elsewhere while this
      # line (and royaltyRate below) read an undefined `difficulty`, yielding
      # NaN royalties; the variable is now spelled consistently.
      difficulty += 0.1 if topic?
    excludes.push({genre: genre?.id, topic: topic?.id}) if genre or topic
    platform = undefined
    if random.random() <= 0.7
      platform = if publisher.getPlatform? then publisher.getPlatform(random, platforms) else platforms.pickRandom(random)
    audience = undefined
    if company.canSetTargetAudience() and random.random() <= 0.2
      audience = if publisher.getAudience? then publisher.getAudience(random) else audiences.pickRandom(random)
    difficulty += 0.8 * random.random()
    minScore = 4 + Math.floor(5 * difficulty)
    loop
      # Re-roll size until the chosen platform supports it.
      size = sizes.pickRandom(random)
      break unless platform? and not Platforms.doesPlatformSupportGameSize(platform, size)
    basePay = sizeBasePay[size]
    # Payment scales with the minimum score, rounded down to the nearest 5k.
    pay = basePay * (minScore/10)
    pay /= 5e3
    pay = Math.max(1, Math.floor(pay)) * 5e3
    penalty = pay * 1.2 + pay * 1.8 * random.random()
    penalty /= 5e3
    penalty = Math.floor(penalty) * 5e3
    royaltyRate = Math.floor(7 + 8 * difficulty) / 100
    name = "#{if topic then topic.name else 'Any Topic'.localize()} / #{if genre then genre.name else 'Any Genre'.localize()}"
    if not platform or Platforms.getPlatformsOnMarket(company).first((p) -> p.id is platform.id)
      pubName = if publisher.getName? then publisher.getName() else publisher.toString()
      contracts.push {
        id: "publisherContracts"
        refNumber: Math.floor(Math.random() * 65535)
        type: "gameContract"
        name: name
        description: "Publisher: {0}".localize().format(pubName)
        publisher: pubName
        topic: if topic then topic.id else topic
        genre: if genre then genre.id else genre
        platform: if platform then platform.id else undefined
        gameSize: size
        gameAudience: audience
        minScore: minScore
        payment: pay
        penalty: penalty
        royaltyRate: royaltyRate
      }
    else count++ # platform left the market: discard and generate one more
    i++
  contracts
ProjectContracts.publisherContracts.getContract = (company) ->
  # Build the deterministic publisher-contract list, then hide finished ones.
  genSettings = SDP.GDT.Internal.getGenericContractsSettings(company, "publisher")
  generated = SDP.GDT.Internal.generatePublisherContracts(company, genSettings, 5)
  generated.filter (c) -> not c.skip
###
Allows adding of standard contract work
###
# Standard (non-publisher) contract templates registered by mods.
ProjectContracts.moddedContracts = []
ProjectContracts.getAvailableModContractsOf = (company, size) ->
  # Mod contract templates of the given size that are currently available.
  # Templates without an isAvailable predicate are always available.
  contracts = []
  # SIMPLIFIED: the original `not c.isAvailable? or (c.isAvailable? and
  # c.isAvailable(company))` carried a redundant second existence check.
  for c in ProjectContracts.moddedContracts when not c.isAvailable? or c.isAvailable(company)
    contracts.push(c) if c.size is size
  contracts
ProjectContracts.genericContracts.__oldGetContract = ProjectContracts.genericContracts.getContract
ProjectContracts.genericContracts.getContract = (company) ->
  # Merge vanilla contracts with modded small/medium/large contracts, then
  # shuffle deterministically and hide the ones already completed.
  # CLEANUP: removed the unused local `resultContracts`.
  settings = SDP.GDT.Internal.getGenericContractsSettings(company, "small")
  seed = SDP.Util.getSeed(settings)
  random = new MersenneTwister(seed)
  genCon = SDP.GDT.Internal.generateContracts
  contracts = ProjectContracts.genericContracts.__oldGetContract(company)
  contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "small"), 4)
  if company.flags.mediumContractsEnabled
    settings = SDP.GDT.Internal.getGenericContractsSettings(company, "medium")
    contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "medium"), 3)
  if company.flags.largeContractsEnabled
    settings = SDP.GDT.Internal.getGenericContractsSettings(company, "large")
    contracts.addRange genCon(company, settings, ProjectContracts.getAvailableModContractsOf(company, "large"), 2)
  return contracts.shuffle(random).filter (c) -> not c.skip
SDP.GDT.Internal.generateContracts = (company, settings, sourceSet, size, maxNumber) ->
  ###
  Deterministically generates generic contracts from the mod templates in
  sourceSet.
  BUG FIX: every caller in this file invokes this with FOUR arguments —
  (company, settings, sourceSet, maxNumber) — so the count used to land in
  `size` and `maxNumber` was undefined. Both call shapes are now accepted:
  when `maxNumber` is missing and `size` is numeric, `size` is treated as the
  count and each contract's size comes from its own template.
  ###
  if not maxNumber? and typeof size is 'number'
    maxNumber = size
    size = undefined
  seed = SDP.Util.getSeed(settings)
  random = new MersenneTwister(seed)
  contracts = []
  set = sourceSet.slice()
  count = SDP.Util.getRandomInt(random, maxNumber)
  count = Math.max(1, count) if settings.intialSettings # (sic) flag name kept for save compatibility
  for i in [0...count] when set.length > 0
    # Draw templates without replacement.
    item = set.pickRandom(random)
    set.remove(item)
    contract = SDP.GDT.Internal.generateSpecificContract(company, item, item.size ? size, random)
    contract.id = "genericContracts"
    contract.index = i
    # Hide contracts the player has already completed this cycle.
    contract.skip = true if settings.contractsDone and settings.contractsDone.indexOf(i) isnt -1
    contracts.push(contract)
  contracts
SDP.GDT.Internal.generateSpecificContract = (company, template, size, random) ->
  # Builds one concrete contract from a mod template. template.dF/template.tF
  # weight how the required points split between design and tech work;
  # template.rF is passed through as the research factor.
  r = random.random()
  # Small chance of a much bigger contract.
  r += random.random() if random.random() > 0.8
  minPoints = 11
  minPoints = 30 if size is "medium"
  minPoints = 100 if size is "large"
  # NOTE(review): minPoints can only be 11/30/100 here, so this `is 12` check
  # can never fire — possibly meant `is 11`. Confirm intent before changing.
  minPoints += 6 if minPoints is 12 and company.staff.length > 2
  # Workload scales up as the in-game years advance.
  factor = company.getCurrentDate().year / 25
  minPoints += minPoints * factor
  points = minPoints + minPoints * r
  pointPart = points / (template.dF + template.tF)
  d = pointPart * template.dF
  t = pointPart * template.tF
  # Jitter each requirement by up to +/-20%.
  d += d * 0.2 * random.random() * random.randomSign()
  t += t * 0.2 * random.random() * random.randomSign()
  d = Math.floor(d)
  t = Math.floor(t)
  # Payment: 1k per point, rounded down to the nearest 1k.
  pay = points * 1e3
  pay /= 1e3
  pay = Math.floor(pay) * 1e3
  weeks = Math.floor(3 + 7 * random.random())
  weeks = Math.floor(3 + 3 * random.random()) if size is "small"
  # Penalty: 20-50% of the payment, rounded down to the nearest 1k.
  penalty = pay * 0.2 + pay * 0.3 * random.random()
  penalty /= 1e3
  penalty = Math.floor(penalty) * 1e3
  return {
    name : template.name,
    description : template.description
    requiredD : d
    requiredT : t
    spawnedD : 0
    spawnedT : 0
    payment : pay
    penalty : -penalty
    weeksToFinish : weeks
    rF : template.rF
    isGeneric : true
    size : size
  }
###
Allows adding reviewer names to the reviewer list along with existing and retire dates
Allows adding review messages
###
# Reviewers and review messages registered by mods.
Reviews.moddedReviewers = []
Reviews.moddedMessages = []
Reviews.vanillaReviewers = [
  {id: 'StarGames', name: 'Star Games'}
  # BUG FIX: name read 'Informed Game', inconsistent with its own id
  # ('InformedGamer') and the vanilla reviewer name.
  {id: 'InformedGamer', name: 'Informed Gamer'}
  {id: 'GameHero', name: 'Game Hero'}
  {id: 'AllGames', name: 'All Games'}
]
Reviews.getAllReviewers = ->
  # Vanilla reviewers followed by any mod-registered ones.
  result = Reviews.vanillaReviewers.slice()
  result.addRange(Reviews.moddedReviewers.slice())
  result
Reviews.getAvailableReviewers = (company) ->
  # Keep only reviewers whose active window contains the current week.
  currentWeek = Math.floor(company.currentWeek)
  isActive = (r) ->
    started = not r.startWeek? or currentWeek > General.getWeekFromDateString(r.startWeek, r.ignoreGameLengthModifier)
    notRetired = not r.retireWeek? or currentWeek < General.getWeekFromDateString(r.retireWeek, r.ignoreGameLengthModifier)
    started and notRetired
  Reviews.getAllReviewers().filter isActive
Reviews.getFourRandomReviewers = (company) ->
  # Pick four distinct reviewers for a review screen using the company RNG.
  # Draw count (4) is recorded in company.randomCalled for save determinism.
  reviewers = Reviews.getAvailableReviewers(company)
  if reviewers.length < 4 then throw "Reviewers are missing"
  if reviewers.length is 4 then return [reviewers[0], reviewers[1], reviewers[2], reviewers[3]]
  random = company._mersenneTwister
  # CLEANUP: the four copy-pasted pick/except steps collapsed into one loop
  # (identical RNG draw sequence).
  picked = []
  for i in [0...4]
    choice = reviewers.pickRandom(random)
    picked.push(choice)
    reviewers = reviewers.except(choice)
  company.randomCalled += 4
  picked
# Shared collector for modded review messages matching a polarity predicate.
# Each message object either computes its text via getMessage(game, score) or
# carries a static `message` string.
Reviews._collectModdedMessages = (game, score, match) ->
  result = []
  for m in Reviews.moddedMessages when match(m)
    if m.getMessage?
      result.push(m.getMessage(game, score))
    else if m.message? then result.push(m.message)
  result
Reviews.getModdedPositiveMessages = (game, score) ->
  # Messages flagged positive-only.
  Reviews._collectModdedMessages game, score, (m) -> m.isPositive and not m.isNegative
Reviews.getModdedNegativeMessages = (game, score) ->
  # Messages flagged negative-only.
  Reviews._collectModdedMessages game, score, (m) -> m.isNegative and not m.isPositive
Reviews.getModdedGenericMessages = (game, score) ->
  # Messages with no polarity flags.
  Reviews._collectModdedMessages game, score, (m) -> not m.isNegative and not m.isPositive
Reviews.__oldGetGenericReviewMessage = Reviews.getGenericReviewMessage
Reviews.getGenericReviewMessage = (game, score) ->
  # 50% chance to use a modded generic message when any are registered.
  # BUG FIX: previously returned the whole modded-messages ARRAY as the
  # message (and an empty array when no mods registered any); now picks one
  # message and falls back to the vanilla generator otherwise.
  if game.company.getRandom() <= 0.5
    modded = Reviews.getModdedGenericMessages(game, score)
    return modded.pickRandom() if modded.length
  Reviews.__oldGetGenericReviewMessage(game, score)
Reviews.getReviews = (game, finalScore, positiveMessages, negativeMessages) ->
  ###
  Builds the four review entries for a finished game. Each reviewer's score
  is the clamped integer score plus a small random variation, with damping of
  "four straight 10s" unless the game earned (or got lucky into) them.
  ###
  intScore = Math.floor(finalScore).clamp(1, 10)
  if finalScore >= 9.5
    intScore = 10
  reviewers = Reviews.getFourRandomReviewers(game.company)
  reviews = []
  usedMessages = []
  scores = []
  variation = 1
  # BUG FIX: the modded-message collectors take (game, score); the score
  # argument was omitted, so getMessage(game, undefined) was being called.
  positiveMessages.addRange(Reviews.getModdedPositiveMessages(game, finalScore))
  negativeMessages.addRange(Reviews.getModdedNegativeMessages(game, finalScore))
  for i in [0...4]
    # Mid scores (5-6) occasionally vary by 2 instead of 1.
    if intScore is 5 or intScore is 6
      variation = if game.company.getRandom() < 0.05 then 2 else 1
    scoreVariation = if Math.randomSign() is 1 then 0 else variation * Math.randomSign()
    score = (intScore + scoreVariation).clamp(1, 10)
    # Avoid a fourth 10 after three 10s unless the raw score is a true 10.
    if score is 10 and (scores.length is 3 and scores.average() is 10)
      if not game.flags.psEnabled
        if Math.floor(finalScore) < 10 or game.company.getRandom() < 0.8
          score--
      else if Math.floor(finalScore) is 10 and game.company.getRandom() < 0.4
        score++
    message = undefined
    loop
      # 20% chance to use a polarity-matched message, otherwise generic;
      # retry until a message no other reviewer already used is found.
      if game.company.getRandom() <= 0.2
        if scoreVariation >= 0 and (score > 2 and positiveMessages.length isnt 0)
          message = positiveMessages.pickRandom()
        else
          if (scoreVariation < 0 and (score < 6 and negativeMessages.length isnt 0))
            message = negativeMessages.pickRandom()
          else
            message = undefined
      if not message
        message = Reviews.getGenericReviewMessage(game, score)
      break unless usedMessages.weakIndexOf(message) isnt -1
    usedMessages.push(message)
    scores.push(score)
    reviews.push {
      score : score
      message : message
      reviewerName : reviewers[i].name
    }
  return reviews
###
Forces all games to contain the company
###
`Game = (function(superClass) {
  // NOTE(review): hasProp is expected in scope from the CoffeeScript module
  // prelude (hasProp = {}.hasOwnProperty) — confirm in the compiled file.
  var __extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }
  __extend(Game, superClass);
  // Replacement Game constructor: same fields as vanilla, plus a
  // back-reference to the owning company and hype carried over from
  // conferences (which then decays to a third).
  function Game(company) {
    this.id = GameManager.getGUID();
    this.title = void 0;
    this.genre = void 0;
    this.topic = void 0;
    this.platforms = [];
    this.engine = void 0;
    this.state = GameState.notStarted;
    this.gameSize = "small";
    this.targetAudience = "everyone";
    this.missionLog = [];
    this.salesCashLog = [];
    this.featureLog = null;
    this.score = 0;
    this.reviews = [];
    this.costs = 0;
    this.hypePoints = 0;
    this.technologyPoints = 0;
    this.bugs = 0;
    this.freeBugCount = 0;
    this.designPoints = 0;
    this.currentSalesCash = 0;
    this.totalSalesCash = 0;
    this.amountSold = 0;
    this.releaseWeek = 0;
    this.fansChangeTarget = 0;
    this.fansChanged = 0;
    this.initialSalesRank = 0;
    this.currentSalesRank = 0;
    this.topSalesRank = 0;
    this.researchFactor = 1;
    this.revenue = 0;
    this.flags = {};
    this.soldOut = false;
    this.company = company;
    // Seed this game's hype from accumulated conference hype, then decay it.
    if (company.conferenceHype) {
      this.hypePoints = company.conferenceHype;
      company.conferenceHype = Math.floor(company.conferenceHype / 3);
    }
  }
  return Game;
})(Game)`
###
Allow adding famous people and adding custom applicant algorithims
###
# Registries for mod-supplied famous applicants and name algorithms.
# NOTE(review): `apply` here looks like a method defined on each registered
# object (not Function::apply) — the call shapes are preserved exactly.
JobApplicants.moddedFamous = []
JobApplicants.moddedAlgorithims = []
JobApplicants.getRandomMale = (random) ->
  # Run every male-name algorithm, then pick one of the produced names.
  candidates = (alg.apply(random) for alg in JobApplicants.moddedAlgorithims when alg.forMale)
  candidates.pickRandom(random)
JobApplicants.getRandomFemale = (random) ->
  candidates = (alg.apply(random) for alg in JobApplicants.moddedAlgorithims when not alg.forMale)
  candidates.pickRandom(random)
JobApplicants.getFamousMale = (tech, design, random) ->
  # Famous applicants may tailor their name to the tech/design stats.
  candidates = (f.apply(random, tech, design) for f in JobApplicants.moddedFamous when f.forMale)
  candidates.pickRandom(random)
JobApplicants.getFamousFemale = (tech, design, random) ->
  candidates = (f.apply(random, tech, design) for f in JobApplicants.moddedFamous when not f.forMale)
  candidates.pickRandom(random)
# Staff-search test options; minD/minT skew generated applicants toward
# design or technology (see UI._generateJobApplicants).
JobApplicants.searchTests =
  [
    {
      id : "ComplexAlgorithms"
      name : "Complex Algorithms".localize()
      minT : 0.6
    }
    {
      id : "GameDemo"
      name : "Game Demo".localize()
      minD : 0.3,
      minT : 0.3
    }
    {
      id : "Showreel"
      # BUG FIX: the display name was corrupted by redaction in this copy
      # ("PI:NAME:...reel"); restored to match the entry's id.
      name : "Showreel".localize()
      minD : 0.6
    }
  ]
UI.__olgGenerateJobApplicants = UI._generateJobApplicants
UI._generateJobApplicants = ->
  ###
  Generates the staff-search applicant list: a deterministic 50/50 mix of
  vanilla applicants and newly generated ones whose quality scales with the
  search budget ratio and the selected search test.
  ###
  oldApplicants = UI.__olgGenerateJobApplicants()
  settings = GameManager.uiSettings["findStaffData"]
  settings = {ratio : 0.1, tests : []} if not settings
  settings.seed = Math.floor(GameManager.company.getRandom() * 65535) if not settings.seed
  ratio = settings.ratio
  test = JobApplicants.searchTests.first (t) -> t.id is settings.tests.first()
  company = GameManager.company
  random = new MersenneTwister(settings.seed)
  newApplicants = []
  count = Math.floor(2 + 3 * (ratio + 0.2).clamp(0, 1))
  rerolls = 0
  maxRerolls = 2
  maxBonus = if company.currentLevel is 4 then 4 / 5 else 2 / 5
  takenNames = GameManager.company.staff.map (s) -> s.name
  # BUG FIX: this was `for i in [0...count]` with `i--` to retry a slot, but
  # CoffeeScript range loops keep their own internal counter, so the reroll
  # never actually repeated a slot. A while loop makes the retry effective.
  i = 0
  while i < count
    qBonusFactor = ratio / 3 + (1 - ratio / 3) * random.random()
    maxBonus += 1 / 5 if random.random() >= 0.95
    q = 1 / 5 + maxBonus * qBonusFactor
    level = Math.floor(q * 5).clamp(1,5)
    # The chosen search test constrains the design/tech point split.
    maxD = 1
    minD = 0
    if test
      maxD -= test.minT if test.minT
      if test.minD
        minD = test.minD
        maxD -= minD
    baseValue = 200 * level
    d = baseValue * minD + baseValue * maxD * random.random()
    t = baseValue - d
    rBonusFactor = random.random()
    r = 1 / 5 + maxBonus * rBonusFactor
    sBonusFactor = random.random()
    s = 1 / 5 + maxBonus * sBonusFactor
    goodRoll = sBonusFactor > 0.5 && (qBonusFactor > 0.5 && rBonusFactor > 0.5)
    # Poor rolls get a limited number of rerolls, scaling with the budget.
    if not goodRoll and (rerolls < maxRerolls and random.random() <= (ratio + 0.1).clamp(0, 0.7))
      rerolls++
      continue # retry this slot without advancing i
    rerolls = 0
    isFamous = false
    sex = "male"
    loop
      # BUG FIX: the original postfix-if/`else` ladder here was malformed
      # CoffeeScript; rewritten as plain if/else with the intended branching.
      sex = "male"
      if goodRoll
        if random.random() > 0.15
          name = JobApplicants.getFamousMale(t, d, random)
        else
          name = JobApplicants.getFamousFemale(t, d, random)
          sex = "female"
        isFamous = true
      else
        if random.random() > 0.25
          name = JobApplicants.getRandomMale(random)
        else
          name = JobApplicants.getRandomFemale(random)
          sex = "female"
        isFamous = false
      # Re-pick until the name is not already on staff / in this batch.
      break unless takenNames.indexOf(name) != -1
    takenNames.push(name)
    salary = Character.BASE_SALARY_PER_LEVEL * level
    salary += salary * 0.2 * random.random() * random.randomSign()
    salary = Math.floor(salary/1e3) * 1e3
    newApplicants.push {
      # BUG FIX: this value was corrupted by redaction in this copy
      # ("PI:NAME:...END_PI"); it is the generated applicant name.
      name : name,
      qualityFactor : q,
      technologyFactor : t / 500,
      designFactor : d / 500,
      researchFactor : r,
      speedFactor : s,
      salary : salary,
      isFamous : isFamous,
      sex : sex
    }
    i++
  GDT.fire GameManager, GDT.eventKeys.gameplay.staffApplicantsGenerated, {
    newApplicants : newApplicants
    settings : settings
    rng : random
  }
  # Interleave vanilla and generated applicants 50/50.
  applicants = []
  for i in [0...count]
    if random.random() >= 0.5
      a = newApplicants.pickRandom(random)
      applicants.push(a)
      newApplicants.remove(a)
    else
      a = oldApplicants.pickRandom(random)
      applicants.push(a)
      oldApplicants.remove(a)
  return applicants
[
{
"context": "ps://mainnet.vnode.app/v1/<YOUR TOKEN>\"\n\napiKey: \"CHANGEME\"\n\ninvoices:\n minConfirmations: 1\n\npublic:\n allo",
"end": 138,
"score": 0.9941465854644775,
"start": 130,
"tag": "KEY",
"value": "CHANGEME"
},
{
"context": ":\n minConfirmations: 1\n\npublic:\n allowTokens: ['DAI']\n contracts:\n DAI: \n address: \"0x6b175",
"end": 201,
"score": 0.5699249505996704,
"start": 199,
"tag": "KEY",
"value": "DA"
}
] | config/default.cson | codevet/daipay | 52 | listen: 8000
provider:
type: "rpc"
uri: "http://localhost:8545"
#uri: "https://mainnet.vnode.app/v1/<YOUR TOKEN>"
apiKey: "CHANGEME"
invoices:
minConfirmations: 1
public:
allowTokens: ['DAI']
contracts:
DAI:
address: "0x6b175474e89094c44da98b954eedeac495271d0f" # Mainnet
# address: "0xC4375B7De8af5a38a93548eb8453a498222C4fF2" DAI token on Kovan network
USDT:
address: "0xdAC17F958D2ee523a2206206994597C13D831ec7"
USDC:
address: "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"
TUSD:
address: "0x0000000000085d4780B73119b644AE5ecd22b376"
| 66540 | listen: 8000
provider:
type: "rpc"
uri: "http://localhost:8545"
#uri: "https://mainnet.vnode.app/v1/<YOUR TOKEN>"
apiKey: "<KEY>"
invoices:
minConfirmations: 1
public:
allowTokens: ['<KEY>I']
contracts:
DAI:
address: "0x6b175474e89094c44da98b954eedeac495271d0f" # Mainnet
# address: "0xC4375B7De8af5a38a93548eb8453a498222C4fF2" DAI token on Kovan network
USDT:
address: "0xdAC17F958D2ee523a2206206994597C13D831ec7"
USDC:
address: "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"
TUSD:
address: "0x0000000000085d4780B73119b644AE5ecd22b376"
| true | listen: 8000
provider:
type: "rpc"
uri: "http://localhost:8545"
#uri: "https://mainnet.vnode.app/v1/<YOUR TOKEN>"
apiKey: "PI:KEY:<KEY>END_PI"
invoices:
minConfirmations: 1
public:
allowTokens: ['PI:KEY:<KEY>END_PII']
contracts:
DAI:
address: "0x6b175474e89094c44da98b954eedeac495271d0f" # Mainnet
# address: "0xC4375B7De8af5a38a93548eb8453a498222C4fF2" DAI token on Kovan network
USDT:
address: "0xdAC17F958D2ee523a2206206994597C13D831ec7"
USDC:
address: "0xa0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"
TUSD:
address: "0x0000000000085d4780B73119b644AE5ecd22b376"
|
[
{
"context": "r.methods[m]\n rec = {}\n rec.key = method.name\n counters[rec.key] = 1\n rec.group = met",
"end": 9733,
"score": 0.7375167608261108,
"start": 9729,
"tag": "KEY",
"value": "name"
}
] | server/structure.coffee | leviathanindustries/noddy | 2 |
import fs from 'fs'
API.add 'structure',
get: () ->
return if this.queryParams.group then API.structure.nodeslinks(undefined,this.queryParams.group) else API.structure.read()
API.add 'structure/method/:method', get: () -> return API.structure.method(this.urlParams.method)
API.add 'structure/methods', get: () -> return API.structure.methods()
API.add 'structure/collections', get: () -> return API.structure.collections()
API.add 'structure/groups', get: () -> return API.structure.groups()
API.add 'structure/nodes', get: () -> return API.structure.nodes()
API.add 'structure/links', get: () -> return API.structure.links()
API.add 'structure/nodeslinks', get: () -> return API.structure.nodeslinks(undefined, this.queryParams.group)
API.structure = {}
API.structure._structured = false
API.structure.read = (src='/home/cloo/dev/noddy/server') ->
  # Parse every source file under `src` line by line, building an index of
  # methods, helper functions, collections, settings keys, routes, TODOs and
  # log calls. The result is memoized in API.structure._structured.
  if API.structure._structured is false
    collections = []
    settings = []
    methods = {}
    helpers = {}
    routes = {}
    called = {}
    TODO = {}
    logs = {}
    # TODO add in a parse to see if calls are made from within loops, and if so, capture their conditionals if possible
    # TODO parse the API.add URL routes, find which methods they call before the next route definition,
    # then add the list of routes that calls a method to the method
    # TODO once the above is done it could be possible to parse a static site for URLs that call certain methods
    # although this would depend on what domains and other routings were used to route to the underlying API
    method = {}
    # Line-oriented parser for a single source file.
    _parse = (fn) ->
      incomment = false
      inroute = false
      counter = 0
      fl = fs.readFileSync(fn).toString()
      for l of lns = fl.replace(/\r\n/g,'\n').split '\n'
        counter += 1
        line = lns[l].replace(/\t/g,'  ')
        # A new top-level definition (or start/end of file) closes the method
        # currently being accumulated.
        if JSON.stringify(method) isnt '{}' and (l is '0' or parseInt(l) is lns.length-1 or (line.indexOf('API.') is 0 and line.indexOf('(') isnt -1))
          method.code = method.code.trim() #.replace(/\n/g,'')
          method.checksum = API.job.sign method.code.replace(/\n/g,'')
          #delete method.code
          if method.name.indexOf('API.') is 0
            methods[method.name] = method
          else
            helpers[method.name] = method
          method = {}
        # Record API.settings.* keys used anywhere.
        if line.indexOf('API.settings') isnt -1
          stng = 'API.settings' + line.split('API.settings')[1].split(' ')[0].split(')')[0].split('}')[0].split(',')[0].split('.indexOf')[0].replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
          if stng.split('.').length > 2
            if method.name
              method.settings ?= []
              method.settings.push(stng) if stng not in method.settings
            settings.push(stng) if stng not in settings
        # Route declarations (API.add) open a route block; /test routes skipped.
        if line.indexOf('API.add') is 0
          inroute = line.split(' ')[1].split(',')[0].replace(/'/g,'').replace(/"/g,'')
          if inroute.split('/').pop() is 'test'
            inroute = false
          else
            routes[inroute] ?= {methods: [], code: '', filename: fn.split('/noddy/')[1], line: counter}
        if line.toLowerCase().indexOf('todo') isnt -1
          TODO[method.name ? 'GENERAL'] ?= []
          TODO[method.name ? 'GENERAL'].push line.split(if line.indexOf('todo') isnt -1 then 'todo' else 'TODO')[1].trim()
        if incomment or not line.length
          if line.indexOf("'''") isnt -1
            incomment = false
        else if line.trim().startsWith('#') or line.trim().startsWith("'''")
          if line.trim().startsWith("'''")
            incomment = true
        else if line.indexOf('new API.collection') isnt -1
          inroute = false
          coll = line.split('new ')[0].split('=')[0].trim().split(' ')[0]
          collections.push(coll) if coll not in collections and coll isnt 'tc' and coll.indexOf('test_') isnt 0 # don't use test collections
        # Start of a new method/helper definition.
        else if (line.indexOf('API.') is 0 or (not line.startsWith(' ') and line.indexOf('=') isnt -1)) and line.indexOf('(') isnt -1 and line.indexOf('.test') is -1 and line.indexOf('API.add') is -1 and line.indexOf('API.settings') isnt 0
          inroute = false
          method = {}
          method.filename = fn.split('/noddy/')[1]
          method.line = counter
          method.lines = 1
          method.secondary = line.indexOf('API.') isnt 0
          method.code = line
          method.name = line.split(' ')[0]
          method.group = if method.name.indexOf('service.') isnt -1 then method.name.split('service.')[1].split('.')[0] else if method.name.indexOf('use.') isnt -1 then method.name.split('use.')[1].split('.')[0] else if method.name.indexOf('API.') is 0 then method.name.replace('API.','').split('.')[0] else undefined
          method.args = line.split('(')[1].split(')')[0].split(',')
          for a of method.args
            method.args[a] = method.args[a].trim() #.split('=')[0].trim()
          method.calls = []
          method.remotes = []
        # Inside a route block: accumulate code and note API.* methods called.
        else if inroute
          routes[inroute].code += (if routes[inroute].code then '\n' else '') + line
          if line.indexOf('API.') isnt -1 and line.indexOf('.test') is -1 and line.indexOf('API.settings') isnt 0
            rtm = line.replace('API.add','').replace('API.settings','')
            if rtm.indexOf('API.') isnt -1
              rtmc = 'API.' + rtm.split('API.')[1].split(' ')[0].split('(')[0].replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
              routes[inroute].methods.push(rtmc) if rtmc.length and rtmc.split('.').length > 1 and rtmc not in routes[inroute].methods
        # Inside a method body: accumulate code and record API./HTTP. calls.
        else if method.name?
          if not method.logs? and line.indexOf('API.log') isnt -1
            log = line.split('API.log')[1]
            method.logs ?= []
            method.logs.push log
            lar = (log.split('+')[0].split('#')[0] + method.args.join('')).toLowerCase().replace(/[^a-z0-9]/g,'')
            logs[lar] = method.name
          method.lines += 1
          method.code += '\n' + line
          for tp in ['API.','HTTP.']
            li = line.indexOf(tp)
            if li isnt -1
              parts = line.split tp
              parts.shift()
              for p in parts
                p = if tp is 'API.' then tp + p.split(' ')[0].split('(')[0].split(')')[0].trim() else p.trim().replace('call ','').replace('call(','')
                if tp is 'API.' and p not in method.calls and li isnt line.indexOf('API.settings') and li isnt line.indexOf('API.add')
                  if p.indexOf('API.settings') isnt -1
                    stng = p.replace(/\?/g,'').split(')')[0].replace(/,$/,'')
                    method.settings ?= []
                    method.settings.push(stng) if stng not in method.settings
                    settings.push(stng) if stng not in settings
                  else if p.indexOf('?') is -1
                    pt = p.replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
                    if pt.length and pt.split('.').length > 1 and pt not in method.calls
                      method.calls.push pt
                      called[pt] ?= []
                      called[pt].push method.name
                else if tp is 'HTTP.' and p not in method.remotes
                  method.remotes.push p
    # Recursive directory walk feeding _parse; this file itself is skipped.
    _read = (d) ->
      stats = fs.statSync(d)
      #if stats.isSymbolicLink()
      # console.log d
      if stats.isDirectory()
        for f in fs.readdirSync d
          _read d + '/' + f
      else if d.indexOf('structure.coffee') is -1
        _parse d
    _read src
    # Back-fill each method with the routes that call it.
    for rk in _.keys(routes).sort()
      for mt in routes[rk].methods
        if methods[mt]? and (not methods[mt].routes? or rk not in methods[mt].routes)
          methods[mt].routes ?= []
          methods[mt].routes.push rk
    # Record which collection operations each method performs.
    for c in collections
      cna = c.replace('@','')
      re = new RegExp('API.' + cna, 'g')
      res = new RegExp('API.settings.' + cna, 'g')
      for m of methods
        mb = methods[m].code.replace(re,'').replace(res,'').replace(/@/g,'')
        if mb.indexOf(cna+'.') isnt -1
          methods[m].collections ?= {}
          methods[m].collections[c] ?= []
          pts = mb.split(cna+'.')
          pts.shift() if mb.indexOf(cna) isnt 0
          for pt in pts
            pt = pt.split(' ')[0].split('(')[0].split("'")[0].split('"')[0]
            if pt not in methods[m].collections[c]
              methods[m].collections[c].push pt
    for cl of called
      methods[cl].called = called[cl].sort() if methods[cl]? # where are the missing ones? in collections?
    API.structure._structured = count: _.keys(methods).length, collections: collections.sort(), settings: settings.sort(), methods: methods, helpers: helpers, routes: routes, TODO: TODO, logs: logs
    # Warm the nodes/links cache on the freshly memoized structure.
    API.structure.nodeslinks(API.structure._structured)
  return API.structure._structured
API.structure.logarg2fn = (la) ->
  # Resolve a normalized log-argument signature back to its logging method.
  API.structure.read().logs[la]
API.structure.method = (method) ->
  # Fetch one parsed method record by its fully-qualified name.
  API.structure.read().methods[method]
# Thin accessors over the memoized structure; nodes/links/groups fall back to
# computing the graph when it has not been cached yet.
API.structure.methods = () ->
  API.structure.read().methods
API.structure.collections = () ->
  API.structure.read().collections
API.structure.nodes = () ->
  structured = API.structure.read()
  structured.nodes ? API.structure.nodeslinks().nodes
API.structure.links = () ->
  structured = API.structure.read()
  structured.links ? API.structure.nodeslinks().links
API.structure.groups = () ->
  structured = API.structure.read()
  structured.groups ? API.structure.nodeslinks().groups
API.structure.nodeslinks = (sr,group) ->
  ###
  Flattens the parsed structure into D3-style {nodes, links, groups}. When
  `group` is given, links/counters are restricted to calls touching it.
  ###
  sr ?= API.structure.read()
  positions = {}
  counters = {}
  nds = []
  groups = []
  colls = {}
  # One node per parsed method (API.log noise excluded).
  for m of sr.methods
    if m.indexOf('API.log') is -1
      method = sr.methods[m]
      rec = {}
      rec.key = method.name
      counters[rec.key] = 1
      rec.group = method.group
      groups.push(rec.group) if rec.group not in groups
      rec.calls = method.calls
      rec.collections = method.collections
      nds.push rec
      positions[rec.key] = nds.length-1
      # Collect the collection-prototype operations each collection receives.
      for c of method.collections
        colls[c] ?= []
        for pc in method.collections[c]
          apc = 'API.collection.prototype.' + pc
          colls[c].push(apc) if apc not in colls[c]
  # One node per collection, calling its prototype operations.
  for col of colls
    if not positions[col]?
      rec = {}
      rec.key = col
      counters[rec.key] = 1
      rec.group = 'collections'
      rec.calls = []
      for pc in colls[col]
        rec.calls.push pc
      groups.push(rec.group) if rec.group not in groups
      nds.push rec
      positions[rec.key] = nds.length-1
    else
      for pc in colls[col]
        nds[positions[col]].calls.push(pc) if pc not in nds[positions[col]].calls
  for coll in sr.collections
    if not positions[coll]? # collections that no method actually calls, but should have a node anyway
      rec = {}
      rec.key = coll
      counters[rec.key] = 1
      rec.group = 'collections'
      rec.calls = []
      groups.push(rec.group) if rec.group not in groups
      nds.push rec
      positions[rec.key] = nds.length-1
  lns = []
  extras = []
  esp = {}
  nl = nds.length
  for n of nds
    node = nds[n]
    for c in node.calls ? []
      if c.indexOf('API.log') is -1
        if not counters[c]
          counters[c] = 1
        else if not group or c.indexOf('.'+group) isnt -1
          counters[c] += 1
        pos = positions[c]
        if not pos?
          # Call target was never parsed: point at a placeholder 'MISSING'
          # node that will be appended after the nl known nodes.
          if esp[c]?
            # BUG FIX: esp stores an index into `extras`; it must be offset
            # by nl to address the node it becomes once extras are appended.
            pos = nl + esp[c]
          else
            extras.push {key: c, group: 'MISSING'}
            esp[c] = extras.length-1
            # BUG FIX: was `nl + extras.length - 2`, one short of the slot
            # this extra occupies in the final nodes array.
            pos = nl + extras.length - 1
        if (not group or c.indexOf('.'+group) isnt -1 or node.group is group)
          lns.push {source: parseInt(n), target: pos}
    for co of node.collections ? {}
      if not counters[co]
        counters[co] = 1
      # BUG FIX: this condition previously tested `c` (leftover from the call
      # loop above) instead of the collection key `co`.
      else if not group or co.indexOf('.'+group) isnt -1
        counters[co] += 1
      if not group or co.indexOf('.'+group) isnt -1 or node.group is group or group in ['collection','collections','es']
        lns.push {source: parseInt(n), target: positions[co]}
  for e of extras
    nds.push extras[e]
  # Node size/value reflect how often each key was called.
  for nd of nds
    cv = counters[nds[nd].key] ? 1
    nds[nd].value = cv
    nds[nd].size = cv
  # NOTE(review): these ?= memoizations cache whichever (possibly group-
  # filtered) result happens to run first — confirm that is intended.
  API.structure._structured.nodecount ?= nds.length
  API.structure._structured.linkcount ?= lns.length
  API.structure._structured.nodes ?= nds
  API.structure._structured.links ?= lns
  API.structure._structured.groups ?= groups
  return nodes: nds, links: lns, groups: groups.sort()
| 92818 |
import fs from 'fs'
API.add 'structure',
get: () ->
return if this.queryParams.group then API.structure.nodeslinks(undefined,this.queryParams.group) else API.structure.read()
API.add 'structure/method/:method', get: () -> return API.structure.method(this.urlParams.method)
API.add 'structure/methods', get: () -> return API.structure.methods()
API.add 'structure/collections', get: () -> return API.structure.collections()
API.add 'structure/groups', get: () -> return API.structure.groups()
API.add 'structure/nodes', get: () -> return API.structure.nodes()
API.add 'structure/links', get: () -> return API.structure.links()
API.add 'structure/nodeslinks', get: () -> return API.structure.nodeslinks(undefined, this.queryParams.group)
API.structure = {}
API.structure._structured = false
API.structure.read = (src='/home/cloo/dev/noddy/server') ->
if API.structure._structured is false
collections = []
settings = []
methods = {}
helpers = {}
routes = {}
called = {}
TODO = {}
logs = {}
# TODO add in a parse to see if calls are made from within loops, and if so, capture their conditionals if possible
# TODO parse the API.add URL routes, find which methods they call before the next route definition,
# then add the list of routes that calls a method to the method
# TODO once the above is done it could be possible to parse a static site for URLs that call certain methods
# although this would depend on what domains and other routings were used to route to the underlying API
method = {}
_parse = (fn) ->
incomment = false
inroute = false
counter = 0
fl = fs.readFileSync(fn).toString()
for l of lns = fl.replace(/\r\n/g,'\n').split '\n'
counter += 1
line = lns[l].replace(/\t/g,' ')
if JSON.stringify(method) isnt '{}' and (l is '0' or parseInt(l) is lns.length-1 or (line.indexOf('API.') is 0 and line.indexOf('(') isnt -1))
method.code = method.code.trim() #.replace(/\n/g,'')
method.checksum = API.job.sign method.code.replace(/\n/g,'')
#delete method.code
if method.name.indexOf('API.') is 0
methods[method.name] = method
else
helpers[method.name] = method
method = {}
if line.indexOf('API.settings') isnt -1
stng = 'API.settings' + line.split('API.settings')[1].split(' ')[0].split(')')[0].split('}')[0].split(',')[0].split('.indexOf')[0].replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
if stng.split('.').length > 2
if method.name
method.settings ?= []
method.settings.push(stng) if stng not in method.settings
settings.push(stng) if stng not in settings
if line.indexOf('API.add') is 0
inroute = line.split(' ')[1].split(',')[0].replace(/'/g,'').replace(/"/g,'')
if inroute.split('/').pop() is 'test'
inroute = false
else
routes[inroute] ?= {methods: [], code: '', filename: fn.split('/noddy/')[1], line: counter}
if line.toLowerCase().indexOf('todo') isnt -1
TODO[method.name ? 'GENERAL'] ?= []
TODO[method.name ? 'GENERAL'].push line.split(if line.indexOf('todo') isnt -1 then 'todo' else 'TODO')[1].trim()
if incomment or not line.length
if line.indexOf("'''") isnt -1
incomment = false
else if line.trim().startsWith('#') or line.trim().startsWith("'''")
if line.trim().startsWith("'''")
incomment = true
else if line.indexOf('new API.collection') isnt -1
inroute = false
coll = line.split('new ')[0].split('=')[0].trim().split(' ')[0]
collections.push(coll) if coll not in collections and coll isnt 'tc' and coll.indexOf('test_') isnt 0 # don't use test collections
else if (line.indexOf('API.') is 0 or (not line.startsWith(' ') and line.indexOf('=') isnt -1)) and line.indexOf('(') isnt -1 and line.indexOf('.test') is -1 and line.indexOf('API.add') is -1 and line.indexOf('API.settings') isnt 0
inroute = false
method = {}
method.filename = fn.split('/noddy/')[1]
method.line = counter
method.lines = 1
method.secondary = line.indexOf('API.') isnt 0
method.code = line
method.name = line.split(' ')[0]
method.group = if method.name.indexOf('service.') isnt -1 then method.name.split('service.')[1].split('.')[0] else if method.name.indexOf('use.') isnt -1 then method.name.split('use.')[1].split('.')[0] else if method.name.indexOf('API.') is 0 then method.name.replace('API.','').split('.')[0] else undefined
method.args = line.split('(')[1].split(')')[0].split(',')
for a of method.args
method.args[a] = method.args[a].trim() #.split('=')[0].trim()
method.calls = []
method.remotes = []
else if inroute
routes[inroute].code += (if routes[inroute].code then '\n' else '') + line
if line.indexOf('API.') isnt -1 and line.indexOf('.test') is -1 and line.indexOf('API.settings') isnt 0
rtm = line.replace('API.add','').replace('API.settings','')
if rtm.indexOf('API.') isnt -1
rtmc = 'API.' + rtm.split('API.')[1].split(' ')[0].split('(')[0].replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
routes[inroute].methods.push(rtmc) if rtmc.length and rtmc.split('.').length > 1 and rtmc not in routes[inroute].methods
else if method.name?
if not method.logs? and line.indexOf('API.log') isnt -1
log = line.split('API.log')[1]
method.logs ?= []
method.logs.push log
lar = (log.split('+')[0].split('#')[0] + method.args.join('')).toLowerCase().replace(/[^a-z0-9]/g,'')
logs[lar] = method.name
method.lines += 1
method.code += '\n' + line
for tp in ['API.','HTTP.']
li = line.indexOf(tp)
if li isnt -1
parts = line.split tp
parts.shift()
for p in parts
p = if tp is 'API.' then tp + p.split(' ')[0].split('(')[0].split(')')[0].trim() else p.trim().replace('call ','').replace('call(','')
if tp is 'API.' and p not in method.calls and li isnt line.indexOf('API.settings') and li isnt line.indexOf('API.add')
if p.indexOf('API.settings') isnt -1
stng = p.replace(/\?/g,'').split(')')[0].replace(/,$/,'')
method.settings ?= []
method.settings.push(stng) if stng not in method.settings
settings.push(stng) if stng not in settings
else if p.indexOf('?') is -1
pt = p.replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
if pt.length and pt.split('.').length > 1 and pt not in method.calls
method.calls.push pt
called[pt] ?= []
called[pt].push method.name
else if tp is 'HTTP.' and p not in method.remotes
method.remotes.push p
_read = (d) ->
stats = fs.statSync(d)
#if stats.isSymbolicLink()
# console.log d
if stats.isDirectory()
for f in fs.readdirSync d
_read d + '/' + f
else if d.indexOf('structure.coffee') is -1
_parse d
_read src
for rk in _.keys(routes).sort()
for mt in routes[rk].methods
if methods[mt]? and (not methods[mt].routes? or rk not in methods[mt].routes)
methods[mt].routes ?= []
methods[mt].routes.push rk
for c in collections
cna = c.replace('@','')
re = new RegExp('API.' + cna, 'g')
res = new RegExp('API.settings.' + cna, 'g')
for m of methods
mb = methods[m].code.replace(re,'').replace(res,'').replace(/@/g,'')
if mb.indexOf(cna+'.') isnt -1
methods[m].collections ?= {}
methods[m].collections[c] ?= []
pts = mb.split(cna+'.')
pts.shift() if mb.indexOf(cna) isnt 0
for pt in pts
pt = pt.split(' ')[0].split('(')[0].split("'")[0].split('"')[0]
if pt not in methods[m].collections[c]
methods[m].collections[c].push pt
for cl of called
methods[cl].called = called[cl].sort() if methods[cl]? # where are the missing ones? in collections?
API.structure._structured = count: _.keys(methods).length, collections: collections.sort(), settings: settings.sort(), methods: methods, helpers: helpers, routes: routes, TODO: TODO, logs: logs
API.structure.nodeslinks(API.structure._structured)
return API.structure._structured
API.structure.logarg2fn = (la) ->
sr = API.structure.read()
return sr.logs[la]
API.structure.method = (method) ->
sr = API.structure.read()
return sr.methods[method]
API.structure.methods = () ->
return API.structure.read().methods
API.structure.collections = () ->
return API.structure.read().collections
API.structure.nodes = () ->
sr = API.structure.read()
return sr.nodes ? API.structure.nodeslinks().nodes
API.structure.links = () ->
sr = API.structure.read()
return sr.links ? API.structure.nodeslinks().links
API.structure.groups = () ->
sr = API.structure.read()
return sr.groups ? API.structure.nodeslinks().groups
API.structure.nodeslinks = (sr,group) ->
sr ?= API.structure.read()
positions = {}
counters = {}
nds = []
groups = []
colls = {}
for m of sr.methods
if m.indexOf('API.log') is -1
method = sr.methods[m]
rec = {}
rec.key = method.<KEY>
counters[rec.key] = 1
rec.group = method.group
groups.push(rec.group) if rec.group not in groups
rec.calls = method.calls
rec.collections = method.collections
nds.push rec
positions[rec.key] = nds.length-1
for c of method.collections
colls[c] ?= []
for pc in method.collections[c]
apc = 'API.collection.prototype.' + pc
colls[c].push(apc) if apc not in colls[c]
for col of colls
if not positions[col]?
rec = {}
rec.key = col
counters[rec.key] = 1
rec.group = 'collections'
rec.calls = []
for pc in colls[col]
rec.calls.push pc
groups.push(rec.group) if rec.group not in groups
nds.push rec
positions[rec.key] = nds.length-1
else
for pc in colls[col]
nds[positions[col]].calls.push(pc) if pc not in nds[positions[col]].calls
for coll in sr.collections
if not positions[coll]? # collections that no method actually calls, but should have a node anyway
rec = {}
rec.key = coll
counters[rec.key] = 1
rec.group = 'collections'
rec.calls = []
groups.push(rec.group) if rec.group not in groups
nds.push rec
positions[rec.key] = nds.length-1
lns = []
extras = []
esp = {}
nl = nds.length
for n of nds
node = nds[n]
for c in node.calls ? []
if c.indexOf('API.log') is -1
if not counters[c]
counters[c] = 1
else if not group or c.indexOf('.'+group) isnt -1
counters[c] += 1
pos = positions[c]
if not pos?
pos = esp[c]
if not pos?
extras.push {key: c, group: 'MISSING'}
esp[c] = extras.length-1
pos = nl + extras.length - 2
if (not group or c.indexOf('.'+group) isnt -1 or node.group is group)
lns.push {source: parseInt(n), target: pos}
for co of node.collections ? {}
if not counters[co]
counters[co] = 1
else if not group or c.indexOf('.'+group) isnt -1
counters[co] += 1
if not group or co.indexOf('.'+group) isnt -1 or node.group is group or group in ['collection','collections','es']
lns.push {source: parseInt(n), target: positions[co]}
for e of extras
nds.push extras[e]
for nd of nds
cv = counters[nds[nd].key] ? 1
nds[nd].value = cv
nds[nd].size = cv
API.structure._structured.nodecount ?= nds.length
API.structure._structured.linkcount ?= lns.length
API.structure._structured.nodes ?= nds
API.structure._structured.links ?= lns
API.structure._structured.groups ?= groups
return nodes: nds, links: lns, groups: groups.sort()
| true |
import fs from 'fs'
API.add 'structure',
get: () ->
return if this.queryParams.group then API.structure.nodeslinks(undefined,this.queryParams.group) else API.structure.read()
API.add 'structure/method/:method', get: () -> return API.structure.method(this.urlParams.method)
API.add 'structure/methods', get: () -> return API.structure.methods()
API.add 'structure/collections', get: () -> return API.structure.collections()
API.add 'structure/groups', get: () -> return API.structure.groups()
API.add 'structure/nodes', get: () -> return API.structure.nodes()
API.add 'structure/links', get: () -> return API.structure.links()
API.add 'structure/nodeslinks', get: () -> return API.structure.nodeslinks(undefined, this.queryParams.group)
API.structure = {}
API.structure._structured = false
API.structure.read = (src='/home/cloo/dev/noddy/server') ->
if API.structure._structured is false
collections = []
settings = []
methods = {}
helpers = {}
routes = {}
called = {}
TODO = {}
logs = {}
# TODO add in a parse to see if calls are made from within loops, and if so, capture their conditionals if possible
# TODO parse the API.add URL routes, find which methods they call before the next route definition,
# then add the list of routes that calls a method to the method
# TODO once the above is done it could be possible to parse a static site for URLs that call certain methods
# although this would depend on what domains and other routings were used to route to the underlying API
method = {}
_parse = (fn) ->
incomment = false
inroute = false
counter = 0
fl = fs.readFileSync(fn).toString()
for l of lns = fl.replace(/\r\n/g,'\n').split '\n'
counter += 1
line = lns[l].replace(/\t/g,' ')
if JSON.stringify(method) isnt '{}' and (l is '0' or parseInt(l) is lns.length-1 or (line.indexOf('API.') is 0 and line.indexOf('(') isnt -1))
method.code = method.code.trim() #.replace(/\n/g,'')
method.checksum = API.job.sign method.code.replace(/\n/g,'')
#delete method.code
if method.name.indexOf('API.') is 0
methods[method.name] = method
else
helpers[method.name] = method
method = {}
if line.indexOf('API.settings') isnt -1
stng = 'API.settings' + line.split('API.settings')[1].split(' ')[0].split(')')[0].split('}')[0].split(',')[0].split('.indexOf')[0].replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
if stng.split('.').length > 2
if method.name
method.settings ?= []
method.settings.push(stng) if stng not in method.settings
settings.push(stng) if stng not in settings
if line.indexOf('API.add') is 0
inroute = line.split(' ')[1].split(',')[0].replace(/'/g,'').replace(/"/g,'')
if inroute.split('/').pop() is 'test'
inroute = false
else
routes[inroute] ?= {methods: [], code: '', filename: fn.split('/noddy/')[1], line: counter}
if line.toLowerCase().indexOf('todo') isnt -1
TODO[method.name ? 'GENERAL'] ?= []
TODO[method.name ? 'GENERAL'].push line.split(if line.indexOf('todo') isnt -1 then 'todo' else 'TODO')[1].trim()
if incomment or not line.length
if line.indexOf("'''") isnt -1
incomment = false
else if line.trim().startsWith('#') or line.trim().startsWith("'''")
if line.trim().startsWith("'''")
incomment = true
else if line.indexOf('new API.collection') isnt -1
inroute = false
coll = line.split('new ')[0].split('=')[0].trim().split(' ')[0]
collections.push(coll) if coll not in collections and coll isnt 'tc' and coll.indexOf('test_') isnt 0 # don't use test collections
else if (line.indexOf('API.') is 0 or (not line.startsWith(' ') and line.indexOf('=') isnt -1)) and line.indexOf('(') isnt -1 and line.indexOf('.test') is -1 and line.indexOf('API.add') is -1 and line.indexOf('API.settings') isnt 0
inroute = false
method = {}
method.filename = fn.split('/noddy/')[1]
method.line = counter
method.lines = 1
method.secondary = line.indexOf('API.') isnt 0
method.code = line
method.name = line.split(' ')[0]
method.group = if method.name.indexOf('service.') isnt -1 then method.name.split('service.')[1].split('.')[0] else if method.name.indexOf('use.') isnt -1 then method.name.split('use.')[1].split('.')[0] else if method.name.indexOf('API.') is 0 then method.name.replace('API.','').split('.')[0] else undefined
method.args = line.split('(')[1].split(')')[0].split(',')
for a of method.args
method.args[a] = method.args[a].trim() #.split('=')[0].trim()
method.calls = []
method.remotes = []
else if inroute
routes[inroute].code += (if routes[inroute].code then '\n' else '') + line
if line.indexOf('API.') isnt -1 and line.indexOf('.test') is -1 and line.indexOf('API.settings') isnt 0
rtm = line.replace('API.add','').replace('API.settings','')
if rtm.indexOf('API.') isnt -1
rtmc = 'API.' + rtm.split('API.')[1].split(' ')[0].split('(')[0].replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
routes[inroute].methods.push(rtmc) if rtmc.length and rtmc.split('.').length > 1 and rtmc not in routes[inroute].methods
else if method.name?
if not method.logs? and line.indexOf('API.log') isnt -1
log = line.split('API.log')[1]
method.logs ?= []
method.logs.push log
lar = (log.split('+')[0].split('#')[0] + method.args.join('')).toLowerCase().replace(/[^a-z0-9]/g,'')
logs[lar] = method.name
method.lines += 1
method.code += '\n' + line
for tp in ['API.','HTTP.']
li = line.indexOf(tp)
if li isnt -1
parts = line.split tp
parts.shift()
for p in parts
p = if tp is 'API.' then tp + p.split(' ')[0].split('(')[0].split(')')[0].trim() else p.trim().replace('call ','').replace('call(','')
if tp is 'API.' and p not in method.calls and li isnt line.indexOf('API.settings') and li isnt line.indexOf('API.add')
if p.indexOf('API.settings') isnt -1
stng = p.replace(/\?/g,'').split(')')[0].replace(/,$/,'')
method.settings ?= []
method.settings.push(stng) if stng not in method.settings
settings.push(stng) if stng not in settings
else if p.indexOf('?') is -1
pt = p.replace(/[^a-zA-Z0-9\.\[\]]/g,'').replace(/\.$/,'')
if pt.length and pt.split('.').length > 1 and pt not in method.calls
method.calls.push pt
called[pt] ?= []
called[pt].push method.name
else if tp is 'HTTP.' and p not in method.remotes
method.remotes.push p
_read = (d) ->
stats = fs.statSync(d)
#if stats.isSymbolicLink()
# console.log d
if stats.isDirectory()
for f in fs.readdirSync d
_read d + '/' + f
else if d.indexOf('structure.coffee') is -1
_parse d
_read src
for rk in _.keys(routes).sort()
for mt in routes[rk].methods
if methods[mt]? and (not methods[mt].routes? or rk not in methods[mt].routes)
methods[mt].routes ?= []
methods[mt].routes.push rk
for c in collections
cna = c.replace('@','')
re = new RegExp('API.' + cna, 'g')
res = new RegExp('API.settings.' + cna, 'g')
for m of methods
mb = methods[m].code.replace(re,'').replace(res,'').replace(/@/g,'')
if mb.indexOf(cna+'.') isnt -1
methods[m].collections ?= {}
methods[m].collections[c] ?= []
pts = mb.split(cna+'.')
pts.shift() if mb.indexOf(cna) isnt 0
for pt in pts
pt = pt.split(' ')[0].split('(')[0].split("'")[0].split('"')[0]
if pt not in methods[m].collections[c]
methods[m].collections[c].push pt
for cl of called
methods[cl].called = called[cl].sort() if methods[cl]? # where are the missing ones? in collections?
API.structure._structured = count: _.keys(methods).length, collections: collections.sort(), settings: settings.sort(), methods: methods, helpers: helpers, routes: routes, TODO: TODO, logs: logs
API.structure.nodeslinks(API.structure._structured)
return API.structure._structured
API.structure.logarg2fn = (la) ->
sr = API.structure.read()
return sr.logs[la]
API.structure.method = (method) ->
sr = API.structure.read()
return sr.methods[method]
API.structure.methods = () ->
return API.structure.read().methods
API.structure.collections = () ->
return API.structure.read().collections
API.structure.nodes = () ->
sr = API.structure.read()
return sr.nodes ? API.structure.nodeslinks().nodes
API.structure.links = () ->
sr = API.structure.read()
return sr.links ? API.structure.nodeslinks().links
API.structure.groups = () ->
sr = API.structure.read()
return sr.groups ? API.structure.nodeslinks().groups
API.structure.nodeslinks = (sr,group) ->
sr ?= API.structure.read()
positions = {}
counters = {}
nds = []
groups = []
colls = {}
for m of sr.methods
if m.indexOf('API.log') is -1
method = sr.methods[m]
rec = {}
rec.key = method.PI:KEY:<KEY>END_PI
counters[rec.key] = 1
rec.group = method.group
groups.push(rec.group) if rec.group not in groups
rec.calls = method.calls
rec.collections = method.collections
nds.push rec
positions[rec.key] = nds.length-1
for c of method.collections
colls[c] ?= []
for pc in method.collections[c]
apc = 'API.collection.prototype.' + pc
colls[c].push(apc) if apc not in colls[c]
for col of colls
if not positions[col]?
rec = {}
rec.key = col
counters[rec.key] = 1
rec.group = 'collections'
rec.calls = []
for pc in colls[col]
rec.calls.push pc
groups.push(rec.group) if rec.group not in groups
nds.push rec
positions[rec.key] = nds.length-1
else
for pc in colls[col]
nds[positions[col]].calls.push(pc) if pc not in nds[positions[col]].calls
for coll in sr.collections
if not positions[coll]? # collections that no method actually calls, but should have a node anyway
rec = {}
rec.key = coll
counters[rec.key] = 1
rec.group = 'collections'
rec.calls = []
groups.push(rec.group) if rec.group not in groups
nds.push rec
positions[rec.key] = nds.length-1
lns = []
extras = []
esp = {}
nl = nds.length
for n of nds
node = nds[n]
for c in node.calls ? []
if c.indexOf('API.log') is -1
if not counters[c]
counters[c] = 1
else if not group or c.indexOf('.'+group) isnt -1
counters[c] += 1
pos = positions[c]
if not pos?
pos = esp[c]
if not pos?
extras.push {key: c, group: 'MISSING'}
esp[c] = extras.length-1
pos = nl + extras.length - 2
if (not group or c.indexOf('.'+group) isnt -1 or node.group is group)
lns.push {source: parseInt(n), target: pos}
for co of node.collections ? {}
if not counters[co]
counters[co] = 1
else if not group or c.indexOf('.'+group) isnt -1
counters[co] += 1
if not group or co.indexOf('.'+group) isnt -1 or node.group is group or group in ['collection','collections','es']
lns.push {source: parseInt(n), target: positions[co]}
for e of extras
nds.push extras[e]
for nd of nds
cv = counters[nds[nd].key] ? 1
nds[nd].value = cv
nds[nd].size = cv
API.structure._structured.nodecount ?= nds.length
API.structure._structured.linkcount ?= lns.length
API.structure._structured.nodes ?= nds
API.structure._structured.links ?= lns
API.structure._structured.groups ?= groups
return nodes: nds, links: lns, groups: groups.sort()
|
[
{
"context": "le.log \"✓ Paperjs Functionality\"\n this.name = \"siloseam_design\"\n gui.add this, \"name\"\n gui.add this, \"save",
"end": 161,
"score": 0.8498600125312805,
"start": 146,
"tag": "USERNAME",
"value": "siloseam_design"
},
{
"context": " nb = bladder.unite(r)\n nb.name=\"BLADDER\"\n bladder.remove()\n dm",
"end": 6323,
"score": 0.6912431120872498,
"start": 6321,
"tag": "NAME",
"value": "BL"
},
{
"context": " = this\n # CREATE SEAM\n if p.name.includes(\"BLADDER\")\n\n separator = p\n separator.set\n ",
"end": 6650,
"score": 0.7662085890769958,
"start": 6646,
"tag": "NAME",
"value": "BLAD"
},
{
"context": "his\n # CREATE SEAM\n if p.name.includes(\"BLADDER\")\n\n separator = p\n separator.set\n ",
"end": 6653,
"score": 0.5332305431365967,
"start": 6650,
"tag": "NAME",
"value": "DER"
},
{
"context": "->\n np = p.expand\n name: \"BLADDER\"\n strokeAlignment: \"exterior\", \n ",
"end": 11379,
"score": 0.9980718493461609,
"start": 11372,
"tag": "NAME",
"value": "BLADDER"
}
] | tool/app/assets/javascripts/tool/siloseam.coffee | The-Hybrid-Atelier/siloseam | 1 |
class window.PaperDesignTool extends ProxySTL
constructor: (ops)->
# super ops
console.log "✓ Paperjs Functionality"
this.name = "siloseam_design"
gui.add this, "name"
gui.add this, "save_svg"
@setup(ops)
setup: (ops)->
canvas = ops.canvas[0]
console.log $('#sandbox').height()
$(canvas)
.attr('width', $("#sandbox").width())
.attr('height', $("#sandbox").height())
window.paper = new paper.PaperScope
loadCustomLibraries()
paper.setup canvas
paper.view.zoom = 2.5
paper.tool = new paper.Tool
name: "default_tool"
$(canvas)
.attr('width', $("#sandbox").width())
.attr('height', $("#sandbox").height())
@toolEvents()
toolEvents: ()->
return
save_svg: ()->
prev = paper.view.zoom;
console.log("Exporting file as SVG");
paper.view.zoom = 1;
paper.view.update();
bg = paper.project.getItems({"name": "BACKGROUND"})
g = new paper.Group
name: "temp"
children: paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
g.pivot = g.bounds.topLeft
prior = g.position
g.position = new paper.Point(0, 0)
if bg.length > 0
exp = paper.project.exportSVG
bounds: g.bounds
asString: true,
precision: 5
else
exp = paper.project.exportSVG
asString: true,
precision: 5
g.position = prior
g.ungroup()
saveAs(new Blob([exp], {type:"application/svg+xml"}), @name + ".svg")
paper.view.zoom = prev
clear: ->
paper.project.clear()
###
Direct Manipulation Interactions
###
window.dm = (p)->
p.set
onMouseDown: (e)->
this.touched = true
this.selected = not this.selected
this.update_dimensions()
update_dimensions: (e)->
if dim
if this.data and this.data.height
z = this.data.height
else
z = 0
dim.set(this.bounds.height, this.bounds.width, z)
return
onMouseDrag: (e)->
this.position = this.position.add(e.delta)
onMouseUp: (e)->
return
clone_wire: ()->
x = dm(this.clone())
x.name = this.name
return x
return p
class window.SiliconeTool extends PaperDesignTool
@ROTATE_STEP: 15
@TRANSLATE_STEP: 10
@AIRTUBE_TOLERANCE: 0.5
@SEAM_ALLOWANCE: Ruler.mm2pts(4)#6.5) #8 mm
@SEAM_STEP: Ruler.mm2pts(2) #1 mm
@STEP_TOLERANCE: Ruler.mm2pts(2) #1 mm
@MOLD_WALL: Ruler.mm2pts(3) #3 mm
@SEPARATOR_PAD: Ruler.mm2pts(6) #5 mm
@MOLD_BORDER: Ruler.mm2pts(2) #2 mm
###
To inherit parent class functionality, super ops must be the first line.
This class hosts the logic for taking SVG Paths and interpreting them as wires.
###
constructor: (ops)->
super ops
console.log "✓ SiliconeTool Functionality"
@test_addSVG()
@keybindings()
keybindings: ()->
scope = this
# SILOSEAM KEYBINDINGS
$(document).on "paperkeydown", {}, (event, key, modifiers, paths)->
if modifiers.shift
# SHIFT CLUTCH
action = switch key
# OVERPRINT ALL
when "!"
paths = paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
(p)->
p.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
else
# REGULAR
action = switch key
when "3"
SiliconeTool.stylize()
false
when "a" then (p)->
name = $('[data-key="a"]').find('select').val()
p.name = name
$(document).trigger("refresh")
# OVERPRINT
# when "1" then (p)-> p.set
# fillColor: null
# strokeWidth: 1
# strokeColor: "black"
false
when "1"
gradients = paper.project.getItems({name: "GRADIENT"})
_.each gradients, (g)-> g.remove()
paths = paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
_.each paths, (p)->
p.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
false
when "2"
bladder = scope.connect_to_tube(paths)
if bladder
console.log "# OF BLADDERS", bladder.length
scope.bladder_seams(bladder)
false
if action and _.isFunction(action)
_.each paths, action
connect_to_tube: (paths)->
# JOIN INLET + BLADDER
# IF ANY AIRTUBES INTERSECT, THEN UNITE
airtubes = _.filter paths, (p)-> p.name.includes("AIRTUBE")
bladders = _.filter paths, (p)-> p.name.includes("BLADDER")
if airtubes.length == 0 or bladders.length == 0
alertify.error "You must select both a bladder and airtube geometry"
return null
else
nb = null
_.each airtubes, (airtube)->
airtube_material = airtube.name.split("_")[0]
airtube_material = window.materials.get(airtube_material)
airtube.applyMatrix = true
r = new paper.Path.Rectangle
size: [airtube.length, airtube_material.strokeWidth * SiliconeTool.AIRTUBE_TOLERANCE]
position: airtube.position.clone()
strokeColor: "black"
strokeWidth: 0.5
r.rotation = airtube.getTangentAt(airtube.length/2).angle
makeClip = (tube)->
clip = new paper.Path.Rectangle
name: "SUB"
size: [tube.length, 500]
fillColor: "red"
opacity: 0
strokeWidth: 0.5
clip.rotation = tube.getTangentAt(tube.length/2).angle
dir = tube.getTangentAt(0)
dir.length = -tube.length/2
clip.position = tube.firstSegment.point.add(dir)
return clip
sub = makeClip(airtube)
if bladders[0].intersects(sub)
sub.remove()
airtube.reverse()
makeClip(airtube)
_.each bladders, (bladder)->
if airtube.intersects(bladder)
nb = bladder.unite(r)
nb.name="BLADDER"
bladder.remove()
dm(nb)
r.remove()
airtube.name = "OUTLET"
airtube.airtube_allowance = airtube_material.strokeWidth * SiliconeTool.AIRTUBE_TOLERANCE
return nb
bladder_seams: (p)->
scope = this
# CREATE SEAM
if p.name.includes("BLADDER")
separator = p
separator.set
name: "SEPARATOR"
fillColor: "#BC519E"
strokeWidth: 0
seam = separator.expand
name: "SEAM"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.SEAM_ALLOWANCE
fillColor: "#BFDFD1"
strokeWidth: 0
joinType: "miter"
data:
height: 0
seam_step = seam.expand
name: "SEAM_STEP"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.SEAM_STEP
fillColor: "blue"
strokeWidth: 0
joinType: "miter"
data:
height: 2
separator.sendToBack()
seam.sendToBack()
seam_step.sendToBack()
subs = paper.project.getItems
name: "SUB"
_.each subs, (s)->
if seam.intersects(s)
ns = seam.subtract(s)
ns.name = seam.name
seam.remove()
seam = dm(ns)
if seam_step.intersects(s)
ns = seam_step.subtract(s)
ns.name = seam_step.name
seam_step.remove()
seam_step = dm(ns)
_.each subs, (s)-> s.remove()
separator.bringToFront()
SiliconeTool.registration_site(separator, seam, seam_step)
@registration_site: (separator, seam, seam_step)->
outlets = paper.project.getItems
name: "OUTLET"
# ADD REGISTRATION SITE
_.each outlets, (outlet)->
outlet.bringToFront()
outlet.fillColor = "yellow"
pt = outlet.firstSegment.point
tang = outlet.getTangentAt(0)
norm = outlet.getNormalAt(0)
norm.length = SiliconeTool.SEPARATOR_PAD
tang.length = SiliconeTool.SEPARATOR_PAD
pad = new paper.Path.Rectangle
name: "PAD"
size: [SiliconeTool.SEPARATOR_PAD, outlet.airtube_allowance * 3]
fillColor: "orange"
position: outlet.position
pad.rotation = outlet.getTangentAt(outlet.length/2).angle
tang.length = -SiliconeTool.SEPARATOR_PAD/2
pad.position = tang.add(outlet.firstSegment.point)
outlet.remove()
big_pad = pad.expand
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.STEP_TOLERANCE
joinType: "miter"
fillColor: "red"
nsep = dm(separator.unite(pad))
separator.remove()
separator = nsep
nss = dm(seam_step.unite(big_pad))
seam_step.remove()
seam_step = nss
ns = dm(seam.subtract(big_pad))
seam.remove()
seam = ns
seam.sendToBack()
seam_step.sendToBack()
pad.remove()
big_pad.remove()
SiliconeTool.mold_wall(separator, seam, seam_step)
@mold_wall: (separator, seam, seam_step)->
wall = seam_step.expand
name: "MOLD_WALL"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.MOLD_WALL
fillColor: "white"
strokeWidth: 0
joinType: "round"
data:
height: 4
wall.sendToBack()
dm(wall)
separator.bringToFront()
# ADD BACKGROUND
bg = new paper.Path.Rectangle
name: "BACKGROUND"
rectangle: wall.bounds.expand(SiliconeTool.MOLD_BORDER)
data:
height: 0
dm(bg)
bg.sendToBack()
# FOR 2.5D MOLD GENERATION
g = new paper.Group
name: "STL"
children: [bg, wall, seam_step, seam, separator]
g.ungroup()
# scope.normalize_heights()
# scope.update_dimensions()
# scope.siloseam_stylize()
###
Binds hotkeys to wire operations.
Overrides default tool events from PaperDesignTool.
###
toolEvents: ()->
scope = this
hitOptions =
class: paper.Path
stroke: true
fill: true
tolerance: 15
paper.tool.set
onMouseDown: (event)->
hitResults = paper.project.hitTestAll event.point, hitOptions
if _.isEmpty(hitResults)
paper.project.deselectAll()
$(document).trigger("refresh")
onMouseDrag: (event)->
if event.modifiers.shift
a = event.downPoint.subtract(event.point)
a = a.add(paper.view.center)
paper.view.center = a
onKeyUp: (event)->
if not event.modifiers.shift
$(document).trigger "end_shift"
$(document).trigger "paperkeyup", [event.key, event.modifiers, []]
onKeyDown: (event) ->
paths = paper.project.selectedItems
if event.modifiers.shift
$(document).trigger "start_shift"
$(document).trigger "paperkeydown", [event.key, event.modifiers, paths]
if event.key == 'b'
nps = _.map paths, (p)->
np = p.expand
name: "BLADDER"
strokeAlignment: "exterior",
strokeWidth: 1,
strokeOffset: 50
strokeColor: "black"
fillColor: null
joinType: "miter"
return np
if nps.length > 0
unp = nps[0]
nps = nps.slice(0)
if nps.length > 0
_.each nps, (np)->
temp = unp.unite(np)
unp.remove()
np.remove()
unp = temp
dm(unp)
###
Styles the artwork to match the Siloseam color palette
###
@stylize: ()->
# console.log "STYLIZE"
style_set = (name, style)->
matches = paper.project.getItems({name: name})
_.each matches, (m)-> m.set(style)
all = paper.project.getItems({})
_.each all, (m)-> m.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
style_set "SEAM",
fillColor: "#BFDFD1"
strokeColor: "BCBEC0"
strokeWidth: 0.709
style_set "AIRTUBE",
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: 0.709
style_set "AIRTUBE4",
opacity: 1
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: Ruler.in2pts(0.25)
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "AIRTUBE8",
opacity: 1
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: Ruler.in2pts(0.25)
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "OUTLET",
opacity: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "MOLD_WALL",
fillColor: "#D2D2D2"
strokeColor: "#111111"
strokeWidth: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "BLADDER",
fillColor: "#BC519E"
strokeWidth: 0
style_set "BACKGROUND",
fillColor: "#DDD"
strokeColor: "black"
strokeWidth: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(2, 2)
style_set "SEAM_STEP",
fillColor: "white"
strokeColor: "#111111"
strokeWidth: 0
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
matches = paper.project.getItems({name: "SEPARATOR"})
_.each matches, (m)->
transparent_blue = new paper.Color("#2884C6")
transparent_blue.alpha = 0
top = m.clone()
m.set
fillColor: "#E6E7E8"
strokeColor: "#BCBEC0"
strokeWidth: 0.709
top.set
name: "GRADIENT"
fillColor:
gradient:
stops: [['#2884C6'], [transparent_blue, 1]]
radial: false
origin: m.bounds.leftCenter
destination: m.bounds.rightCenter
alpha: 0.38
###
Given an SVG asset url, the extracts all Path objects to the topmost
level of the SVG graph. Other groups are removed.
ops =
url: url of the SVG asset (string, required)
position: where to place paths (paper.Point, default: paper.view.center)
###
addSVG: (ops)->
scope = this
# POSITION HANDLING
if not ops.position
ops.position = paper.view.center
ops.position = ops.position.clone()
console.log "LOADING", ops.url
paper.project.activeLayer.name = "SILOSEAM"
paper.project.importSVG ops.url,
expandShapes: true
insert: false
onError: (item)->
alertify.error "Could not load: " + item
onLoad: (item) ->
# Extract Path and Compound Path Elements
paths = item.getItems
className: (n)->
_.includes ["Path", "CompoundPath"], n
# Attach to Temporary Group and Release
g = new paper.Group
name: "temp"
_.each paths, (p)-> p.parent = g
g.set {position: ops.position}
g.reverseChildren()
g.ungroup()
# Add Interactivity
_.each paths, (p)->
dm(p)
if p.name
p.name = p.name.split("_")[0]
SiliconeTool.stylize()
###
Test: Places SVG asset on canvas with full wire interactivity.
###
test_addSVG: ()->
scope = this
file = "/primitives/example.svg"
console.log "DEFAULT LOAD", file
@addSVG
url: file
# url: "/primitives/primitives_elegant_elle-1.svg"
# url: "/primitives/primitives_elegant_elle-1.svg"
position: paper.view.center
# mat = Material.detectMaterial(path)
# w = new WirePath(scope.paper, value)
| 225852 |
class window.PaperDesignTool extends ProxySTL
constructor: (ops)->
# super ops
console.log "✓ Paperjs Functionality"
this.name = "siloseam_design"
gui.add this, "name"
gui.add this, "save_svg"
@setup(ops)
setup: (ops)->
canvas = ops.canvas[0]
console.log $('#sandbox').height()
$(canvas)
.attr('width', $("#sandbox").width())
.attr('height', $("#sandbox").height())
window.paper = new paper.PaperScope
loadCustomLibraries()
paper.setup canvas
paper.view.zoom = 2.5
paper.tool = new paper.Tool
name: "default_tool"
$(canvas)
.attr('width', $("#sandbox").width())
.attr('height', $("#sandbox").height())
@toolEvents()
toolEvents: ()->
return
save_svg: ()->
prev = paper.view.zoom;
console.log("Exporting file as SVG");
paper.view.zoom = 1;
paper.view.update();
bg = paper.project.getItems({"name": "BACKGROUND"})
g = new paper.Group
name: "temp"
children: paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
g.pivot = g.bounds.topLeft
prior = g.position
g.position = new paper.Point(0, 0)
if bg.length > 0
exp = paper.project.exportSVG
bounds: g.bounds
asString: true,
precision: 5
else
exp = paper.project.exportSVG
asString: true,
precision: 5
g.position = prior
g.ungroup()
saveAs(new Blob([exp], {type:"application/svg+xml"}), @name + ".svg")
paper.view.zoom = prev
clear: ->
paper.project.clear()
###
Direct Manipulation Interactions
###
window.dm = (p)->
p.set
onMouseDown: (e)->
this.touched = true
this.selected = not this.selected
this.update_dimensions()
update_dimensions: (e)->
if dim
if this.data and this.data.height
z = this.data.height
else
z = 0
dim.set(this.bounds.height, this.bounds.width, z)
return
onMouseDrag: (e)->
this.position = this.position.add(e.delta)
onMouseUp: (e)->
return
clone_wire: ()->
x = dm(this.clone())
x.name = this.name
return x
return p
class window.SiliconeTool extends PaperDesignTool
@ROTATE_STEP: 15
@TRANSLATE_STEP: 10
@AIRTUBE_TOLERANCE: 0.5
@SEAM_ALLOWANCE: Ruler.mm2pts(4)#6.5) #8 mm
@SEAM_STEP: Ruler.mm2pts(2) #1 mm
@STEP_TOLERANCE: Ruler.mm2pts(2) #1 mm
@MOLD_WALL: Ruler.mm2pts(3) #3 mm
@SEPARATOR_PAD: Ruler.mm2pts(6) #5 mm
@MOLD_BORDER: Ruler.mm2pts(2) #2 mm
###
To inherit parent class functionality, super ops must be the first line.
This class hosts the logic for taking SVG Paths and interpreting them as wires.
###
constructor: (ops)->
super ops
console.log "✓ SiliconeTool Functionality"
@test_addSVG()
@keybindings()
keybindings: ()->
scope = this
# SILOSEAM KEYBINDINGS
$(document).on "paperkeydown", {}, (event, key, modifiers, paths)->
if modifiers.shift
# SHIFT CLUTCH
action = switch key
# OVERPRINT ALL
when "!"
paths = paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
(p)->
p.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
else
# REGULAR
action = switch key
when "3"
SiliconeTool.stylize()
false
when "a" then (p)->
name = $('[data-key="a"]').find('select').val()
p.name = name
$(document).trigger("refresh")
# OVERPRINT
# when "1" then (p)-> p.set
# fillColor: null
# strokeWidth: 1
# strokeColor: "black"
false
when "1"
gradients = paper.project.getItems({name: "GRADIENT"})
_.each gradients, (g)-> g.remove()
paths = paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
_.each paths, (p)->
p.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
false
when "2"
bladder = scope.connect_to_tube(paths)
if bladder
console.log "# OF BLADDERS", bladder.length
scope.bladder_seams(bladder)
false
if action and _.isFunction(action)
_.each paths, action
connect_to_tube: (paths)->
# JOIN INLET + BLADDER
# IF ANY AIRTUBES INTERSECT, THEN UNITE
airtubes = _.filter paths, (p)-> p.name.includes("AIRTUBE")
bladders = _.filter paths, (p)-> p.name.includes("BLADDER")
if airtubes.length == 0 or bladders.length == 0
alertify.error "You must select both a bladder and airtube geometry"
return null
else
nb = null
_.each airtubes, (airtube)->
airtube_material = airtube.name.split("_")[0]
airtube_material = window.materials.get(airtube_material)
airtube.applyMatrix = true
r = new paper.Path.Rectangle
size: [airtube.length, airtube_material.strokeWidth * SiliconeTool.AIRTUBE_TOLERANCE]
position: airtube.position.clone()
strokeColor: "black"
strokeWidth: 0.5
r.rotation = airtube.getTangentAt(airtube.length/2).angle
makeClip = (tube)->
clip = new paper.Path.Rectangle
name: "SUB"
size: [tube.length, 500]
fillColor: "red"
opacity: 0
strokeWidth: 0.5
clip.rotation = tube.getTangentAt(tube.length/2).angle
dir = tube.getTangentAt(0)
dir.length = -tube.length/2
clip.position = tube.firstSegment.point.add(dir)
return clip
sub = makeClip(airtube)
if bladders[0].intersects(sub)
sub.remove()
airtube.reverse()
makeClip(airtube)
_.each bladders, (bladder)->
if airtube.intersects(bladder)
nb = bladder.unite(r)
nb.name="<NAME>ADDER"
bladder.remove()
dm(nb)
r.remove()
airtube.name = "OUTLET"
airtube.airtube_allowance = airtube_material.strokeWidth * SiliconeTool.AIRTUBE_TOLERANCE
return nb
bladder_seams: (p)->
scope = this
# CREATE SEAM
if p.name.includes("<NAME> <NAME>")
separator = p
separator.set
name: "SEPARATOR"
fillColor: "#BC519E"
strokeWidth: 0
seam = separator.expand
name: "SEAM"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.SEAM_ALLOWANCE
fillColor: "#BFDFD1"
strokeWidth: 0
joinType: "miter"
data:
height: 0
seam_step = seam.expand
name: "SEAM_STEP"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.SEAM_STEP
fillColor: "blue"
strokeWidth: 0
joinType: "miter"
data:
height: 2
separator.sendToBack()
seam.sendToBack()
seam_step.sendToBack()
subs = paper.project.getItems
name: "SUB"
_.each subs, (s)->
if seam.intersects(s)
ns = seam.subtract(s)
ns.name = seam.name
seam.remove()
seam = dm(ns)
if seam_step.intersects(s)
ns = seam_step.subtract(s)
ns.name = seam_step.name
seam_step.remove()
seam_step = dm(ns)
_.each subs, (s)-> s.remove()
separator.bringToFront()
SiliconeTool.registration_site(separator, seam, seam_step)
@registration_site: (separator, seam, seam_step)->
outlets = paper.project.getItems
name: "OUTLET"
# ADD REGISTRATION SITE
_.each outlets, (outlet)->
outlet.bringToFront()
outlet.fillColor = "yellow"
pt = outlet.firstSegment.point
tang = outlet.getTangentAt(0)
norm = outlet.getNormalAt(0)
norm.length = SiliconeTool.SEPARATOR_PAD
tang.length = SiliconeTool.SEPARATOR_PAD
pad = new paper.Path.Rectangle
name: "PAD"
size: [SiliconeTool.SEPARATOR_PAD, outlet.airtube_allowance * 3]
fillColor: "orange"
position: outlet.position
pad.rotation = outlet.getTangentAt(outlet.length/2).angle
tang.length = -SiliconeTool.SEPARATOR_PAD/2
pad.position = tang.add(outlet.firstSegment.point)
outlet.remove()
big_pad = pad.expand
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.STEP_TOLERANCE
joinType: "miter"
fillColor: "red"
nsep = dm(separator.unite(pad))
separator.remove()
separator = nsep
nss = dm(seam_step.unite(big_pad))
seam_step.remove()
seam_step = nss
ns = dm(seam.subtract(big_pad))
seam.remove()
seam = ns
seam.sendToBack()
seam_step.sendToBack()
pad.remove()
big_pad.remove()
SiliconeTool.mold_wall(separator, seam, seam_step)
@mold_wall: (separator, seam, seam_step)->
wall = seam_step.expand
name: "MOLD_WALL"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.MOLD_WALL
fillColor: "white"
strokeWidth: 0
joinType: "round"
data:
height: 4
wall.sendToBack()
dm(wall)
separator.bringToFront()
# ADD BACKGROUND
bg = new paper.Path.Rectangle
name: "BACKGROUND"
rectangle: wall.bounds.expand(SiliconeTool.MOLD_BORDER)
data:
height: 0
dm(bg)
bg.sendToBack()
# FOR 2.5D MOLD GENERATION
g = new paper.Group
name: "STL"
children: [bg, wall, seam_step, seam, separator]
g.ungroup()
# scope.normalize_heights()
# scope.update_dimensions()
# scope.siloseam_stylize()
###
Binds hotkeys to wire operations.
Overrides default tool events from PaperDesignTool.
###
toolEvents: ()->
scope = this
hitOptions =
class: paper.Path
stroke: true
fill: true
tolerance: 15
paper.tool.set
onMouseDown: (event)->
hitResults = paper.project.hitTestAll event.point, hitOptions
if _.isEmpty(hitResults)
paper.project.deselectAll()
$(document).trigger("refresh")
onMouseDrag: (event)->
if event.modifiers.shift
a = event.downPoint.subtract(event.point)
a = a.add(paper.view.center)
paper.view.center = a
onKeyUp: (event)->
if not event.modifiers.shift
$(document).trigger "end_shift"
$(document).trigger "paperkeyup", [event.key, event.modifiers, []]
onKeyDown: (event) ->
paths = paper.project.selectedItems
if event.modifiers.shift
$(document).trigger "start_shift"
$(document).trigger "paperkeydown", [event.key, event.modifiers, paths]
if event.key == 'b'
nps = _.map paths, (p)->
np = p.expand
name: "<NAME>"
strokeAlignment: "exterior",
strokeWidth: 1,
strokeOffset: 50
strokeColor: "black"
fillColor: null
joinType: "miter"
return np
if nps.length > 0
unp = nps[0]
nps = nps.slice(0)
if nps.length > 0
_.each nps, (np)->
temp = unp.unite(np)
unp.remove()
np.remove()
unp = temp
dm(unp)
###
Styles the artwork to match the Siloseam color palette
###
@stylize: ()->
# console.log "STYLIZE"
style_set = (name, style)->
matches = paper.project.getItems({name: name})
_.each matches, (m)-> m.set(style)
all = paper.project.getItems({})
_.each all, (m)-> m.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
style_set "SEAM",
fillColor: "#BFDFD1"
strokeColor: "BCBEC0"
strokeWidth: 0.709
style_set "AIRTUBE",
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: 0.709
style_set "AIRTUBE4",
opacity: 1
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: Ruler.in2pts(0.25)
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "AIRTUBE8",
opacity: 1
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: Ruler.in2pts(0.25)
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "OUTLET",
opacity: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "MOLD_WALL",
fillColor: "#D2D2D2"
strokeColor: "#111111"
strokeWidth: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "BLADDER",
fillColor: "#BC519E"
strokeWidth: 0
style_set "BACKGROUND",
fillColor: "#DDD"
strokeColor: "black"
strokeWidth: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(2, 2)
style_set "SEAM_STEP",
fillColor: "white"
strokeColor: "#111111"
strokeWidth: 0
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
matches = paper.project.getItems({name: "SEPARATOR"})
_.each matches, (m)->
transparent_blue = new paper.Color("#2884C6")
transparent_blue.alpha = 0
top = m.clone()
m.set
fillColor: "#E6E7E8"
strokeColor: "#BCBEC0"
strokeWidth: 0.709
top.set
name: "GRADIENT"
fillColor:
gradient:
stops: [['#2884C6'], [transparent_blue, 1]]
radial: false
origin: m.bounds.leftCenter
destination: m.bounds.rightCenter
alpha: 0.38
###
Given an SVG asset url, the extracts all Path objects to the topmost
level of the SVG graph. Other groups are removed.
ops =
url: url of the SVG asset (string, required)
position: where to place paths (paper.Point, default: paper.view.center)
###
addSVG: (ops)->
scope = this
# POSITION HANDLING
if not ops.position
ops.position = paper.view.center
ops.position = ops.position.clone()
console.log "LOADING", ops.url
paper.project.activeLayer.name = "SILOSEAM"
paper.project.importSVG ops.url,
expandShapes: true
insert: false
onError: (item)->
alertify.error "Could not load: " + item
onLoad: (item) ->
# Extract Path and Compound Path Elements
paths = item.getItems
className: (n)->
_.includes ["Path", "CompoundPath"], n
# Attach to Temporary Group and Release
g = new paper.Group
name: "temp"
_.each paths, (p)-> p.parent = g
g.set {position: ops.position}
g.reverseChildren()
g.ungroup()
# Add Interactivity
_.each paths, (p)->
dm(p)
if p.name
p.name = p.name.split("_")[0]
SiliconeTool.stylize()
###
Test: Places SVG asset on canvas with full wire interactivity.
###
test_addSVG: ()->
scope = this
file = "/primitives/example.svg"
console.log "DEFAULT LOAD", file
@addSVG
url: file
# url: "/primitives/primitives_elegant_elle-1.svg"
# url: "/primitives/primitives_elegant_elle-1.svg"
position: paper.view.center
# mat = Material.detectMaterial(path)
# w = new WirePath(scope.paper, value)
| true |
class window.PaperDesignTool extends ProxySTL
constructor: (ops)->
# super ops
console.log "✓ Paperjs Functionality"
this.name = "siloseam_design"
gui.add this, "name"
gui.add this, "save_svg"
@setup(ops)
setup: (ops)->
canvas = ops.canvas[0]
console.log $('#sandbox').height()
$(canvas)
.attr('width', $("#sandbox").width())
.attr('height', $("#sandbox").height())
window.paper = new paper.PaperScope
loadCustomLibraries()
paper.setup canvas
paper.view.zoom = 2.5
paper.tool = new paper.Tool
name: "default_tool"
$(canvas)
.attr('width', $("#sandbox").width())
.attr('height', $("#sandbox").height())
@toolEvents()
toolEvents: ()->
return
save_svg: ()->
prev = paper.view.zoom;
console.log("Exporting file as SVG");
paper.view.zoom = 1;
paper.view.update();
bg = paper.project.getItems({"name": "BACKGROUND"})
g = new paper.Group
name: "temp"
children: paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
g.pivot = g.bounds.topLeft
prior = g.position
g.position = new paper.Point(0, 0)
if bg.length > 0
exp = paper.project.exportSVG
bounds: g.bounds
asString: true,
precision: 5
else
exp = paper.project.exportSVG
asString: true,
precision: 5
g.position = prior
g.ungroup()
saveAs(new Blob([exp], {type:"application/svg+xml"}), @name + ".svg")
paper.view.zoom = prev
clear: ->
paper.project.clear()
###
Direct Manipulation Interactions
###
window.dm = (p)->
p.set
onMouseDown: (e)->
this.touched = true
this.selected = not this.selected
this.update_dimensions()
update_dimensions: (e)->
if dim
if this.data and this.data.height
z = this.data.height
else
z = 0
dim.set(this.bounds.height, this.bounds.width, z)
return
onMouseDrag: (e)->
this.position = this.position.add(e.delta)
onMouseUp: (e)->
return
clone_wire: ()->
x = dm(this.clone())
x.name = this.name
return x
return p
class window.SiliconeTool extends PaperDesignTool
@ROTATE_STEP: 15
@TRANSLATE_STEP: 10
@AIRTUBE_TOLERANCE: 0.5
@SEAM_ALLOWANCE: Ruler.mm2pts(4)#6.5) #8 mm
@SEAM_STEP: Ruler.mm2pts(2) #1 mm
@STEP_TOLERANCE: Ruler.mm2pts(2) #1 mm
@MOLD_WALL: Ruler.mm2pts(3) #3 mm
@SEPARATOR_PAD: Ruler.mm2pts(6) #5 mm
@MOLD_BORDER: Ruler.mm2pts(2) #2 mm
###
To inherit parent class functionality, super ops must be the first line.
This class hosts the logic for taking SVG Paths and interpreting them as wires.
###
constructor: (ops)->
super ops
console.log "✓ SiliconeTool Functionality"
@test_addSVG()
@keybindings()
keybindings: ()->
scope = this
# SILOSEAM KEYBINDINGS
$(document).on "paperkeydown", {}, (event, key, modifiers, paths)->
if modifiers.shift
# SHIFT CLUTCH
action = switch key
# OVERPRINT ALL
when "!"
paths = paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
(p)->
p.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
else
# REGULAR
action = switch key
when "3"
SiliconeTool.stylize()
false
when "a" then (p)->
name = $('[data-key="a"]').find('select').val()
p.name = name
$(document).trigger("refresh")
# OVERPRINT
# when "1" then (p)-> p.set
# fillColor: null
# strokeWidth: 1
# strokeColor: "black"
false
when "1"
gradients = paper.project.getItems({name: "GRADIENT"})
_.each gradients, (g)-> g.remove()
paths = paper.project.getItems
className: (x)-> _.includes(["Path", "CompoundPath"], x)
_.each paths, (p)->
p.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
false
when "2"
bladder = scope.connect_to_tube(paths)
if bladder
console.log "# OF BLADDERS", bladder.length
scope.bladder_seams(bladder)
false
if action and _.isFunction(action)
_.each paths, action
connect_to_tube: (paths)->
# JOIN INLET + BLADDER
# IF ANY AIRTUBES INTERSECT, THEN UNITE
airtubes = _.filter paths, (p)-> p.name.includes("AIRTUBE")
bladders = _.filter paths, (p)-> p.name.includes("BLADDER")
if airtubes.length == 0 or bladders.length == 0
alertify.error "You must select both a bladder and airtube geometry"
return null
else
nb = null
_.each airtubes, (airtube)->
airtube_material = airtube.name.split("_")[0]
airtube_material = window.materials.get(airtube_material)
airtube.applyMatrix = true
r = new paper.Path.Rectangle
size: [airtube.length, airtube_material.strokeWidth * SiliconeTool.AIRTUBE_TOLERANCE]
position: airtube.position.clone()
strokeColor: "black"
strokeWidth: 0.5
r.rotation = airtube.getTangentAt(airtube.length/2).angle
makeClip = (tube)->
clip = new paper.Path.Rectangle
name: "SUB"
size: [tube.length, 500]
fillColor: "red"
opacity: 0
strokeWidth: 0.5
clip.rotation = tube.getTangentAt(tube.length/2).angle
dir = tube.getTangentAt(0)
dir.length = -tube.length/2
clip.position = tube.firstSegment.point.add(dir)
return clip
sub = makeClip(airtube)
if bladders[0].intersects(sub)
sub.remove()
airtube.reverse()
makeClip(airtube)
_.each bladders, (bladder)->
if airtube.intersects(bladder)
nb = bladder.unite(r)
nb.name="PI:NAME:<NAME>END_PIADDER"
bladder.remove()
dm(nb)
r.remove()
airtube.name = "OUTLET"
airtube.airtube_allowance = airtube_material.strokeWidth * SiliconeTool.AIRTUBE_TOLERANCE
return nb
bladder_seams: (p)->
scope = this
# CREATE SEAM
if p.name.includes("PI:NAME:<NAME>END_PI PI:NAME:<NAME>END_PI")
separator = p
separator.set
name: "SEPARATOR"
fillColor: "#BC519E"
strokeWidth: 0
seam = separator.expand
name: "SEAM"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.SEAM_ALLOWANCE
fillColor: "#BFDFD1"
strokeWidth: 0
joinType: "miter"
data:
height: 0
seam_step = seam.expand
name: "SEAM_STEP"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.SEAM_STEP
fillColor: "blue"
strokeWidth: 0
joinType: "miter"
data:
height: 2
separator.sendToBack()
seam.sendToBack()
seam_step.sendToBack()
subs = paper.project.getItems
name: "SUB"
_.each subs, (s)->
if seam.intersects(s)
ns = seam.subtract(s)
ns.name = seam.name
seam.remove()
seam = dm(ns)
if seam_step.intersects(s)
ns = seam_step.subtract(s)
ns.name = seam_step.name
seam_step.remove()
seam_step = dm(ns)
_.each subs, (s)-> s.remove()
separator.bringToFront()
SiliconeTool.registration_site(separator, seam, seam_step)
@registration_site: (separator, seam, seam_step)->
outlets = paper.project.getItems
name: "OUTLET"
# ADD REGISTRATION SITE
_.each outlets, (outlet)->
outlet.bringToFront()
outlet.fillColor = "yellow"
pt = outlet.firstSegment.point
tang = outlet.getTangentAt(0)
norm = outlet.getNormalAt(0)
norm.length = SiliconeTool.SEPARATOR_PAD
tang.length = SiliconeTool.SEPARATOR_PAD
pad = new paper.Path.Rectangle
name: "PAD"
size: [SiliconeTool.SEPARATOR_PAD, outlet.airtube_allowance * 3]
fillColor: "orange"
position: outlet.position
pad.rotation = outlet.getTangentAt(outlet.length/2).angle
tang.length = -SiliconeTool.SEPARATOR_PAD/2
pad.position = tang.add(outlet.firstSegment.point)
outlet.remove()
big_pad = pad.expand
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.STEP_TOLERANCE
joinType: "miter"
fillColor: "red"
nsep = dm(separator.unite(pad))
separator.remove()
separator = nsep
nss = dm(seam_step.unite(big_pad))
seam_step.remove()
seam_step = nss
ns = dm(seam.subtract(big_pad))
seam.remove()
seam = ns
seam.sendToBack()
seam_step.sendToBack()
pad.remove()
big_pad.remove()
SiliconeTool.mold_wall(separator, seam, seam_step)
@mold_wall: (separator, seam, seam_step)->
wall = seam_step.expand
name: "MOLD_WALL"
strokeAlignment: "exterior",
strokeOffset: SiliconeTool.MOLD_WALL
fillColor: "white"
strokeWidth: 0
joinType: "round"
data:
height: 4
wall.sendToBack()
dm(wall)
separator.bringToFront()
# ADD BACKGROUND
bg = new paper.Path.Rectangle
name: "BACKGROUND"
rectangle: wall.bounds.expand(SiliconeTool.MOLD_BORDER)
data:
height: 0
dm(bg)
bg.sendToBack()
# FOR 2.5D MOLD GENERATION
g = new paper.Group
name: "STL"
children: [bg, wall, seam_step, seam, separator]
g.ungroup()
# scope.normalize_heights()
# scope.update_dimensions()
# scope.siloseam_stylize()
###
Binds hotkeys to wire operations.
Overrides default tool events from PaperDesignTool.
###
toolEvents: ()->
scope = this
hitOptions =
class: paper.Path
stroke: true
fill: true
tolerance: 15
paper.tool.set
onMouseDown: (event)->
hitResults = paper.project.hitTestAll event.point, hitOptions
if _.isEmpty(hitResults)
paper.project.deselectAll()
$(document).trigger("refresh")
onMouseDrag: (event)->
if event.modifiers.shift
a = event.downPoint.subtract(event.point)
a = a.add(paper.view.center)
paper.view.center = a
onKeyUp: (event)->
if not event.modifiers.shift
$(document).trigger "end_shift"
$(document).trigger "paperkeyup", [event.key, event.modifiers, []]
onKeyDown: (event) ->
paths = paper.project.selectedItems
if event.modifiers.shift
$(document).trigger "start_shift"
$(document).trigger "paperkeydown", [event.key, event.modifiers, paths]
if event.key == 'b'
nps = _.map paths, (p)->
np = p.expand
name: "PI:NAME:<NAME>END_PI"
strokeAlignment: "exterior",
strokeWidth: 1,
strokeOffset: 50
strokeColor: "black"
fillColor: null
joinType: "miter"
return np
if nps.length > 0
unp = nps[0]
nps = nps.slice(0)
if nps.length > 0
_.each nps, (np)->
temp = unp.unite(np)
unp.remove()
np.remove()
unp = temp
dm(unp)
###
Styles the artwork to match the Siloseam color palette
###
@stylize: ()->
# console.log "STYLIZE"
style_set = (name, style)->
matches = paper.project.getItems({name: name})
_.each matches, (m)-> m.set(style)
all = paper.project.getItems({})
_.each all, (m)-> m.set
fillColor: null
strokeWidth: 1
strokeColor: "black"
shadowBlur: 0
shadowColor: null
style_set "SEAM",
fillColor: "#BFDFD1"
strokeColor: "BCBEC0"
strokeWidth: 0.709
style_set "AIRTUBE",
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: 0.709
style_set "AIRTUBE4",
opacity: 1
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: Ruler.in2pts(0.25)
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "AIRTUBE8",
opacity: 1
fillColor: "#F1F2F2"
strokeColor: "#BCBEC0"
strokeWidth: Ruler.in2pts(0.25)
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "OUTLET",
opacity: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "MOLD_WALL",
fillColor: "#D2D2D2"
strokeColor: "#111111"
strokeWidth: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
style_set "BLADDER",
fillColor: "#BC519E"
strokeWidth: 0
style_set "BACKGROUND",
fillColor: "#DDD"
strokeColor: "black"
strokeWidth: 1
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(2, 2)
style_set "SEAM_STEP",
fillColor: "white"
strokeColor: "#111111"
strokeWidth: 0
shadowColor: new paper.Color(0, 0, 0, 0.3),
shadowBlur: 5,
shadowOffset: new paper.Point(1, 1)
matches = paper.project.getItems({name: "SEPARATOR"})
_.each matches, (m)->
transparent_blue = new paper.Color("#2884C6")
transparent_blue.alpha = 0
top = m.clone()
m.set
fillColor: "#E6E7E8"
strokeColor: "#BCBEC0"
strokeWidth: 0.709
top.set
name: "GRADIENT"
fillColor:
gradient:
stops: [['#2884C6'], [transparent_blue, 1]]
radial: false
origin: m.bounds.leftCenter
destination: m.bounds.rightCenter
alpha: 0.38
###
Given an SVG asset url, the extracts all Path objects to the topmost
level of the SVG graph. Other groups are removed.
ops =
url: url of the SVG asset (string, required)
position: where to place paths (paper.Point, default: paper.view.center)
###
addSVG: (ops)->
scope = this
# POSITION HANDLING
if not ops.position
ops.position = paper.view.center
ops.position = ops.position.clone()
console.log "LOADING", ops.url
paper.project.activeLayer.name = "SILOSEAM"
paper.project.importSVG ops.url,
expandShapes: true
insert: false
onError: (item)->
alertify.error "Could not load: " + item
onLoad: (item) ->
# Extract Path and Compound Path Elements
paths = item.getItems
className: (n)->
_.includes ["Path", "CompoundPath"], n
# Attach to Temporary Group and Release
g = new paper.Group
name: "temp"
_.each paths, (p)-> p.parent = g
g.set {position: ops.position}
g.reverseChildren()
g.ungroup()
# Add Interactivity
_.each paths, (p)->
dm(p)
if p.name
p.name = p.name.split("_")[0]
SiliconeTool.stylize()
###
Test: Places SVG asset on canvas with full wire interactivity.
###
test_addSVG: ()->
scope = this
file = "/primitives/example.svg"
console.log "DEFAULT LOAD", file
@addSVG
url: file
# url: "/primitives/primitives_elegant_elle-1.svg"
# url: "/primitives/primitives_elegant_elle-1.svg"
position: paper.view.center
# mat = Material.detectMaterial(path)
# w = new WirePath(scope.paper, value)
|
[
{
"context": "lotMap: interactive plot of a genetic marker map\n# Karl W Broman\n\niplotMap = (widgetdiv, data, chartOpts) ->\n\n ",
"end": 68,
"score": 0.9998856782913208,
"start": 55,
"tag": "NAME",
"value": "Karl W Broman"
}
] | inst/htmlwidgets/lib/qtlcharts/iplotMap.coffee | Alanocallaghan/qtlcharts | 0 | # iplotMap: interactive plot of a genetic marker map
# Karl W Broman
iplotMap = (widgetdiv, data, chartOpts) ->
# chartOpts start
width = chartOpts?.width ? 1000 # width of chart in pixels
height = chartOpts?.height ? 600 # height of chart in pixels
margin = chartOpts?.margin ? {left:60, top:40, right:100, bottom: 40, inner:10} # margins in pixels (left, top, right, bottom, inner)
axispos = chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel)
titlepos = chartOpts?.titlepos ? 20 # position of chart title in pixels
ylim = chartOpts?.ylim ? null # y-axis limits
nyticks = chartOpts?.nyticks ? 5 # no. ticks on y-axis
yticks = chartOpts?.yticks ? null # vector of tick positions on y-axis
xlineOpts = chartOpts?.xlineOpts ? {color:"#cdcdcd", width:5} # color and width of vertical lines
tickwidth = chartOpts?.tickwidth ? 10 # width of tick marks at markers, in pixels
rectcolor = chartOpts?.rectcolor ? "#E6E6E6" # color of background rectangle
linecolor = chartOpts?.linecolor ? "slateblue" # color of lines
linecolorhilit = chartOpts?.linecolorhilit ? "Orchid" # color of lines, when highlighted
linewidth = chartOpts?.linewidth ? 3 # width of lines
title = chartOpts?.title ? "" # title for chart
xlab = chartOpts?.xlab ? "Chromosome" # x-axis label
ylab = chartOpts?.ylab ? "Position (cM)" # y-axis label
shiftStart = chartOpts?.shiftStart ? false # if true, shift the start of chromosomes to 0
horizontal = chartOpts?.horizontal ? false # if true, have chromosomes on vertical axis and positions horizontally
# chartOpts end
chartdivid = chartOpts?.chartdivid ? 'chart'
widgetdivid = d3.select(widgetdiv).attr('id')
# make sure list args have all necessary bits
margin = d3panels.check_listarg_v_default(margin, {left:60, top:40, right:100, bottom: 40, inner:10})
axispos = d3panels.check_listarg_v_default(axispos, {xtitle:25, ytitle:30, xlabel:5, ylabel:5})
mychart = d3panels.mapchart({
height:height
width:width
margin:margin
axispos:axispos
titlepos:titlepos
ylim:ylim
yticks:yticks
nyticks:nyticks
xlineOpts:xlineOpts
tickwidth:tickwidth
rectcolor:rectcolor
linecolor:linecolor
linecolorhilit:linecolorhilit
linewidth:linewidth
title:title
xlab:xlab
ylab:ylab
horizontal:horizontal
shiftStart:shiftStart
tipclass:widgetdivid})
# select htmlwidget div and grab its ID
div = d3.select(widgetdiv)
mychart(div.select("svg"), data)
svg = mychart.svg()
##############################
# code for marker search box for iplotMap
##############################
# create marker tip
martip = d3.tip()
.attr('class', "d3-tip #{widgetdivid}")
.html((d) ->
pos = d3.format(".1f")(data.pos[data.marker.indexOf(d)])
"#{d} (#{pos})")
.direction(() ->
return 'n' if horizontal
'e')
.offset(() ->
return [-10,0] if horizontal
[0,10])
svg.call(martip)
clean_marker_name = (markername) ->
markername.replace(".", "\\.")
.replace("#", "\\#")
.replace("/", "\\/")
# grab selected marker from the search box
selectedMarker = ""
$("div#markerinput_#{widgetdivid}").submit () ->
newSelection = document.getElementById("marker_#{widgetdivid}").value
event.preventDefault()
unless selectedMarker == ""
div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolor)
martip.hide()
if newSelection != ""
if data.marker.indexOf(newSelection) >= 0
selectedMarker = newSelection
line = div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolorhilit)
martip.show(line.datum(), line.node())
div.select("a#currentmarker")
.text("")
return true
else
div.select("a#currentmarker")
.text("Marker \"#{newSelection}\" not found")
return false
# autocomplete
$("input#marker_#{widgetdivid}").autocomplete({
autoFocus: true,
source: (request, response) ->
matches = $.map(data.marker, (tag) ->
tag if tag.toUpperCase().indexOf(request.term.toUpperCase()) is 0)
response(matches)
,
select: (event, ui) ->
$("input#marker_#{widgetdivid}").val(ui.item.label)
$("input#submit_#{widgetdivid}").submit()})
# grayed out "Marker name"
$("input#marker_#{widgetdivid}").each(() ->
$("div.searchbox#markerinput_#{widgetdivid}").addClass('inactive')
$(this)
.data('default', $(this).val())
.focus(() ->
$("div.searchbox#markerinput_#{widgetdivid}").removeClass('inactive')
$(this).val('') if($(this).val() is $(this).data('default') or $(this).val() is '')
)
.blur(() ->
if($(this).val() is '')
$("div.searchbox#markerinput_#{widgetdivid}").addClass('inactive')
$(this).val($(this).data('default'))
)
)
# on hover, remove tool tip from marker search
markerSelect = mychart.markerSelect()
markerSelect.on "mouseover", (d) ->
unless selectedMarker == ""
unless selectedMarker == d # de-highlight (if hovering over something other than the selected marker)
div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolor)
martip.hide()
if chartOpts.heading?
d3.select("div#htmlwidget_container")
.insert("h2", ":first-child")
.html(chartOpts.heading)
.style("font-family", "sans-serif")
if chartOpts.caption?
d3.select("body")
.append("p")
.attr("class", "caption")
.html(chartOpts.caption)
if chartOpts.footer?
d3.select("body")
.append("div")
.html(chartOpts.footer)
.style("font-family", "sans-serif")
add_search_box = (widgetdiv) ->
div = d3.select(widgetdiv)
widgetdivid = div.attr("id")
form = div.append("div")
.attr("class", "searchbox")
.attr("id", "markerinput_#{widgetdivid}")
.append("form")
.attr("name", "markerinput_#{widgetdivid}")
form.append("input")
.attr("id", "marker_#{widgetdivid}")
.attr("type", "text")
.attr("value", "Marker name")
.attr("name", "marker")
form.append("input")
.attr("type", "submit")
.attr("id", "submit_#{widgetdivid}")
.attr("value", "Submit")
form.append("a")
.attr("id", "currentmarker")
| 198358 | # iplotMap: interactive plot of a genetic marker map
# <NAME>
iplotMap = (widgetdiv, data, chartOpts) ->
# chartOpts start
width = chartOpts?.width ? 1000 # width of chart in pixels
height = chartOpts?.height ? 600 # height of chart in pixels
margin = chartOpts?.margin ? {left:60, top:40, right:100, bottom: 40, inner:10} # margins in pixels (left, top, right, bottom, inner)
axispos = chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel)
titlepos = chartOpts?.titlepos ? 20 # position of chart title in pixels
ylim = chartOpts?.ylim ? null # y-axis limits
nyticks = chartOpts?.nyticks ? 5 # no. ticks on y-axis
yticks = chartOpts?.yticks ? null # vector of tick positions on y-axis
xlineOpts = chartOpts?.xlineOpts ? {color:"#cdcdcd", width:5} # color and width of vertical lines
tickwidth = chartOpts?.tickwidth ? 10 # width of tick marks at markers, in pixels
rectcolor = chartOpts?.rectcolor ? "#E6E6E6" # color of background rectangle
linecolor = chartOpts?.linecolor ? "slateblue" # color of lines
linecolorhilit = chartOpts?.linecolorhilit ? "Orchid" # color of lines, when highlighted
linewidth = chartOpts?.linewidth ? 3 # width of lines
title = chartOpts?.title ? "" # title for chart
xlab = chartOpts?.xlab ? "Chromosome" # x-axis label
ylab = chartOpts?.ylab ? "Position (cM)" # y-axis label
shiftStart = chartOpts?.shiftStart ? false # if true, shift the start of chromosomes to 0
horizontal = chartOpts?.horizontal ? false # if true, have chromosomes on vertical axis and positions horizontally
# chartOpts end
chartdivid = chartOpts?.chartdivid ? 'chart'
widgetdivid = d3.select(widgetdiv).attr('id')
# make sure list args have all necessary bits
margin = d3panels.check_listarg_v_default(margin, {left:60, top:40, right:100, bottom: 40, inner:10})
axispos = d3panels.check_listarg_v_default(axispos, {xtitle:25, ytitle:30, xlabel:5, ylabel:5})
mychart = d3panels.mapchart({
height:height
width:width
margin:margin
axispos:axispos
titlepos:titlepos
ylim:ylim
yticks:yticks
nyticks:nyticks
xlineOpts:xlineOpts
tickwidth:tickwidth
rectcolor:rectcolor
linecolor:linecolor
linecolorhilit:linecolorhilit
linewidth:linewidth
title:title
xlab:xlab
ylab:ylab
horizontal:horizontal
shiftStart:shiftStart
tipclass:widgetdivid})
# select htmlwidget div and grab its ID
div = d3.select(widgetdiv)
mychart(div.select("svg"), data)
svg = mychart.svg()
##############################
# code for marker search box for iplotMap
##############################
# create marker tip
martip = d3.tip()
.attr('class', "d3-tip #{widgetdivid}")
.html((d) ->
pos = d3.format(".1f")(data.pos[data.marker.indexOf(d)])
"#{d} (#{pos})")
.direction(() ->
return 'n' if horizontal
'e')
.offset(() ->
return [-10,0] if horizontal
[0,10])
svg.call(martip)
clean_marker_name = (markername) ->
markername.replace(".", "\\.")
.replace("#", "\\#")
.replace("/", "\\/")
# grab selected marker from the search box
selectedMarker = ""
$("div#markerinput_#{widgetdivid}").submit () ->
newSelection = document.getElementById("marker_#{widgetdivid}").value
event.preventDefault()
unless selectedMarker == ""
div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolor)
martip.hide()
if newSelection != ""
if data.marker.indexOf(newSelection) >= 0
selectedMarker = newSelection
line = div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolorhilit)
martip.show(line.datum(), line.node())
div.select("a#currentmarker")
.text("")
return true
else
div.select("a#currentmarker")
.text("Marker \"#{newSelection}\" not found")
return false
# autocomplete
$("input#marker_#{widgetdivid}").autocomplete({
autoFocus: true,
source: (request, response) ->
matches = $.map(data.marker, (tag) ->
tag if tag.toUpperCase().indexOf(request.term.toUpperCase()) is 0)
response(matches)
,
select: (event, ui) ->
$("input#marker_#{widgetdivid}").val(ui.item.label)
$("input#submit_#{widgetdivid}").submit()})
# grayed out "Marker name"
$("input#marker_#{widgetdivid}").each(() ->
$("div.searchbox#markerinput_#{widgetdivid}").addClass('inactive')
$(this)
.data('default', $(this).val())
.focus(() ->
$("div.searchbox#markerinput_#{widgetdivid}").removeClass('inactive')
$(this).val('') if($(this).val() is $(this).data('default') or $(this).val() is '')
)
.blur(() ->
if($(this).val() is '')
$("div.searchbox#markerinput_#{widgetdivid}").addClass('inactive')
$(this).val($(this).data('default'))
)
)
# on hover, remove tool tip from marker search
markerSelect = mychart.markerSelect()
markerSelect.on "mouseover", (d) ->
unless selectedMarker == ""
unless selectedMarker == d # de-highlight (if hovering over something other than the selected marker)
div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolor)
martip.hide()
if chartOpts.heading?
d3.select("div#htmlwidget_container")
.insert("h2", ":first-child")
.html(chartOpts.heading)
.style("font-family", "sans-serif")
if chartOpts.caption?
d3.select("body")
.append("p")
.attr("class", "caption")
.html(chartOpts.caption)
if chartOpts.footer?
d3.select("body")
.append("div")
.html(chartOpts.footer)
.style("font-family", "sans-serif")
add_search_box = (widgetdiv) ->
div = d3.select(widgetdiv)
widgetdivid = div.attr("id")
form = div.append("div")
.attr("class", "searchbox")
.attr("id", "markerinput_#{widgetdivid}")
.append("form")
.attr("name", "markerinput_#{widgetdivid}")
form.append("input")
.attr("id", "marker_#{widgetdivid}")
.attr("type", "text")
.attr("value", "Marker name")
.attr("name", "marker")
form.append("input")
.attr("type", "submit")
.attr("id", "submit_#{widgetdivid}")
.attr("value", "Submit")
form.append("a")
.attr("id", "currentmarker")
| true | # iplotMap: interactive plot of a genetic marker map
# PI:NAME:<NAME>END_PI
iplotMap = (widgetdiv, data, chartOpts) ->
# chartOpts start
width = chartOpts?.width ? 1000 # width of chart in pixels
height = chartOpts?.height ? 600 # height of chart in pixels
margin = chartOpts?.margin ? {left:60, top:40, right:100, bottom: 40, inner:10} # margins in pixels (left, top, right, bottom, inner)
axispos = chartOpts?.axispos ? {xtitle:25, ytitle:30, xlabel:5, ylabel:5} # position of axis labels in pixels (xtitle, ytitle, xlabel, ylabel)
titlepos = chartOpts?.titlepos ? 20 # position of chart title in pixels
ylim = chartOpts?.ylim ? null # y-axis limits
nyticks = chartOpts?.nyticks ? 5 # no. ticks on y-axis
yticks = chartOpts?.yticks ? null # vector of tick positions on y-axis
xlineOpts = chartOpts?.xlineOpts ? {color:"#cdcdcd", width:5} # color and width of vertical lines
tickwidth = chartOpts?.tickwidth ? 10 # width of tick marks at markers, in pixels
rectcolor = chartOpts?.rectcolor ? "#E6E6E6" # color of background rectangle
linecolor = chartOpts?.linecolor ? "slateblue" # color of lines
linecolorhilit = chartOpts?.linecolorhilit ? "Orchid" # color of lines, when highlighted
linewidth = chartOpts?.linewidth ? 3 # width of lines
title = chartOpts?.title ? "" # title for chart
xlab = chartOpts?.xlab ? "Chromosome" # x-axis label
ylab = chartOpts?.ylab ? "Position (cM)" # y-axis label
shiftStart = chartOpts?.shiftStart ? false # if true, shift the start of chromosomes to 0
horizontal = chartOpts?.horizontal ? false # if true, have chromosomes on vertical axis and positions horizontally
# chartOpts end
chartdivid = chartOpts?.chartdivid ? 'chart'
widgetdivid = d3.select(widgetdiv).attr('id')
# make sure list args have all necessary bits
margin = d3panels.check_listarg_v_default(margin, {left:60, top:40, right:100, bottom: 40, inner:10})
axispos = d3panels.check_listarg_v_default(axispos, {xtitle:25, ytitle:30, xlabel:5, ylabel:5})
mychart = d3panels.mapchart({
height:height
width:width
margin:margin
axispos:axispos
titlepos:titlepos
ylim:ylim
yticks:yticks
nyticks:nyticks
xlineOpts:xlineOpts
tickwidth:tickwidth
rectcolor:rectcolor
linecolor:linecolor
linecolorhilit:linecolorhilit
linewidth:linewidth
title:title
xlab:xlab
ylab:ylab
horizontal:horizontal
shiftStart:shiftStart
tipclass:widgetdivid})
# select htmlwidget div and grab its ID
div = d3.select(widgetdiv)
mychart(div.select("svg"), data)
svg = mychart.svg()
##############################
# code for marker search box for iplotMap
##############################
# create marker tip
martip = d3.tip()
.attr('class', "d3-tip #{widgetdivid}")
.html((d) ->
pos = d3.format(".1f")(data.pos[data.marker.indexOf(d)])
"#{d} (#{pos})")
.direction(() ->
return 'n' if horizontal
'e')
.offset(() ->
return [-10,0] if horizontal
[0,10])
svg.call(martip)
clean_marker_name = (markername) ->
markername.replace(".", "\\.")
.replace("#", "\\#")
.replace("/", "\\/")
# grab selected marker from the search box
selectedMarker = ""
$("div#markerinput_#{widgetdivid}").submit () ->
newSelection = document.getElementById("marker_#{widgetdivid}").value
event.preventDefault()
unless selectedMarker == ""
div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolor)
martip.hide()
if newSelection != ""
if data.marker.indexOf(newSelection) >= 0
selectedMarker = newSelection
line = div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolorhilit)
martip.show(line.datum(), line.node())
div.select("a#currentmarker")
.text("")
return true
else
div.select("a#currentmarker")
.text("Marker \"#{newSelection}\" not found")
return false
# autocomplete
$("input#marker_#{widgetdivid}").autocomplete({
autoFocus: true,
source: (request, response) ->
matches = $.map(data.marker, (tag) ->
tag if tag.toUpperCase().indexOf(request.term.toUpperCase()) is 0)
response(matches)
,
select: (event, ui) ->
$("input#marker_#{widgetdivid}").val(ui.item.label)
$("input#submit_#{widgetdivid}").submit()})
# grayed out "Marker name"
$("input#marker_#{widgetdivid}").each(() ->
$("div.searchbox#markerinput_#{widgetdivid}").addClass('inactive')
$(this)
.data('default', $(this).val())
.focus(() ->
$("div.searchbox#markerinput_#{widgetdivid}").removeClass('inactive')
$(this).val('') if($(this).val() is $(this).data('default') or $(this).val() is '')
)
.blur(() ->
if($(this).val() is '')
$("div.searchbox#markerinput_#{widgetdivid}").addClass('inactive')
$(this).val($(this).data('default'))
)
)
# on hover, remove tool tip from marker search
markerSelect = mychart.markerSelect()
markerSelect.on "mouseover", (d) ->
unless selectedMarker == ""
unless selectedMarker == d # de-highlight (if hovering over something other than the selected marker)
div.select("line##{clean_marker_name(selectedMarker)}")
.attr("stroke", linecolor)
martip.hide()
if chartOpts.heading?
d3.select("div#htmlwidget_container")
.insert("h2", ":first-child")
.html(chartOpts.heading)
.style("font-family", "sans-serif")
if chartOpts.caption?
d3.select("body")
.append("p")
.attr("class", "caption")
.html(chartOpts.caption)
if chartOpts.footer?
d3.select("body")
.append("div")
.html(chartOpts.footer)
.style("font-family", "sans-serif")
add_search_box = (widgetdiv) ->
div = d3.select(widgetdiv)
widgetdivid = div.attr("id")
form = div.append("div")
.attr("class", "searchbox")
.attr("id", "markerinput_#{widgetdivid}")
.append("form")
.attr("name", "markerinput_#{widgetdivid}")
form.append("input")
.attr("id", "marker_#{widgetdivid}")
.attr("type", "text")
.attr("value", "Marker name")
.attr("name", "marker")
form.append("input")
.attr("type", "submit")
.attr("id", "submit_#{widgetdivid}")
.attr("value", "Submit")
form.append("a")
.attr("id", "currentmarker")
|
[
{
"context": "son to include useful information.\n#\n# Author:\n# ParadoxGuitarist\n\nhellos = [\n \"Greetings human.\", \n \"wut up\",\n ",
"end": 289,
"score": 0.955964207649231,
"start": 273,
"tag": "NAME",
"value": "ParadoxGuitarist"
},
{
"context": "\nhellos = [\n \"Greetings human.\", \n \"wut up\",\n \"Holla\",\n \"'sup homeslice\",\n \"Hi!\",\n \"Ahoy, matey!\",\n",
"end": 345,
"score": 0.9990707039833069,
"start": 340,
"tag": "NAME",
"value": "Holla"
},
{
"context": " \"Holla\",\n \"'sup homeslice\",\n \"Hi!\",\n \"Ahoy, matey!\",\n \"Aloha\",\n \"Hola\",\n \"Que pasa\",\n \"Gutten ",
"end": 390,
"score": 0.6205960512161255,
"start": 387,
"tag": "NAME",
"value": "ate"
},
{
"context": " \"'sup homeslice\",\n \"Hi!\",\n \"Ahoy, matey!\",\n \"Aloha\",\n \"Hola\",\n \"Que pasa\",\n \"Gutten tag\",\n \"Ciao",
"end": 403,
"score": 0.7333542704582214,
"start": 398,
"tag": "NAME",
"value": "Aloha"
},
{
"context": "eslice\",\n \"Hi!\",\n \"Ahoy, matey!\",\n \"Aloha\",\n \"Hola\",\n \"Que pasa\",\n \"Gutten tag\",\n \"Ciao\",\n \"Konn",
"end": 413,
"score": 0.6380957365036011,
"start": 409,
"tag": "NAME",
"value": "Hola"
},
{
"context": "loha\",\n \"Hola\",\n \"Que pasa\",\n \"Gutten tag\",\n \"Ciao\",\n \"Konnichiwa\",\n \"Yo!\",\n \"GOOOOOOOOOD MORNING",
"end": 453,
"score": 0.9867993593215942,
"start": 449,
"tag": "NAME",
"value": "Ciao"
},
{
"context": "Hola\",\n \"Que pasa\",\n \"Gutten tag\",\n \"Ciao\",\n \"Konnichiwa\",\n \"Yo!\",\n \"GOOOOOOOOOD MORNING VIETNAM!\",\n \"'",
"end": 469,
"score": 0.99979567527771,
"start": 459,
"tag": "NAME",
"value": "Konnichiwa"
},
{
"context": "> \n msg.send \"Perhaps, you are referring to me, Harambot, brought back to life in this form: a chat bot. #",
"end": 1899,
"score": 0.7313003540039062,
"start": 1891,
"tag": "NAME",
"value": "Harambot"
}
] | src/harambe.coffee | ptobias16/hubot-harambe | 0 | # Description
# Silly scripts (usually automatic responses) that provide some spice to hubot.
#
# Commands:
# hubot list lists - returns a list of lists that can be returned
#
# Notes:
# You should edit /src/lists.json to include useful information.
#
# Author:
# ParadoxGuitarist
hellos = [
"Greetings human.",
"wut up",
"Holla",
"'sup homeslice",
"Hi!",
"Ahoy, matey!",
"Aloha",
"Hola",
"Que pasa",
"Gutten tag",
"Ciao",
"Konnichiwa",
"Yo!",
"GOOOOOOOOOD MORNING VIETNAM!",
"'Ello gov'nor!",
"This call may be recorded for training purposes.",
"How you doin'?",
"https://media.giphy.com/media/ASd0Ukj0y3qMM/giphy.gif",
"Myohmyohmyohmy"
]
papercount = [
"https://media.tenor.com/images/6cfa7a41ac8e289f5b3991f414429022/tenor.gif",
"https://media.giphy.com/media/tTMhcdiREv2W4/giphy.gif",
"https://media.giphy.com/media/aXThX9ftrHnji/giphy.gif",
"https://media1.giphy.com/media/m77wEsvF99FyU/giphy.gif",
"https://media3.giphy.com/media/l2Jeal95KEUTLzNpS/giphy.gif",
"https://media4.giphy.com/media/Nyyg06iiTiKli/200w.webp#21-grid1",
"https://media.giphy.com/media/310wCPV0kDUuk/giphy.gif",
"https://68.media.tumblr.com/f270e4494aa01cd26b52c699e77fdd98/tumblr_ml6ef4HvIp1rs9keio1_400.gif"
]
nooo = [
'http://www.nerdist.com/wp-content/uploads/2014/09/Vader-noooo.gif',
'http://www.nooooooooooooooo.com/vader.jpg',
'http://i.imgur.com/fE18keE.gif'
]
lists = require('./lists.json')
module.exports = (robot) ->
robot.hear /hello|greeting|good morning|good evening|aloha|hola|hi haramb/i, (msg) ->
msg.send msg.random hellos
robot.hear /paper count|papercount/i, (msg) ->
msg.send msg.random papercount
robot.hear /orly/i, (msg) ->
msg.send "yarly"
robot.hear /\bno{3,}\b/i, (msg) ->
msg.send msg.random nooo
robot.hear /harambe/i, (msg) ->
msg.send "Perhaps, you are referring to me, Harambot, brought back to life in this form: a chat bot. #NeverForget."
robot.respond /list (.*)$/i, (msg) ->
key = msg.match[1].toLowerCase().split(" ").join("-")
msg.send lists[key]
robot.hear /badge/i, (msg) ->
msg.send "Badges? BADGES?!? We Don't Need No Stink'n BADGES!!!!"
robot.hear /it's working|its working|it works/i, (msg) ->
msg.send "https://media.giphy.com/media/GIEXgLDfghUSQ/giphy.gif"
| 145866 | # Description
# Silly scripts (usually automatic responses) that provide some spice to hubot.
#
# Commands:
# hubot list lists - returns a list of lists that can be returned
#
# Notes:
# You should edit /src/lists.json to include useful information.
#
# Author:
# <NAME>
hellos = [
"Greetings human.",
"wut up",
"<NAME>",
"'sup homeslice",
"Hi!",
"Ahoy, m<NAME>y!",
"<NAME>",
"<NAME>",
"Que pasa",
"Gutten tag",
"<NAME>",
"<NAME>",
"Yo!",
"GOOOOOOOOOD MORNING VIETNAM!",
"'Ello gov'nor!",
"This call may be recorded for training purposes.",
"How you doin'?",
"https://media.giphy.com/media/ASd0Ukj0y3qMM/giphy.gif",
"Myohmyohmyohmy"
]
papercount = [
"https://media.tenor.com/images/6cfa7a41ac8e289f5b3991f414429022/tenor.gif",
"https://media.giphy.com/media/tTMhcdiREv2W4/giphy.gif",
"https://media.giphy.com/media/aXThX9ftrHnji/giphy.gif",
"https://media1.giphy.com/media/m77wEsvF99FyU/giphy.gif",
"https://media3.giphy.com/media/l2Jeal95KEUTLzNpS/giphy.gif",
"https://media4.giphy.com/media/Nyyg06iiTiKli/200w.webp#21-grid1",
"https://media.giphy.com/media/310wCPV0kDUuk/giphy.gif",
"https://68.media.tumblr.com/f270e4494aa01cd26b52c699e77fdd98/tumblr_ml6ef4HvIp1rs9keio1_400.gif"
]
nooo = [
'http://www.nerdist.com/wp-content/uploads/2014/09/Vader-noooo.gif',
'http://www.nooooooooooooooo.com/vader.jpg',
'http://i.imgur.com/fE18keE.gif'
]
lists = require('./lists.json')
module.exports = (robot) ->
robot.hear /hello|greeting|good morning|good evening|aloha|hola|hi haramb/i, (msg) ->
msg.send msg.random hellos
robot.hear /paper count|papercount/i, (msg) ->
msg.send msg.random papercount
robot.hear /orly/i, (msg) ->
msg.send "yarly"
robot.hear /\bno{3,}\b/i, (msg) ->
msg.send msg.random nooo
robot.hear /harambe/i, (msg) ->
msg.send "Perhaps, you are referring to me, <NAME>, brought back to life in this form: a chat bot. #NeverForget."
robot.respond /list (.*)$/i, (msg) ->
key = msg.match[1].toLowerCase().split(" ").join("-")
msg.send lists[key]
robot.hear /badge/i, (msg) ->
msg.send "Badges? BADGES?!? We Don't Need No Stink'n BADGES!!!!"
robot.hear /it's working|its working|it works/i, (msg) ->
msg.send "https://media.giphy.com/media/GIEXgLDfghUSQ/giphy.gif"
| true | # Description
# Silly scripts (usually automatic responses) that provide some spice to hubot.
#
# Commands:
# hubot list lists - returns a list of lists that can be returned
#
# Notes:
# You should edit /src/lists.json to include useful information.
#
# Author:
# PI:NAME:<NAME>END_PI
hellos = [
"Greetings human.",
"wut up",
"PI:NAME:<NAME>END_PI",
"'sup homeslice",
"Hi!",
"Ahoy, mPI:NAME:<NAME>END_PIy!",
"PI:NAME:<NAME>END_PI",
"PI:NAME:<NAME>END_PI",
"Que pasa",
"Gutten tag",
"PI:NAME:<NAME>END_PI",
"PI:NAME:<NAME>END_PI",
"Yo!",
"GOOOOOOOOOD MORNING VIETNAM!",
"'Ello gov'nor!",
"This call may be recorded for training purposes.",
"How you doin'?",
"https://media.giphy.com/media/ASd0Ukj0y3qMM/giphy.gif",
"Myohmyohmyohmy"
]
papercount = [
"https://media.tenor.com/images/6cfa7a41ac8e289f5b3991f414429022/tenor.gif",
"https://media.giphy.com/media/tTMhcdiREv2W4/giphy.gif",
"https://media.giphy.com/media/aXThX9ftrHnji/giphy.gif",
"https://media1.giphy.com/media/m77wEsvF99FyU/giphy.gif",
"https://media3.giphy.com/media/l2Jeal95KEUTLzNpS/giphy.gif",
"https://media4.giphy.com/media/Nyyg06iiTiKli/200w.webp#21-grid1",
"https://media.giphy.com/media/310wCPV0kDUuk/giphy.gif",
"https://68.media.tumblr.com/f270e4494aa01cd26b52c699e77fdd98/tumblr_ml6ef4HvIp1rs9keio1_400.gif"
]
nooo = [
'http://www.nerdist.com/wp-content/uploads/2014/09/Vader-noooo.gif',
'http://www.nooooooooooooooo.com/vader.jpg',
'http://i.imgur.com/fE18keE.gif'
]
lists = require('./lists.json')
module.exports = (robot) ->
robot.hear /hello|greeting|good morning|good evening|aloha|hola|hi haramb/i, (msg) ->
msg.send msg.random hellos
robot.hear /paper count|papercount/i, (msg) ->
msg.send msg.random papercount
robot.hear /orly/i, (msg) ->
msg.send "yarly"
robot.hear /\bno{3,}\b/i, (msg) ->
msg.send msg.random nooo
robot.hear /harambe/i, (msg) ->
msg.send "Perhaps, you are referring to me, PI:NAME:<NAME>END_PI, brought back to life in this form: a chat bot. #NeverForget."
robot.respond /list (.*)$/i, (msg) ->
key = msg.match[1].toLowerCase().split(" ").join("-")
msg.send lists[key]
robot.hear /badge/i, (msg) ->
msg.send "Badges? BADGES?!? We Don't Need No Stink'n BADGES!!!!"
robot.hear /it's working|its working|it works/i, (msg) ->
msg.send "https://media.giphy.com/media/GIEXgLDfghUSQ/giphy.gif"
|
[
{
"context": "puUsage.push({\n \"key\": \"two\",\n \"y\": result[0].data.",
"end": 3051,
"score": 0.5210563540458679,
"start": 3048,
"tag": "KEY",
"value": "two"
},
{
"context": "geUsage.push({\n \"key\": \"one\",\n \"y\": result[1].data.",
"end": 3248,
"score": 0.5209628939628601,
"start": 3245,
"tag": "KEY",
"value": "one"
},
{
"context": "geUsage.push({\n \"key\": \"two\",\n \"y\": result[1].data.",
"end": 3424,
"score": 0.6121820211410522,
"start": 3421,
"tag": "KEY",
"value": "two"
},
{
"context": "ryUsage.push({\n \"key\": \"one\",\n \"y\": result[2].data.",
"end": 3620,
"score": 0.7895482778549194,
"start": 3617,
"tag": "KEY",
"value": "one"
},
{
"context": "ryUsage.push({\n \"key\": \"two\",\n \"y\": result[2].data.",
"end": 3795,
"score": 0.7946485280990601,
"start": 3792,
"tag": "KEY",
"value": "two"
},
{
"context": "puUsage.push({\n \"key\": \"one\", \n \"y\": data.allocated",
"end": 4215,
"score": 0.6956160664558411,
"start": 4212,
"tag": "KEY",
"value": "one"
},
{
"context": "moryUsage.push({\n \"key\": \"one\",\n \"y\": data.allocated\n ",
"end": 4728,
"score": 0.5820963978767395,
"start": 4725,
"tag": "KEY",
"value": "one"
},
{
"context": "moryUsage.push({\n \"key\": \"two\",\n \"y\": data.total-data.a",
"end": 4888,
"score": 0.5672182440757751,
"start": 4885,
"tag": "KEY",
"value": "two"
},
{
"context": "rageUsage.push({\n \"key\": \"one\",\n \"y\": data.allocated\n ",
"end": 5218,
"score": 0.5207192897796631,
"start": 5215,
"tag": "KEY",
"value": "one"
},
{
"context": "rageUsage.push({\n \"key\": \"two\",\n \"y\": data.total-data.a",
"end": 5378,
"score": 0.8189526796340942,
"start": 5375,
"tag": "KEY",
"value": "two"
}
] | v2.5/src/app/services/rsaService.coffee | sharonlucong/compass-intel-rsa-dist | 0 | define(['./baseService'], ->
'use strict';
window.Namespace = {
"podid":"id",
"rackid": "id",
"drawerid": "id",
"thermalzoneid": "id",
"powerzoneid": "id",
"moduleid": "id",
"processorid": "id",
"memoryid": "id",
"numOfFansPresent": "number_of_fans_present",
"maxFanNumbers": "max_fans_supported",
"presentTemperature": "present_temperature",
"outletTemperature": "outlet_temperature",
"volumetricAirflow": "volumetric_airflow",
"numOfPsusPresent": "number_of_psus_present",
"maxNumPsus": "number_of_psus_supported",
"component_name":"component_name",
"storageid": "id"
}
class RsaService
constructor: (@dataService, @$q, @rsaFactory) ->
getRSAManagers: ($scope) ->
$scope.pods = []
$q = @$q
deferred = $q.defer()
@dataService.getRSAManagers().success (data) ->
for num, value of data
for key, val of value
if key is Namespace.podid
$scope.pods.push({
"title": "POD "+val,
"id": val
})
deferred.resolve()
return deferred.promise
getPodRacks: ($scope, podid) ->
$q = @$q
deferred = $q.defer()
$scope.racks = []
@dataService.getPodRacks(podid).success (data) ->
for num, value of data
for key, val of value
if key is Namespace.rackid
$scope.racks.push({
"title": "Rack " + val
"id": val
})
deferred.resolve()
return deferred.promise
getAllChartsData: ($scope, rackid) ->
promises = []
deferred = @$q.defer()
cpuPromise = @dataService.getCpuUsage(rackid)
memoryPromise = @dataService.getMemoryUsage(rackid)
storagePromise = @dataService.getStorageUsage(rackid)
promises.push(cpuPromise)
promises.push(storagePromise)
promises.push(memoryPromise)
@$q.all(promises).then (result) ->
$scope.cpuUsage = []
$scope.storageUsage = []
$scope.memoryUsage = []
$scope.cpuUsage.push({
"key": "one",
"y": result[0].data.allocated
})
$scope.cpuUsage.push({
"key": "two",
"y": result[0].data.total-result[0].data.allocated
})
$scope.storageUsage.push({
"key": "one",
"y": result[1].data.allocated
})
$scope.storageUsage.push({
"key": "two",
"y": result[1].data.total-result[1].data.allocated
})
$scope.memoryUsage.push({
"key": "one",
"y": result[2].data.allocated
})
$scope.memoryUsage.push({
"key": "two",
"y": result[2].data.total-result[2].data.allocated
})
deferred.resolve()
return deferred.promise
getCpuUsage: ($scope, rackid) ->
$scope.cpuUsage = []
@dataService.getCpuUsage(rackid).success (data) ->
$scope.cpuUsage.push({
"key": "one",
"y": data.allocated
})
$scope.cpuUsage.push({
"key": "two",
"y": data.total-data.allocated
})
getMemoryUsage: ($scope, rackid) ->
$scope.memoryUsage = []
@dataService.getMemoryUsage(rackid).success (data) ->
$scope.memoryUsage.push({
"key": "one",
"y": data.allocated
})
$scope.memoryUsage.push({
"key": "two",
"y": data.total-data.allocated
})
getStorageUsage: ($scope, rackid) ->
$scope.storageUsage = []
@dataService.getStorageUsage(rackid).success (data) ->
$scope.storageUsage.push({
"key": "one",
"y": data.allocated
})
$scope.storageUsage.push({
"key": "two",
"y": data.total-data.allocated
})
getRackDrawers: ($scope, rackid) ->
$scope.drawers = []
@dataService.getRackDrawers(rackid).success (data) ->
for num, value of data
for key,val of value
if key is Namespace.drawerid
$scope.drawers.push({
"title": "Drawer " + val
"id": val
})
getRackThermalZones: ($scope, rackid) ->
$scope.thermalZones = []
@dataService.getRackThermalZones(rackid).success (data) ->
for num, value of data
thermalDetail = {}
for key, val of value
if key is Namespace.thermalzoneid
thermalDetail["id"] = val
if key is Namespace.numOfFansPresent
thermalDetail["presentFanNum"] = val
if key is Namespace.maxFanNumbers
thermalDetail["maxFanNum"] = val
$scope.thermalZones.push(thermalDetail)
getRackPowerZones: ($scope, rackid) ->
$scope.powerZones = []
@dataService.getRackPowerZones(rackid).success (data) ->
for num, value of data
powerDetail = {}
for key, val of value
if key is Namespace.powerzoneid
powerDetail["id"] = val
if key is Namespace.numOfPsusPresent
powerDetail["presentPsNum"] = val
if key is Namespace.maxNumPsus
powerDetail["maxPsNum"] = val
$scope.powerZones.push(powerDetail)
# console.log($scope.powerZones)
getRackPowerZonesSupplyUnits: ($scope, rackid, zoneid) ->
$scope.powerData = {}
$scope.powerData["name"] = "Power Zone " + zoneid
$scope.powerData["children"] = []
deferred = @$q.defer()
@dataService.getRackPowerZonesSupplyUnits(rackid, zoneid).success (data) ->
for num, value of data
unitsdetail = {}
unitsdetail["details"] = {}
unitsdetail["size"] = 200
for unitkey, unitval of value
if !unitsdetail["name"]
unitsdetail["name"] = value[Namespace.component_name]
if typeof unitval isnt "string"
if Array.isArray(unitval)
for idx, obj of unitval
for objkey, objval of obj
unitsdetail["details"][objkey] = objval
else
for objkey, objval of unitval
unitsdetail["details"][objkey] = objval
else
unitsdetail["details"][unitkey] = unitval
$scope.powerData["children"].push(unitsdetail)
deferred.resolve()
return deferred.promise
getRackThermalZonesFans: ($scope, rackid, zoneid) ->
$scope.thermalData = {}
$scope.thermalData["name"] = "Thermal Zone " + zoneid
$scope.thermalData["children"] = []
$q = @$q
deferred = $q.defer()
@dataService.getRackThermalZonesFans(rackid, zoneid).success (data) ->
for num, value of data
thermals = {}
thermals["details"] = {}
thermals["size"] = 200
for thermalkey, thermalval of value
if !thermals["name"]
thermals["name"] = value[Namespace.component_name]
if typeof thermalval isnt "string"
if Array.isArray(thermalval)
for idx, obj of thermalval
for objkey, objval of obj
thermals["details"][objkey] = objval
else
for objkey, objval of thermalval
thermals["details"][objkey] = objval
else
thermals["details"][thermalkey] = thermalval
$scope.thermalData["children"].push(thermals)
deferred.resolve()
return deferred.promise
getRackDrawerDetails: ($scope, drawerid) ->
dataService = @dataService
$scope.drawerData = {}
$q = @$q
deferred = $q.defer()
dataService.getRackDrawerModules(drawerid).success (data) ->
# console.log(data)
promises = []
$scope.drawerData["name"] = "drawer " + drawerid
$scope.drawerData["children"] = []
if data.length is 0
$scope.drawerData["children"].push({})
$scope.drawerData["size"] = 200
deferred.resolve()
else
for num, value of data
for key, val of value
if key is Namespace.moduleid
moduledetail = {}
moduledetail["name"] = "Module " + val
moduledetail["children"] = []
$scope.drawerData["children"].push(moduledetail)
processorPromise = dataService.getRackDrawerModuleProcessors(drawerid, val)
memoryPromise = dataService.getRackDrawerModuleMemories(drawerid, val)
storagePromise = dataService.getModuleStorage(val)
promises.push(processorPromise)
promises.push(memoryPromise)
promises.push(storagePromise)
index = 0
$q.all(promises).then (result)->
memoriesdetail = {}
processorsdetail = {}
storagesdetail = {}
angular.forEach(result, (response)->
angular.forEach(response.data, (responsedata)->
#object in one request
if responsedata[Namespace.component_name] is "Memory Module"
if !memoriesdetail["name"]
memoriesdetail["name"] = "Memories"
memoriesdetail["children"] = []
memorydetail = {}
memorydetail["name"] = "memory " + responsedata[Namespace.memoryid]
memorydetail["details"] = {}
memorydetail["size"] = 200
for mkey, mval of responsedata
if typeof mval isnt "object"
memorydetail["details"][mkey] = mval
else
str = "{"
for kmval, vmval of mval
str+= "\n"+ kmval + ": " + vmval+ "\n"
str+="}"
memorydetail["details"][mkey] = str
memoriesdetail["children"].push(memorydetail)
else if responsedata[Namespace.component_name] is "Processor"
if !processorsdetail["name"]
processorsdetail["name"] = "Processors"
processorsdetail["children"] = []
processordetail = {}
processordetail["name"] = "processor " + responsedata[Namespace.processorid]
processordetail["details"] = {}
processordetail["size"] = 200
for processorkey, processorval of responsedata
if typeof processorval isnt "object"
processordetail["details"][processorkey]=processorval
processorsdetail["children"].push(processordetail)
else
if !storagesdetail["name"]
storagesdetail["name"] = "Storage"
storagesdetail["children"] = []
storagedetail = {}
storagedetail["name"] = "device " + responsedata[Namespace.storageid]
storagedetail["details"] = {}
storagedetail["size"] = 200
for storagekey, storageval of responsedata
storagedetail["details"][storagekey] = storageval
storagesdetail["children"].push(storagedetail)
)
# if Object.keys(storagesdetail).length isnt 0
# $scope.drawerData["children"][index]["children"].push(storagesdetail)
if Object.keys(memoriesdetail).length isnt 0 and Object.keys(processorsdetail).length isnt 0
$scope.drawerData["children"][index]["children"].push(processorsdetail)
$scope.drawerData["children"][index]["children"].push(memoriesdetail)
memoriesdetail = {}
processorsdetail = {}
storagesdetail = {}
index++
)
deferred.resolve()
return deferred.promise
angular.module('compass.services').service('rsaService',['dataService','$q','rsaFactory', (dataService, $q, rsaFactory) -> new RsaService(dataService, $q, rsaFactory)])
)
| 66242 | define(['./baseService'], ->
'use strict';
window.Namespace = {
"podid":"id",
"rackid": "id",
"drawerid": "id",
"thermalzoneid": "id",
"powerzoneid": "id",
"moduleid": "id",
"processorid": "id",
"memoryid": "id",
"numOfFansPresent": "number_of_fans_present",
"maxFanNumbers": "max_fans_supported",
"presentTemperature": "present_temperature",
"outletTemperature": "outlet_temperature",
"volumetricAirflow": "volumetric_airflow",
"numOfPsusPresent": "number_of_psus_present",
"maxNumPsus": "number_of_psus_supported",
"component_name":"component_name",
"storageid": "id"
}
class RsaService
constructor: (@dataService, @$q, @rsaFactory) ->
getRSAManagers: ($scope) ->
$scope.pods = []
$q = @$q
deferred = $q.defer()
@dataService.getRSAManagers().success (data) ->
for num, value of data
for key, val of value
if key is Namespace.podid
$scope.pods.push({
"title": "POD "+val,
"id": val
})
deferred.resolve()
return deferred.promise
getPodRacks: ($scope, podid) ->
$q = @$q
deferred = $q.defer()
$scope.racks = []
@dataService.getPodRacks(podid).success (data) ->
for num, value of data
for key, val of value
if key is Namespace.rackid
$scope.racks.push({
"title": "Rack " + val
"id": val
})
deferred.resolve()
return deferred.promise
getAllChartsData: ($scope, rackid) ->
promises = []
deferred = @$q.defer()
cpuPromise = @dataService.getCpuUsage(rackid)
memoryPromise = @dataService.getMemoryUsage(rackid)
storagePromise = @dataService.getStorageUsage(rackid)
promises.push(cpuPromise)
promises.push(storagePromise)
promises.push(memoryPromise)
@$q.all(promises).then (result) ->
$scope.cpuUsage = []
$scope.storageUsage = []
$scope.memoryUsage = []
$scope.cpuUsage.push({
"key": "one",
"y": result[0].data.allocated
})
$scope.cpuUsage.push({
"key": "<KEY>",
"y": result[0].data.total-result[0].data.allocated
})
$scope.storageUsage.push({
"key": "<KEY>",
"y": result[1].data.allocated
})
$scope.storageUsage.push({
"key": "<KEY>",
"y": result[1].data.total-result[1].data.allocated
})
$scope.memoryUsage.push({
"key": "<KEY>",
"y": result[2].data.allocated
})
$scope.memoryUsage.push({
"key": "<KEY>",
"y": result[2].data.total-result[2].data.allocated
})
deferred.resolve()
return deferred.promise
getCpuUsage: ($scope, rackid) ->
$scope.cpuUsage = []
@dataService.getCpuUsage(rackid).success (data) ->
$scope.cpuUsage.push({
"key": "<KEY>",
"y": data.allocated
})
$scope.cpuUsage.push({
"key": "two",
"y": data.total-data.allocated
})
getMemoryUsage: ($scope, rackid) ->
$scope.memoryUsage = []
@dataService.getMemoryUsage(rackid).success (data) ->
$scope.memoryUsage.push({
"key": "<KEY>",
"y": data.allocated
})
$scope.memoryUsage.push({
"key": "<KEY>",
"y": data.total-data.allocated
})
getStorageUsage: ($scope, rackid) ->
$scope.storageUsage = []
@dataService.getStorageUsage(rackid).success (data) ->
$scope.storageUsage.push({
"key": "<KEY>",
"y": data.allocated
})
$scope.storageUsage.push({
"key": "<KEY>",
"y": data.total-data.allocated
})
getRackDrawers: ($scope, rackid) ->
$scope.drawers = []
@dataService.getRackDrawers(rackid).success (data) ->
for num, value of data
for key,val of value
if key is Namespace.drawerid
$scope.drawers.push({
"title": "Drawer " + val
"id": val
})
getRackThermalZones: ($scope, rackid) ->
$scope.thermalZones = []
@dataService.getRackThermalZones(rackid).success (data) ->
for num, value of data
thermalDetail = {}
for key, val of value
if key is Namespace.thermalzoneid
thermalDetail["id"] = val
if key is Namespace.numOfFansPresent
thermalDetail["presentFanNum"] = val
if key is Namespace.maxFanNumbers
thermalDetail["maxFanNum"] = val
$scope.thermalZones.push(thermalDetail)
getRackPowerZones: ($scope, rackid) ->
$scope.powerZones = []
@dataService.getRackPowerZones(rackid).success (data) ->
for num, value of data
powerDetail = {}
for key, val of value
if key is Namespace.powerzoneid
powerDetail["id"] = val
if key is Namespace.numOfPsusPresent
powerDetail["presentPsNum"] = val
if key is Namespace.maxNumPsus
powerDetail["maxPsNum"] = val
$scope.powerZones.push(powerDetail)
# console.log($scope.powerZones)
getRackPowerZonesSupplyUnits: ($scope, rackid, zoneid) ->
$scope.powerData = {}
$scope.powerData["name"] = "Power Zone " + zoneid
$scope.powerData["children"] = []
deferred = @$q.defer()
@dataService.getRackPowerZonesSupplyUnits(rackid, zoneid).success (data) ->
for num, value of data
unitsdetail = {}
unitsdetail["details"] = {}
unitsdetail["size"] = 200
for unitkey, unitval of value
if !unitsdetail["name"]
unitsdetail["name"] = value[Namespace.component_name]
if typeof unitval isnt "string"
if Array.isArray(unitval)
for idx, obj of unitval
for objkey, objval of obj
unitsdetail["details"][objkey] = objval
else
for objkey, objval of unitval
unitsdetail["details"][objkey] = objval
else
unitsdetail["details"][unitkey] = unitval
$scope.powerData["children"].push(unitsdetail)
deferred.resolve()
return deferred.promise
getRackThermalZonesFans: ($scope, rackid, zoneid) ->
$scope.thermalData = {}
$scope.thermalData["name"] = "Thermal Zone " + zoneid
$scope.thermalData["children"] = []
$q = @$q
deferred = $q.defer()
@dataService.getRackThermalZonesFans(rackid, zoneid).success (data) ->
for num, value of data
thermals = {}
thermals["details"] = {}
thermals["size"] = 200
for thermalkey, thermalval of value
if !thermals["name"]
thermals["name"] = value[Namespace.component_name]
if typeof thermalval isnt "string"
if Array.isArray(thermalval)
for idx, obj of thermalval
for objkey, objval of obj
thermals["details"][objkey] = objval
else
for objkey, objval of thermalval
thermals["details"][objkey] = objval
else
thermals["details"][thermalkey] = thermalval
$scope.thermalData["children"].push(thermals)
deferred.resolve()
return deferred.promise
getRackDrawerDetails: ($scope, drawerid) ->
dataService = @dataService
$scope.drawerData = {}
$q = @$q
deferred = $q.defer()
dataService.getRackDrawerModules(drawerid).success (data) ->
# console.log(data)
promises = []
$scope.drawerData["name"] = "drawer " + drawerid
$scope.drawerData["children"] = []
if data.length is 0
$scope.drawerData["children"].push({})
$scope.drawerData["size"] = 200
deferred.resolve()
else
for num, value of data
for key, val of value
if key is Namespace.moduleid
moduledetail = {}
moduledetail["name"] = "Module " + val
moduledetail["children"] = []
$scope.drawerData["children"].push(moduledetail)
processorPromise = dataService.getRackDrawerModuleProcessors(drawerid, val)
memoryPromise = dataService.getRackDrawerModuleMemories(drawerid, val)
storagePromise = dataService.getModuleStorage(val)
promises.push(processorPromise)
promises.push(memoryPromise)
promises.push(storagePromise)
index = 0
$q.all(promises).then (result)->
memoriesdetail = {}
processorsdetail = {}
storagesdetail = {}
angular.forEach(result, (response)->
angular.forEach(response.data, (responsedata)->
#object in one request
if responsedata[Namespace.component_name] is "Memory Module"
if !memoriesdetail["name"]
memoriesdetail["name"] = "Memories"
memoriesdetail["children"] = []
memorydetail = {}
memorydetail["name"] = "memory " + responsedata[Namespace.memoryid]
memorydetail["details"] = {}
memorydetail["size"] = 200
for mkey, mval of responsedata
if typeof mval isnt "object"
memorydetail["details"][mkey] = mval
else
str = "{"
for kmval, vmval of mval
str+= "\n"+ kmval + ": " + vmval+ "\n"
str+="}"
memorydetail["details"][mkey] = str
memoriesdetail["children"].push(memorydetail)
else if responsedata[Namespace.component_name] is "Processor"
if !processorsdetail["name"]
processorsdetail["name"] = "Processors"
processorsdetail["children"] = []
processordetail = {}
processordetail["name"] = "processor " + responsedata[Namespace.processorid]
processordetail["details"] = {}
processordetail["size"] = 200
for processorkey, processorval of responsedata
if typeof processorval isnt "object"
processordetail["details"][processorkey]=processorval
processorsdetail["children"].push(processordetail)
else
if !storagesdetail["name"]
storagesdetail["name"] = "Storage"
storagesdetail["children"] = []
storagedetail = {}
storagedetail["name"] = "device " + responsedata[Namespace.storageid]
storagedetail["details"] = {}
storagedetail["size"] = 200
for storagekey, storageval of responsedata
storagedetail["details"][storagekey] = storageval
storagesdetail["children"].push(storagedetail)
)
# if Object.keys(storagesdetail).length isnt 0
# $scope.drawerData["children"][index]["children"].push(storagesdetail)
if Object.keys(memoriesdetail).length isnt 0 and Object.keys(processorsdetail).length isnt 0
$scope.drawerData["children"][index]["children"].push(processorsdetail)
$scope.drawerData["children"][index]["children"].push(memoriesdetail)
memoriesdetail = {}
processorsdetail = {}
storagesdetail = {}
index++
)
deferred.resolve()
return deferred.promise
angular.module('compass.services').service('rsaService',['dataService','$q','rsaFactory', (dataService, $q, rsaFactory) -> new RsaService(dataService, $q, rsaFactory)])
)
| true | define(['./baseService'], ->
'use strict';
window.Namespace = {
"podid":"id",
"rackid": "id",
"drawerid": "id",
"thermalzoneid": "id",
"powerzoneid": "id",
"moduleid": "id",
"processorid": "id",
"memoryid": "id",
"numOfFansPresent": "number_of_fans_present",
"maxFanNumbers": "max_fans_supported",
"presentTemperature": "present_temperature",
"outletTemperature": "outlet_temperature",
"volumetricAirflow": "volumetric_airflow",
"numOfPsusPresent": "number_of_psus_present",
"maxNumPsus": "number_of_psus_supported",
"component_name":"component_name",
"storageid": "id"
}
class RsaService
constructor: (@dataService, @$q, @rsaFactory) ->
getRSAManagers: ($scope) ->
$scope.pods = []
$q = @$q
deferred = $q.defer()
@dataService.getRSAManagers().success (data) ->
for num, value of data
for key, val of value
if key is Namespace.podid
$scope.pods.push({
"title": "POD "+val,
"id": val
})
deferred.resolve()
return deferred.promise
getPodRacks: ($scope, podid) ->
$q = @$q
deferred = $q.defer()
$scope.racks = []
@dataService.getPodRacks(podid).success (data) ->
for num, value of data
for key, val of value
if key is Namespace.rackid
$scope.racks.push({
"title": "Rack " + val
"id": val
})
deferred.resolve()
return deferred.promise
getAllChartsData: ($scope, rackid) ->
promises = []
deferred = @$q.defer()
cpuPromise = @dataService.getCpuUsage(rackid)
memoryPromise = @dataService.getMemoryUsage(rackid)
storagePromise = @dataService.getStorageUsage(rackid)
promises.push(cpuPromise)
promises.push(storagePromise)
promises.push(memoryPromise)
@$q.all(promises).then (result) ->
$scope.cpuUsage = []
$scope.storageUsage = []
$scope.memoryUsage = []
$scope.cpuUsage.push({
"key": "one",
"y": result[0].data.allocated
})
$scope.cpuUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": result[0].data.total-result[0].data.allocated
})
$scope.storageUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": result[1].data.allocated
})
$scope.storageUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": result[1].data.total-result[1].data.allocated
})
$scope.memoryUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": result[2].data.allocated
})
$scope.memoryUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": result[2].data.total-result[2].data.allocated
})
deferred.resolve()
return deferred.promise
getCpuUsage: ($scope, rackid) ->
$scope.cpuUsage = []
@dataService.getCpuUsage(rackid).success (data) ->
$scope.cpuUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": data.allocated
})
$scope.cpuUsage.push({
"key": "two",
"y": data.total-data.allocated
})
getMemoryUsage: ($scope, rackid) ->
$scope.memoryUsage = []
@dataService.getMemoryUsage(rackid).success (data) ->
$scope.memoryUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": data.allocated
})
$scope.memoryUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": data.total-data.allocated
})
getStorageUsage: ($scope, rackid) ->
$scope.storageUsage = []
@dataService.getStorageUsage(rackid).success (data) ->
$scope.storageUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": data.allocated
})
$scope.storageUsage.push({
"key": "PI:KEY:<KEY>END_PI",
"y": data.total-data.allocated
})
getRackDrawers: ($scope, rackid) ->
$scope.drawers = []
@dataService.getRackDrawers(rackid).success (data) ->
for num, value of data
for key,val of value
if key is Namespace.drawerid
$scope.drawers.push({
"title": "Drawer " + val
"id": val
})
getRackThermalZones: ($scope, rackid) ->
$scope.thermalZones = []
@dataService.getRackThermalZones(rackid).success (data) ->
for num, value of data
thermalDetail = {}
for key, val of value
if key is Namespace.thermalzoneid
thermalDetail["id"] = val
if key is Namespace.numOfFansPresent
thermalDetail["presentFanNum"] = val
if key is Namespace.maxFanNumbers
thermalDetail["maxFanNum"] = val
$scope.thermalZones.push(thermalDetail)
getRackPowerZones: ($scope, rackid) ->
$scope.powerZones = []
@dataService.getRackPowerZones(rackid).success (data) ->
for num, value of data
powerDetail = {}
for key, val of value
if key is Namespace.powerzoneid
powerDetail["id"] = val
if key is Namespace.numOfPsusPresent
powerDetail["presentPsNum"] = val
if key is Namespace.maxNumPsus
powerDetail["maxPsNum"] = val
$scope.powerZones.push(powerDetail)
# console.log($scope.powerZones)
getRackPowerZonesSupplyUnits: ($scope, rackid, zoneid) ->
$scope.powerData = {}
$scope.powerData["name"] = "Power Zone " + zoneid
$scope.powerData["children"] = []
deferred = @$q.defer()
@dataService.getRackPowerZonesSupplyUnits(rackid, zoneid).success (data) ->
for num, value of data
unitsdetail = {}
unitsdetail["details"] = {}
unitsdetail["size"] = 200
for unitkey, unitval of value
if !unitsdetail["name"]
unitsdetail["name"] = value[Namespace.component_name]
if typeof unitval isnt "string"
if Array.isArray(unitval)
for idx, obj of unitval
for objkey, objval of obj
unitsdetail["details"][objkey] = objval
else
for objkey, objval of unitval
unitsdetail["details"][objkey] = objval
else
unitsdetail["details"][unitkey] = unitval
$scope.powerData["children"].push(unitsdetail)
deferred.resolve()
return deferred.promise
getRackThermalZonesFans: ($scope, rackid, zoneid) ->
$scope.thermalData = {}
$scope.thermalData["name"] = "Thermal Zone " + zoneid
$scope.thermalData["children"] = []
$q = @$q
deferred = $q.defer()
@dataService.getRackThermalZonesFans(rackid, zoneid).success (data) ->
for num, value of data
thermals = {}
thermals["details"] = {}
thermals["size"] = 200
for thermalkey, thermalval of value
if !thermals["name"]
thermals["name"] = value[Namespace.component_name]
if typeof thermalval isnt "string"
if Array.isArray(thermalval)
for idx, obj of thermalval
for objkey, objval of obj
thermals["details"][objkey] = objval
else
for objkey, objval of thermalval
thermals["details"][objkey] = objval
else
thermals["details"][thermalkey] = thermalval
$scope.thermalData["children"].push(thermals)
deferred.resolve()
return deferred.promise
getRackDrawerDetails: ($scope, drawerid) ->
dataService = @dataService
$scope.drawerData = {}
$q = @$q
deferred = $q.defer()
dataService.getRackDrawerModules(drawerid).success (data) ->
# console.log(data)
promises = []
$scope.drawerData["name"] = "drawer " + drawerid
$scope.drawerData["children"] = []
if data.length is 0
$scope.drawerData["children"].push({})
$scope.drawerData["size"] = 200
deferred.resolve()
else
for num, value of data
for key, val of value
if key is Namespace.moduleid
moduledetail = {}
moduledetail["name"] = "Module " + val
moduledetail["children"] = []
$scope.drawerData["children"].push(moduledetail)
processorPromise = dataService.getRackDrawerModuleProcessors(drawerid, val)
memoryPromise = dataService.getRackDrawerModuleMemories(drawerid, val)
storagePromise = dataService.getModuleStorage(val)
promises.push(processorPromise)
promises.push(memoryPromise)
promises.push(storagePromise)
index = 0
$q.all(promises).then (result)->
memoriesdetail = {}
processorsdetail = {}
storagesdetail = {}
angular.forEach(result, (response)->
angular.forEach(response.data, (responsedata)->
#object in one request
if responsedata[Namespace.component_name] is "Memory Module"
if !memoriesdetail["name"]
memoriesdetail["name"] = "Memories"
memoriesdetail["children"] = []
memorydetail = {}
memorydetail["name"] = "memory " + responsedata[Namespace.memoryid]
memorydetail["details"] = {}
memorydetail["size"] = 200
for mkey, mval of responsedata
if typeof mval isnt "object"
memorydetail["details"][mkey] = mval
else
str = "{"
for kmval, vmval of mval
str+= "\n"+ kmval + ": " + vmval+ "\n"
str+="}"
memorydetail["details"][mkey] = str
memoriesdetail["children"].push(memorydetail)
else if responsedata[Namespace.component_name] is "Processor"
if !processorsdetail["name"]
processorsdetail["name"] = "Processors"
processorsdetail["children"] = []
processordetail = {}
processordetail["name"] = "processor " + responsedata[Namespace.processorid]
processordetail["details"] = {}
processordetail["size"] = 200
for processorkey, processorval of responsedata
if typeof processorval isnt "object"
processordetail["details"][processorkey]=processorval
processorsdetail["children"].push(processordetail)
else
if !storagesdetail["name"]
storagesdetail["name"] = "Storage"
storagesdetail["children"] = []
storagedetail = {}
storagedetail["name"] = "device " + responsedata[Namespace.storageid]
storagedetail["details"] = {}
storagedetail["size"] = 200
for storagekey, storageval of responsedata
storagedetail["details"][storagekey] = storageval
storagesdetail["children"].push(storagedetail)
)
# if Object.keys(storagesdetail).length isnt 0
# $scope.drawerData["children"][index]["children"].push(storagesdetail)
if Object.keys(memoriesdetail).length isnt 0 and Object.keys(processorsdetail).length isnt 0
$scope.drawerData["children"][index]["children"].push(processorsdetail)
$scope.drawerData["children"][index]["children"].push(memoriesdetail)
memoriesdetail = {}
processorsdetail = {}
storagesdetail = {}
index++
)
deferred.resolve()
return deferred.promise
angular.module('compass.services').service('rsaService',['dataService','$q','rsaFactory', (dataService, $q, rsaFactory) -> new RsaService(dataService, $q, rsaFactory)])
)
|
[
{
"context": "(@ws, el) ->\n Cesium.BingMapsApi.defaultKey = 'Ah1VAfThdeX7JxKOS0BldGGAAcvjmW72i89XYRt42hc0stR5QkjCqnFKKX3MPCvg'\n Cesium.MapboxApi.defaultAccessToken = 'pk.ey",
"end": 287,
"score": 0.9997390508651733,
"start": 223,
"tag": "KEY",
"value": "Ah1VAfThdeX7JxKOS0BldGGAAcvjmW72i89XYRt42hc0stR5QkjCqnFKKX3MPCvg"
},
{
"context": "MPCvg'\n Cesium.MapboxApi.defaultAccessToken = 'pk.eyJ1IjoiZ2luYS1hbGFza2EiLCJhIjoiN0lJVnk5QSJ9.CsQYpUUXtdCpnUdwurAYcQ'\n\n @activeBaseLayer = @ws.ui.getActiveBasemap(",
"end": 402,
"score": 0.9996039867401123,
"start": 332,
"tag": "KEY",
"value": "pk.eyJ1IjoiZ2luYS1hbGFza2EiLCJhIjoiN0lJVnk5QSJ9.CsQYpUUXtdCpnUdwurAYcQ"
}
] | app/assets/javascripts/workspace/cesium_view.coffee | gina-alaska/nasa-ace-web | 0 | @CESIUM_BASE_URL = "http://#{document.location.host}/cesium"
class @Workspace.CesiumView
BASE_HEIGHT: 554678932
supports: {
perspective: false
}
constructor: (@ws, el) ->
Cesium.BingMapsApi.defaultKey = 'Ah1VAfThdeX7JxKOS0BldGGAAcvjmW72i89XYRt42hc0stR5QkjCqnFKKX3MPCvg'
Cesium.MapboxApi.defaultAccessToken = 'pk.eyJ1IjoiZ2luYS1hbGFza2EiLCJhIjoiN0lJVnk5QSJ9.CsQYpUUXtdCpnUdwurAYcQ'
@activeBaseLayer = @ws.ui.getActiveBasemap()
@center = $(el).find('.map').data('center')
@zoom = $(el).find('.map').data('zoom')
@map = new Cesium.Viewer('map', {
baseLayerPicker: false,
homeButton: false,
sceneModePicker: false,
timeline: false,
geocode: false,
imageryProvider: @getLayerProvider(@activeBaseLayer)
})
@initializeCamera()
@initializeEvents()
setTimeout =>
@ws.trigger('ws.view.loaded')
, 100
initializeEvents: () =>
@registerMoveEndHandler()
@ws.on 'ws.view.move', (e, data) =>
@moveTo(data)
@ws.on 'ws.basemap.show', (e, data) =>
@setBaseLayer(data.name)
registerMoveEndHandler: () =>
if @clearRegisterMoveEnd?
@clearRegisterMoveEnd()
delete @clearRegisterMoveEnd
unless @clearMoveEnd?
# @map.camera.moveEnd.removeEventListener(@registerMoveEndHandler)
@clearMoveEnd = @map.camera.moveEnd.addEventListener(@afterMoveEnd)
afterMoveEnd: () =>
center = @map.camera.positionCartographic
heading = @map.camera.heading
zoom = Math.max(1, Math.log2(@BASE_HEIGHT / center.height) - 4)
@ws.trigger('ws.view.moved', { center: { lng: @radianToDegree(center.longitude), lat: @radianToDegree(center.latitude) }, zoom: zoom, bearing: @radianToDegree(@map.camera.heading) })
radianToDegree: (value) =>
parseFloat((value * (180/Math.PI)).toFixed(16))
moveTo: (data) =>
if data.zoom?
height = @BASE_HEIGHT / Math.pow(2, data.zoom + 4)
if data.height?
height = data.height
if @clearMoveEnd?
@clearMoveEnd()
delete @clearMoveEnd
# @map.camera.moveEnd.removeEventListener(@afterMoveEnd)
@map.camera.flyTo({
destination : Cesium.Cartesian3.fromDegrees(data.center.lng, data.center.lat, height),
heading : data.bearing / (180/Math.PI),
pitch : -Cesium.Math.PI_OVER_TWO,
roll : 0.0
})
@clearRegisterMoveEnd = @map.camera.moveEnd.addEventListener(@registerMoveEndHandler)
setBaseLayer: (name) =>
layers = @map.imageryLayers
@ws.layers.removeAll()
layers.addImageryProvider(@getLayerProvider(name))
@activeBaseLayer = name
@ws.trigger('ws.basemap.shown', { name: name })
getLayerProvider: (name) =>
if name == 'satellite-streets'
name = 'satellite'
mapId = "mapbox.#{name}"
new Cesium.MapboxImageryProvider({
mapId: mapId
})
initializeCamera: () =>
@map.camera.flyTo({
destination : Cesium.Cartesian3.fromDegrees(@center[0], @center[1], 15000000),
heading : 0.0,
pitch : -Cesium.Math.PI_OVER_TWO,
roll : 0.0
})
| 84743 | @CESIUM_BASE_URL = "http://#{document.location.host}/cesium"
class @Workspace.CesiumView
BASE_HEIGHT: 554678932
supports: {
perspective: false
}
constructor: (@ws, el) ->
Cesium.BingMapsApi.defaultKey = '<KEY>'
Cesium.MapboxApi.defaultAccessToken = '<KEY>'
@activeBaseLayer = @ws.ui.getActiveBasemap()
@center = $(el).find('.map').data('center')
@zoom = $(el).find('.map').data('zoom')
@map = new Cesium.Viewer('map', {
baseLayerPicker: false,
homeButton: false,
sceneModePicker: false,
timeline: false,
geocode: false,
imageryProvider: @getLayerProvider(@activeBaseLayer)
})
@initializeCamera()
@initializeEvents()
setTimeout =>
@ws.trigger('ws.view.loaded')
, 100
initializeEvents: () =>
@registerMoveEndHandler()
@ws.on 'ws.view.move', (e, data) =>
@moveTo(data)
@ws.on 'ws.basemap.show', (e, data) =>
@setBaseLayer(data.name)
registerMoveEndHandler: () =>
if @clearRegisterMoveEnd?
@clearRegisterMoveEnd()
delete @clearRegisterMoveEnd
unless @clearMoveEnd?
# @map.camera.moveEnd.removeEventListener(@registerMoveEndHandler)
@clearMoveEnd = @map.camera.moveEnd.addEventListener(@afterMoveEnd)
afterMoveEnd: () =>
center = @map.camera.positionCartographic
heading = @map.camera.heading
zoom = Math.max(1, Math.log2(@BASE_HEIGHT / center.height) - 4)
@ws.trigger('ws.view.moved', { center: { lng: @radianToDegree(center.longitude), lat: @radianToDegree(center.latitude) }, zoom: zoom, bearing: @radianToDegree(@map.camera.heading) })
radianToDegree: (value) =>
parseFloat((value * (180/Math.PI)).toFixed(16))
moveTo: (data) =>
if data.zoom?
height = @BASE_HEIGHT / Math.pow(2, data.zoom + 4)
if data.height?
height = data.height
if @clearMoveEnd?
@clearMoveEnd()
delete @clearMoveEnd
# @map.camera.moveEnd.removeEventListener(@afterMoveEnd)
@map.camera.flyTo({
destination : Cesium.Cartesian3.fromDegrees(data.center.lng, data.center.lat, height),
heading : data.bearing / (180/Math.PI),
pitch : -Cesium.Math.PI_OVER_TWO,
roll : 0.0
})
@clearRegisterMoveEnd = @map.camera.moveEnd.addEventListener(@registerMoveEndHandler)
setBaseLayer: (name) =>
layers = @map.imageryLayers
@ws.layers.removeAll()
layers.addImageryProvider(@getLayerProvider(name))
@activeBaseLayer = name
@ws.trigger('ws.basemap.shown', { name: name })
getLayerProvider: (name) =>
if name == 'satellite-streets'
name = 'satellite'
mapId = "mapbox.#{name}"
new Cesium.MapboxImageryProvider({
mapId: mapId
})
initializeCamera: () =>
@map.camera.flyTo({
destination : Cesium.Cartesian3.fromDegrees(@center[0], @center[1], 15000000),
heading : 0.0,
pitch : -Cesium.Math.PI_OVER_TWO,
roll : 0.0
})
| true | @CESIUM_BASE_URL = "http://#{document.location.host}/cesium"
class @Workspace.CesiumView
BASE_HEIGHT: 554678932
supports: {
perspective: false
}
constructor: (@ws, el) ->
Cesium.BingMapsApi.defaultKey = 'PI:KEY:<KEY>END_PI'
Cesium.MapboxApi.defaultAccessToken = 'PI:KEY:<KEY>END_PI'
@activeBaseLayer = @ws.ui.getActiveBasemap()
@center = $(el).find('.map').data('center')
@zoom = $(el).find('.map').data('zoom')
@map = new Cesium.Viewer('map', {
baseLayerPicker: false,
homeButton: false,
sceneModePicker: false,
timeline: false,
geocode: false,
imageryProvider: @getLayerProvider(@activeBaseLayer)
})
@initializeCamera()
@initializeEvents()
setTimeout =>
@ws.trigger('ws.view.loaded')
, 100
initializeEvents: () =>
@registerMoveEndHandler()
@ws.on 'ws.view.move', (e, data) =>
@moveTo(data)
@ws.on 'ws.basemap.show', (e, data) =>
@setBaseLayer(data.name)
registerMoveEndHandler: () =>
if @clearRegisterMoveEnd?
@clearRegisterMoveEnd()
delete @clearRegisterMoveEnd
unless @clearMoveEnd?
# @map.camera.moveEnd.removeEventListener(@registerMoveEndHandler)
@clearMoveEnd = @map.camera.moveEnd.addEventListener(@afterMoveEnd)
afterMoveEnd: () =>
center = @map.camera.positionCartographic
heading = @map.camera.heading
zoom = Math.max(1, Math.log2(@BASE_HEIGHT / center.height) - 4)
@ws.trigger('ws.view.moved', { center: { lng: @radianToDegree(center.longitude), lat: @radianToDegree(center.latitude) }, zoom: zoom, bearing: @radianToDegree(@map.camera.heading) })
radianToDegree: (value) =>
parseFloat((value * (180/Math.PI)).toFixed(16))
moveTo: (data) =>
if data.zoom?
height = @BASE_HEIGHT / Math.pow(2, data.zoom + 4)
if data.height?
height = data.height
if @clearMoveEnd?
@clearMoveEnd()
delete @clearMoveEnd
# @map.camera.moveEnd.removeEventListener(@afterMoveEnd)
@map.camera.flyTo({
destination : Cesium.Cartesian3.fromDegrees(data.center.lng, data.center.lat, height),
heading : data.bearing / (180/Math.PI),
pitch : -Cesium.Math.PI_OVER_TWO,
roll : 0.0
})
@clearRegisterMoveEnd = @map.camera.moveEnd.addEventListener(@registerMoveEndHandler)
setBaseLayer: (name) =>
layers = @map.imageryLayers
@ws.layers.removeAll()
layers.addImageryProvider(@getLayerProvider(name))
@activeBaseLayer = name
@ws.trigger('ws.basemap.shown', { name: name })
getLayerProvider: (name) =>
if name == 'satellite-streets'
name = 'satellite'
mapId = "mapbox.#{name}"
new Cesium.MapboxImageryProvider({
mapId: mapId
})
initializeCamera: () =>
@map.camera.flyTo({
destination : Cesium.Cartesian3.fromDegrees(@center[0], @center[1], 15000000),
heading : 0.0,
pitch : -Cesium.Math.PI_OVER_TWO,
roll : 0.0
})
|
[
{
"context": "e(header(\"/* Bacontrap v#{version}. Copyright 2013 Ville Lautanala. https://raw.githubusercontent.com/lautis/bacontr",
"end": 1681,
"score": 0.9998920559883118,
"start": 1666,
"tag": "NAME",
"value": "Ville Lautanala"
},
{
"context": "ille Lautanala. https://raw.githubusercontent.com/lautis/bacontrap/master/LICENSE.txt */\"))\n .pipe(rena",
"end": 1723,
"score": 0.9986960291862488,
"start": 1717,
"tag": "USERNAME",
"value": "lautis"
}
] | gulpfile.coffee | lautis/bacontrap | 2 | gulp = require 'gulp'
gutil = require 'gulp-util'
coffee = require 'gulp-coffee'
source = require 'vinyl-source-stream'
browserify = require 'browserify'
watchify = require 'watchify'
karma = require('karma').server
rename = require 'gulp-rename'
uglify = require 'gulp-uglify'
fs = require 'fs'
_ = require 'lodash'
header = require 'gulp-header'
coffeeify = require 'coffeeify'
karmaConfiguration =
browsers: ['PhantomJS']
frameworks: ['mocha']
files: [
'test/test.js'
]
gulp.task 'watch', (done) ->
rebundle = ->
bundler.bundle()
.pipe(source('test.js'))
.pipe(gulp.dest('./test/'))
bundler = watchify('./test/bacontrap_spec.coffee')
bundler.on('update', rebundle)
rebundle()
karma.start(_.assign({}, karmaConfiguration, singleRun: false), done)
gulp.task 'test-build', ->
browserify('./test/bacontrap_spec.coffee').transform(coffeeify).bundle()
.pipe(source('test.js'))
.pipe(gulp.dest('./test/'))
gulp.task 'test', ['test-build'], (done) ->
karma.start(_.assign({}, karmaConfiguration, singleRun: true), done)
gulp.task 'test-browser', ['test-build'], (done) ->
karma.start(_.assign({}, karmaConfiguration, singleRun: true, browsers: ['Chrome', 'Firefox']), done)
gulp.task 'dist', ->
version = JSON.parse(fs.readFileSync('package.json')).version
copyright = "/*\n Bacontrap v#{version}\n\n " + fs.readFileSync('LICENSE.txt').toString().split('\n').join('\n ').replace(/\s+$/gm, '\n') + "\n*/"
gulp.src('src/bacontrap.coffee')
.pipe(coffee(bare: true))
.pipe(header(copyright))
.pipe(gulp.dest('./'))
.pipe(uglify())
.pipe(header("/* Bacontrap v#{version}. Copyright 2013 Ville Lautanala. https://raw.githubusercontent.com/lautis/bacontrap/master/LICENSE.txt */"))
.pipe(rename('bacontrap.min.js'))
.pipe(gulp.dest('./'))
gulp.task 'default', ['test', 'dist']
| 199691 | gulp = require 'gulp'
gutil = require 'gulp-util'
coffee = require 'gulp-coffee'
source = require 'vinyl-source-stream'
browserify = require 'browserify'
watchify = require 'watchify'
karma = require('karma').server
rename = require 'gulp-rename'
uglify = require 'gulp-uglify'
fs = require 'fs'
_ = require 'lodash'
header = require 'gulp-header'
coffeeify = require 'coffeeify'
karmaConfiguration =
browsers: ['PhantomJS']
frameworks: ['mocha']
files: [
'test/test.js'
]
gulp.task 'watch', (done) ->
rebundle = ->
bundler.bundle()
.pipe(source('test.js'))
.pipe(gulp.dest('./test/'))
bundler = watchify('./test/bacontrap_spec.coffee')
bundler.on('update', rebundle)
rebundle()
karma.start(_.assign({}, karmaConfiguration, singleRun: false), done)
gulp.task 'test-build', ->
browserify('./test/bacontrap_spec.coffee').transform(coffeeify).bundle()
.pipe(source('test.js'))
.pipe(gulp.dest('./test/'))
gulp.task 'test', ['test-build'], (done) ->
karma.start(_.assign({}, karmaConfiguration, singleRun: true), done)
gulp.task 'test-browser', ['test-build'], (done) ->
karma.start(_.assign({}, karmaConfiguration, singleRun: true, browsers: ['Chrome', 'Firefox']), done)
gulp.task 'dist', ->
version = JSON.parse(fs.readFileSync('package.json')).version
copyright = "/*\n Bacontrap v#{version}\n\n " + fs.readFileSync('LICENSE.txt').toString().split('\n').join('\n ').replace(/\s+$/gm, '\n') + "\n*/"
gulp.src('src/bacontrap.coffee')
.pipe(coffee(bare: true))
.pipe(header(copyright))
.pipe(gulp.dest('./'))
.pipe(uglify())
.pipe(header("/* Bacontrap v#{version}. Copyright 2013 <NAME>. https://raw.githubusercontent.com/lautis/bacontrap/master/LICENSE.txt */"))
.pipe(rename('bacontrap.min.js'))
.pipe(gulp.dest('./'))
gulp.task 'default', ['test', 'dist']
| true | gulp = require 'gulp'
gutil = require 'gulp-util'
coffee = require 'gulp-coffee'
source = require 'vinyl-source-stream'
browserify = require 'browserify'
watchify = require 'watchify'
karma = require('karma').server
rename = require 'gulp-rename'
uglify = require 'gulp-uglify'
fs = require 'fs'
_ = require 'lodash'
header = require 'gulp-header'
coffeeify = require 'coffeeify'
karmaConfiguration =
browsers: ['PhantomJS']
frameworks: ['mocha']
files: [
'test/test.js'
]
gulp.task 'watch', (done) ->
rebundle = ->
bundler.bundle()
.pipe(source('test.js'))
.pipe(gulp.dest('./test/'))
bundler = watchify('./test/bacontrap_spec.coffee')
bundler.on('update', rebundle)
rebundle()
karma.start(_.assign({}, karmaConfiguration, singleRun: false), done)
gulp.task 'test-build', ->
browserify('./test/bacontrap_spec.coffee').transform(coffeeify).bundle()
.pipe(source('test.js'))
.pipe(gulp.dest('./test/'))
gulp.task 'test', ['test-build'], (done) ->
karma.start(_.assign({}, karmaConfiguration, singleRun: true), done)
gulp.task 'test-browser', ['test-build'], (done) ->
karma.start(_.assign({}, karmaConfiguration, singleRun: true, browsers: ['Chrome', 'Firefox']), done)
gulp.task 'dist', ->
version = JSON.parse(fs.readFileSync('package.json')).version
copyright = "/*\n Bacontrap v#{version}\n\n " + fs.readFileSync('LICENSE.txt').toString().split('\n').join('\n ').replace(/\s+$/gm, '\n') + "\n*/"
gulp.src('src/bacontrap.coffee')
.pipe(coffee(bare: true))
.pipe(header(copyright))
.pipe(gulp.dest('./'))
.pipe(uglify())
.pipe(header("/* Bacontrap v#{version}. Copyright 2013 PI:NAME:<NAME>END_PI. https://raw.githubusercontent.com/lautis/bacontrap/master/LICENSE.txt */"))
.pipe(rename('bacontrap.min.js'))
.pipe(gulp.dest('./'))
gulp.task 'default', ['test', 'dist']
|
[
{
"context": "der, apiHost) ->\n $authProvider.tokenPrefix = 'evaluator-sattelizer'\n $authProvider.httpInterceptor = true\n $au",
"end": 118,
"score": 0.8853735327720642,
"start": 98,
"tag": "PASSWORD",
"value": "evaluator-sattelizer"
}
] | frontend/src/authentication.coffee | greysteil/evaluator | 1 | angular.module 'evaluator'
.config ($authProvider, apiHost) ->
$authProvider.tokenPrefix = 'evaluator-sattelizer'
$authProvider.httpInterceptor = true
$authProvider.loginOnSignup = true
$authProvider.storage = 'localStorage'
$authProvider.baseUrl = apiHost
$authProvider.signupUrl = 'users.json'
$authProvider.loginUrl = 'tokens.json'
angular.module 'evaluator'
.run ($rootScope, UserAuth, $analytics) ->
$rootScope.userAuth = UserAuth
$rootScope.$watch 'userAuth.signedIn', (newValue) ->
if newValue
$analytics.setUserProperties {
email: UserAuth.user.email,
name: UserAuth.user.full_name
}
$analytics.setUsername UserAuth.user.id
angular.module 'evaluator'
.run ($rootScope, $state, UserAuth, redirect) ->
# Authorization checks
# applies to states that provide authRule method in their data object
$rootScope.$on '$stateChangeStart', (e, toState, toParams) ->
return if angular.isUndefined toState.data
return if !angular.isFunction toState.data.authRule
authStatus = toState.data.authRule UserAuth
if !authStatus.allowed
redirect.push
state: toState
params: toParams
e.preventDefault()
$state.go authStatus.to, authStatus.params
angular.module 'evaluator'
.run ($rootScope, $state, UserAuth) ->
$rootScope.$on 'unauthorizedResponse', ->
UserAuth.logout()
$state.go 'public.login'
# HTTP Interceptor for 401s (expired)
angular.module 'evaluator'
.config ($httpProvider) ->
$httpProvider.interceptors.push ($q, $rootScope) ->
interceptor =
responseError: (response) ->
if response.status is 401
$rootScope.$emit 'unauthorizedResponse'
$q.reject response
else
$q.reject response | 50558 | angular.module 'evaluator'
.config ($authProvider, apiHost) ->
$authProvider.tokenPrefix = '<PASSWORD>'
$authProvider.httpInterceptor = true
$authProvider.loginOnSignup = true
$authProvider.storage = 'localStorage'
$authProvider.baseUrl = apiHost
$authProvider.signupUrl = 'users.json'
$authProvider.loginUrl = 'tokens.json'
angular.module 'evaluator'
.run ($rootScope, UserAuth, $analytics) ->
$rootScope.userAuth = UserAuth
$rootScope.$watch 'userAuth.signedIn', (newValue) ->
if newValue
$analytics.setUserProperties {
email: UserAuth.user.email,
name: UserAuth.user.full_name
}
$analytics.setUsername UserAuth.user.id
angular.module 'evaluator'
.run ($rootScope, $state, UserAuth, redirect) ->
# Authorization checks
# applies to states that provide authRule method in their data object
$rootScope.$on '$stateChangeStart', (e, toState, toParams) ->
return if angular.isUndefined toState.data
return if !angular.isFunction toState.data.authRule
authStatus = toState.data.authRule UserAuth
if !authStatus.allowed
redirect.push
state: toState
params: toParams
e.preventDefault()
$state.go authStatus.to, authStatus.params
angular.module 'evaluator'
.run ($rootScope, $state, UserAuth) ->
$rootScope.$on 'unauthorizedResponse', ->
UserAuth.logout()
$state.go 'public.login'
# HTTP Interceptor for 401s (expired)
angular.module 'evaluator'
.config ($httpProvider) ->
$httpProvider.interceptors.push ($q, $rootScope) ->
interceptor =
responseError: (response) ->
if response.status is 401
$rootScope.$emit 'unauthorizedResponse'
$q.reject response
else
$q.reject response | true | angular.module 'evaluator'
.config ($authProvider, apiHost) ->
$authProvider.tokenPrefix = 'PI:PASSWORD:<PASSWORD>END_PI'
$authProvider.httpInterceptor = true
$authProvider.loginOnSignup = true
$authProvider.storage = 'localStorage'
$authProvider.baseUrl = apiHost
$authProvider.signupUrl = 'users.json'
$authProvider.loginUrl = 'tokens.json'
angular.module 'evaluator'
.run ($rootScope, UserAuth, $analytics) ->
$rootScope.userAuth = UserAuth
$rootScope.$watch 'userAuth.signedIn', (newValue) ->
if newValue
$analytics.setUserProperties {
email: UserAuth.user.email,
name: UserAuth.user.full_name
}
$analytics.setUsername UserAuth.user.id
angular.module 'evaluator'
.run ($rootScope, $state, UserAuth, redirect) ->
# Authorization checks
# applies to states that provide authRule method in their data object
$rootScope.$on '$stateChangeStart', (e, toState, toParams) ->
return if angular.isUndefined toState.data
return if !angular.isFunction toState.data.authRule
authStatus = toState.data.authRule UserAuth
if !authStatus.allowed
redirect.push
state: toState
params: toParams
e.preventDefault()
$state.go authStatus.to, authStatus.params
angular.module 'evaluator'
.run ($rootScope, $state, UserAuth) ->
$rootScope.$on 'unauthorizedResponse', ->
UserAuth.logout()
$state.go 'public.login'
# HTTP Interceptor for 401s (expired)
angular.module 'evaluator'
.config ($httpProvider) ->
$httpProvider.interceptors.push ($q, $rootScope) ->
interceptor =
responseError: (response) ->
if response.status is 401
$rootScope.$emit 'unauthorizedResponse'
$q.reject response
else
$q.reject response |
[
{
"context": "\n state: ta.attr('string')\n password: ta.attr('string') # Used in sign_up only.\n token: ",
"end": 347,
"score": 0.7127934694290161,
"start": 345,
"tag": "PASSWORD",
"value": "ta"
}
] | src/thinkspace/client/thinkspace-model/app/models/thinkspace/common/user.coffee | sixthedge/cellar | 6 | import ember from 'ember'
import ta from 'totem/ds/associations'
export default ta.Model.extend ta.add(
ta.has_many 'spaces', inverse: ta.to_p('users')
ta.has_many 'space_users'
),
email: ta.attr('string')
first_name: ta.attr('string')
last_name: ta.attr('string')
state: ta.attr('string')
password: ta.attr('string') # Used in sign_up only.
token: ta.attr('string') # Used in sign_up only.
activated_at: ta.attr('date')
profile: ta.attr()
full_name: ember.computed 'first_name', 'last_name', ->
first_name = @get('first_name') or '?'
last_name = @get('last_name') or '?'
"#{first_name} #{last_name}"
sort_name: ember.computed -> "#{@get('last_name')}, #{@get('first_name')}"
html_title: ember.computed -> "#{@get('full_name')} - #{@get('email')}"
first_initial: ember.computed 'first_name', -> @get_initial_from_name(@get('first_name'))
last_initial: ember.computed 'last_name', -> @get_initial_from_name(@get('last_name'))
display_name: ember.computed.reads 'full_name'
initials: ember.computed 'first_name', 'last_name', -> "#{@get('first_initial')} #{@get('last_initial')}"
color_string: ember.computed 'initials', -> "#{@get('initials')}-#{@get('id')}"
color: 'eeeeee'
invitation_status: ember.computed 'state', ->
return 'Yes' if @get('is_active')
return 'No' if @get('is_inactive')
is_active: ember.computed.equal 'state', 'active'
is_inactive: ember.computed.equal 'state', 'inactive'
get_initial_from_name: (name) ->
return '?' unless ember.isPresent(name)
name.charAt(0).capitalize()
# ### Profile
is_student: ember.computed 'profile.roles', ->
@has_profile_role('student')
is_teacher: ember.computed 'profile.roles', ->
@has_profile_role('teacher') or @has_profile_role('instructor')
has_profile_role: (role) ->
roles = @get('profile.roles')
return false unless ember.isPresent(roles)
ember.get(roles, "#{role}")
| 206004 | import ember from 'ember'
import ta from 'totem/ds/associations'
export default ta.Model.extend ta.add(
ta.has_many 'spaces', inverse: ta.to_p('users')
ta.has_many 'space_users'
),
email: ta.attr('string')
first_name: ta.attr('string')
last_name: ta.attr('string')
state: ta.attr('string')
password: <PASSWORD>.attr('string') # Used in sign_up only.
token: ta.attr('string') # Used in sign_up only.
activated_at: ta.attr('date')
profile: ta.attr()
full_name: ember.computed 'first_name', 'last_name', ->
first_name = @get('first_name') or '?'
last_name = @get('last_name') or '?'
"#{first_name} #{last_name}"
sort_name: ember.computed -> "#{@get('last_name')}, #{@get('first_name')}"
html_title: ember.computed -> "#{@get('full_name')} - #{@get('email')}"
first_initial: ember.computed 'first_name', -> @get_initial_from_name(@get('first_name'))
last_initial: ember.computed 'last_name', -> @get_initial_from_name(@get('last_name'))
display_name: ember.computed.reads 'full_name'
initials: ember.computed 'first_name', 'last_name', -> "#{@get('first_initial')} #{@get('last_initial')}"
color_string: ember.computed 'initials', -> "#{@get('initials')}-#{@get('id')}"
color: 'eeeeee'
invitation_status: ember.computed 'state', ->
return 'Yes' if @get('is_active')
return 'No' if @get('is_inactive')
is_active: ember.computed.equal 'state', 'active'
is_inactive: ember.computed.equal 'state', 'inactive'
get_initial_from_name: (name) ->
return '?' unless ember.isPresent(name)
name.charAt(0).capitalize()
# ### Profile
is_student: ember.computed 'profile.roles', ->
@has_profile_role('student')
is_teacher: ember.computed 'profile.roles', ->
@has_profile_role('teacher') or @has_profile_role('instructor')
has_profile_role: (role) ->
roles = @get('profile.roles')
return false unless ember.isPresent(roles)
ember.get(roles, "#{role}")
| true | import ember from 'ember'
import ta from 'totem/ds/associations'
export default ta.Model.extend ta.add(
ta.has_many 'spaces', inverse: ta.to_p('users')
ta.has_many 'space_users'
),
email: ta.attr('string')
first_name: ta.attr('string')
last_name: ta.attr('string')
state: ta.attr('string')
password: PI:PASSWORD:<PASSWORD>END_PI.attr('string') # Used in sign_up only.
token: ta.attr('string') # Used in sign_up only.
activated_at: ta.attr('date')
profile: ta.attr()
full_name: ember.computed 'first_name', 'last_name', ->
first_name = @get('first_name') or '?'
last_name = @get('last_name') or '?'
"#{first_name} #{last_name}"
sort_name: ember.computed -> "#{@get('last_name')}, #{@get('first_name')}"
html_title: ember.computed -> "#{@get('full_name')} - #{@get('email')}"
first_initial: ember.computed 'first_name', -> @get_initial_from_name(@get('first_name'))
last_initial: ember.computed 'last_name', -> @get_initial_from_name(@get('last_name'))
display_name: ember.computed.reads 'full_name'
initials: ember.computed 'first_name', 'last_name', -> "#{@get('first_initial')} #{@get('last_initial')}"
color_string: ember.computed 'initials', -> "#{@get('initials')}-#{@get('id')}"
color: 'eeeeee'
invitation_status: ember.computed 'state', ->
return 'Yes' if @get('is_active')
return 'No' if @get('is_inactive')
is_active: ember.computed.equal 'state', 'active'
is_inactive: ember.computed.equal 'state', 'inactive'
get_initial_from_name: (name) ->
return '?' unless ember.isPresent(name)
name.charAt(0).capitalize()
# ### Profile
is_student: ember.computed 'profile.roles', ->
@has_profile_role('student')
is_teacher: ember.computed 'profile.roles', ->
@has_profile_role('teacher') or @has_profile_role('instructor')
has_profile_role: (role) ->
roles = @get('profile.roles')
return false unless ember.isPresent(roles)
ember.get(roles, "#{role}")
|
[
{
"context": " weird. For example, h_range is passed in twice. Hugo or Joseph should clean it up\n if (resample_o",
"end": 921,
"score": 0.598233699798584,
"start": 917,
"tag": "NAME",
"value": "Hugo"
},
{
"context": " @mget('data_source'),\n #TODO: Joseph -- Get rid of the next four params because we're ",
"end": 1902,
"score": 0.9850670099258423,
"start": 1896,
"tag": "NAME",
"value": "Joseph"
}
] | oldsite/python_viz/bokeh/bokehjs/src/coffee/renderer/glyph/glyph.coffee | amrosado/riskars | 2 |
define [
"underscore",
"common/has_parent",
"common/logging",
"common/plot_widget",
"renderer/properties"
], (_, HasParent, Logging, PlotWidget, Properties) ->
logger = Logging.logger
class GlyphView extends PlotWidget
#TODO: There are glyph sub-type-vs-resample_op concordance issues...
setup_server_data : () ->
serversource = @mget('server_data_source')
# hack, call set data, becuase there are some attrs that we need
# that are in it
data = _.extend({}, @mget('data_source').get('data'), serversource.get('data'))
@mget('data_source').set('data', data)
@set_data(false)
transform_params = serversource.attributes['transform']
resample_op = transform_params['resample']
x_range = @plot_view.frame.get('h_range')
y_range = @plot_view.frame.get('v_range')
#TODO: This is weird. For example, h_range is passed in twice. Hugo or Joseph should clean it up
if (resample_op == 'line1d')
domain = transform_params['domain']
if domain == 'x'
serversource.listen_for_line1d_updates(
@mget('data_source'),
x_range, y_range,
@plot_view.x_range, @plot_view.y_range,
x_range,
@glyph_props.y.field,
@glyph_props.x.field,
[@glyph_props.y.field],
transform_params
)
else
throw new Error("Domains other than 'x' not supported yet.")
else if (resample_op == 'heatmap')
serversource.listen_for_heatmap_updates(
@mget('data_source'),
x_range, y_range,
@plot_view.x_range,
@plot_view.y_range,
transform_params
)
else if (resample_op == 'abstract rendering')
serversource.listen_for_ar_updates(
@plot_view
@mget('data_source'),
#TODO: Joseph -- Get rid of the next four params because we're passing in the plot_view
x_range, y_range,
@plot_view.x_range,
@plot_view.y_range,
transform_params)
else
logger.warn("unknown resample op: '#{resample_op}'")
initialize: (options) ->
super(options)
@need_set_data = true
@glyph_props = @init_glyph(@mget('glyphspec'))
@x_range_name = @mget('x_range_name')
@y_range_name = @mget('y_range_name')
@xmapper = @plot_view.frame.get('x_mappers')[@x_range_name]
@ymapper = @plot_view.frame.get('y_mappers')[@y_range_name]
@have_selection_props = false
if @mget('selection_glyphspec')
spec = _.extend({}, @mget('glyphspec'), @mget('selection_glyphspec'))
@selection_glyphprops = @init_glyph(spec)
@have_selection_props = true
else
@selection_glyphprops = @glyph_props
if @mget('nonselection_glyphspec')
spec = _.extend({}, @mget('glyphspec'), @mget('nonselection_glyphspec'))
@nonselection_glyphprops = @init_glyph(spec)
@have_selection_props = true
else
@nonselection_glyphprops = @glyph_props
if @mget('server_data_source')
@setup_server_data()
@listenTo(this, 'change:server_data_source', @setup_server_data)
init_glyph: (glyphspec) ->
props = {}
if 'line' in @_properties
props['line_properties'] = new Properties.line_properties(@, glyphspec)
if 'fill' in @_properties
props['fill_properties'] = new Properties.fill_properties(@, glyphspec)
if 'text' in @_properties
props['text_properties'] = new Properties.text_properties(@, glyphspec)
glyph_props = new Properties.glyph_properties(@, glyphspec, @_fields, props)
return glyph_props
set_data: (request_render=true) ->
source = @mget('data_source')
for field in @_fields
if field.indexOf(":") > -1
[field, junk] = field.split(":")
@[field] = @glyph_props.source_v_select(field, source)
# special cases
if field == "direction"
values = new Uint8Array(@direction.length)
for i in [0...@direction.length]
dir = @direction[i]
if dir == 'clock' then values[i] = false
else if dir == 'anticlock' then values[i] = true
else values = NaN
@direction = values
if field.indexOf("angle") > -1
@[field] = (-x for x in @[field])
# any additional customization can happen here
if @_set_data?
t0 = Date.now()
@_set_data()
dt = Date.now() - t0
type = @mget('glyphspec').type
id = @mget("id")
logger.debug("#{type} glyph (#{id}): custom _set_data finished in #{dt}ms")
# just use the length of the last added field
len = @[field].length
@all_indices = [0...len]
@have_new_data = true
if request_render
@request_render()
render: () ->
if @need_set_data
@set_data(false)
@need_set_data = false
@_map_data()
if @_mask_data? and (@plot_view.x_range.type != "FactorRange") and (@plot_view.y_range.type != "FactorRange")
indices = @_mask_data()
else
indices = @all_indices
ctx = @plot_view.canvas_view.ctx
ctx.save()
do_render = (ctx, indices, glyph_props) =>
source = @mget('data_source')
if @have_new_data
if glyph_props.fill_properties? and glyph_props.fill_properties.do_fill
glyph_props.fill_properties.set_prop_cache(source)
if glyph_props.line_properties? and glyph_props.line_properties.do_stroke
glyph_props.line_properties.set_prop_cache(source)
if glyph_props.text_properties?
glyph_props.text_properties.set_prop_cache(source)
@_render(ctx, indices, glyph_props)
selected = @mget('data_source').get('selected')
t0 = Date.now()
if selected and selected.length and @have_selection_props
# reset the selection mask
selected_mask = (false for i in @all_indices)
for idx in selected
selected_mask[idx] = true
# intersect/different selection with render mask
selected = new Array()
nonselected = new Array()
for i in indices
if selected_mask[i]
selected.push(i)
else
nonselected.push(i)
do_render(ctx, selected, @selection_glyphprops)
do_render(ctx, nonselected, @nonselection_glyphprops)
else
do_render(ctx, indices, @glyph_props)
dt = Date.now() - t0
type = @mget('glyphspec').type
id = @mget("id")
logger.trace("#{type} glyph (#{id}): do_render calls finished in #{dt}ms")
@have_new_data = false
ctx.restore()
xrange: () ->
return @plot_view.x_range
yrange: () ->
return @plot_view.y_range
bind_bokeh_events: () ->
@listenTo(@model, 'change', @request_render)
@listenTo(@mget('data_source'), 'change', @set_data)
distance_vector: (pt, span_prop_name, position, dilate=false) ->
""" returns an array """
pt_units = @glyph_props[pt].units
span_units = @glyph_props[span_prop_name].units
if pt == 'x' then mapper = @xmapper
else if pt == 'y' then mapper = @ymapper
source = @mget('data_source')
local_select = (prop_name) =>
return @glyph_props.source_v_select(prop_name, source)
span = local_select(span_prop_name)
if span_units == 'screen'
return span
if position == 'center'
halfspan = (d / 2 for d in span)
ptc = local_select(pt)
if pt_units == 'screen'
ptc = mapper.v_map_from_target(ptc)
if typeof(ptc[0]) == 'string'
ptc = mapper.v_map_to_target(ptc)
pt0 = (ptc[i] - halfspan[i] for i in [0...ptc.length])
pt1 = (ptc[i] + halfspan[i] for i in [0...ptc.length])
else
pt0 = local_select(pt)
if pt_units == 'screen'
pt0 = mapper.v_map_from_target(pt0)
pt1 = (pt0[i] + span[i] for i in [0...pt0.length])
spt0 = mapper.v_map_to_target(pt0)
spt1 = mapper.v_map_to_target(pt1)
if dilate
return (Math.ceil(Math.abs(spt1[i] - spt0[i])) for i in [0...spt0.length])
else
return (Math.abs(spt1[i] - spt0[i]) for i in [0...spt0.length])
get_reference_point: () ->
reference_point = @mget('reference_point')
if _.isNumber(reference_point)
return @data[reference_point]
else
return reference_point
draw_legend: (ctx, x0, x1, y0, y1) ->
null
_generic_line_legend: (ctx, x0, x1, y0, y1) ->
reference_point = @get_reference_point() ? 0
line_props = @glyph_props.line_properties
ctx.save()
ctx.beginPath()
ctx.moveTo(x0, (y0 + y1) /2)
ctx.lineTo(x1, (y0 + y1) /2)
if line_props.do_stroke
line_props.set_vectorize(ctx, reference_point)
ctx.stroke()
ctx.restore()
_generic_area_legend: (ctx, x0, x1, y0, y1) ->
reference_point = @get_reference_point() ? 0
indices = [reference_point]
w = Math.abs(x1-x0)
dw = w*0.1
h = Math.abs(y1-y0)
dh = h*0.1
sx0 = x0 + dw
sx1 = x1 - dw
sy0 = y0 + dh
sy1 = y1 - dh
if @glyph_props.fill_properties.do_fill
@glyph_props.fill_properties.set_vectorize(ctx, reference_point)
ctx.fillRect(sx0, sy0, sx1-sx0, sy1-sy0)
if @glyph_props.line_properties.do_stroke
ctx.beginPath()
ctx.rect(sx0, sy0, sx1-sx0, sy1-sy0)
@glyph_props.line_properties.set_vectorize(ctx, reference_point)
ctx.stroke()
hit_test: (geometry) ->
result = null
if geometry.type == "point"
if @_hit_point?
result = @_hit_point(geometry)
else if not @_point_hit_warned?
type = @mget('glyphspec').type
logger.warn("'point' selection not available on #{type} renderer")
@_point_hit_warned = true
else if geometry.type == "rect"
if @_hit_rect?
result = @_hit_rect(geometry)
else if not @_rect_hit_warned?
type = @mget('glyphspec').type
logger.warn("'rect' selection not available on #{type} renderer")
@_rect_hit_warned = true
else
logger.error("unrecognized selection geometry type '#{ geometry.type }'")
return result
class Glyph extends HasParent
defaults: ->
return _.extend {}, super(), {
x_range_name: "default"
y_range_name: "default"
data_source: null
}
display_defaults: ->
return _.extend {}, super(), {
level: 'glyph'
radius_units: 'data'
length_units: 'screen'
angle_units: 'deg'
start_angle_units: 'deg'
end_angle_units: 'deg'
}
return {
"Model": Glyph,
"View": GlyphView
}
| 27641 |
define [
"underscore",
"common/has_parent",
"common/logging",
"common/plot_widget",
"renderer/properties"
], (_, HasParent, Logging, PlotWidget, Properties) ->
logger = Logging.logger
class GlyphView extends PlotWidget
#TODO: There are glyph sub-type-vs-resample_op concordance issues...
setup_server_data : () ->
serversource = @mget('server_data_source')
# hack, call set data, becuase there are some attrs that we need
# that are in it
data = _.extend({}, @mget('data_source').get('data'), serversource.get('data'))
@mget('data_source').set('data', data)
@set_data(false)
transform_params = serversource.attributes['transform']
resample_op = transform_params['resample']
x_range = @plot_view.frame.get('h_range')
y_range = @plot_view.frame.get('v_range')
#TODO: This is weird. For example, h_range is passed in twice. <NAME> or Joseph should clean it up
if (resample_op == 'line1d')
domain = transform_params['domain']
if domain == 'x'
serversource.listen_for_line1d_updates(
@mget('data_source'),
x_range, y_range,
@plot_view.x_range, @plot_view.y_range,
x_range,
@glyph_props.y.field,
@glyph_props.x.field,
[@glyph_props.y.field],
transform_params
)
else
throw new Error("Domains other than 'x' not supported yet.")
else if (resample_op == 'heatmap')
serversource.listen_for_heatmap_updates(
@mget('data_source'),
x_range, y_range,
@plot_view.x_range,
@plot_view.y_range,
transform_params
)
else if (resample_op == 'abstract rendering')
serversource.listen_for_ar_updates(
@plot_view
@mget('data_source'),
#TODO: <NAME> -- Get rid of the next four params because we're passing in the plot_view
x_range, y_range,
@plot_view.x_range,
@plot_view.y_range,
transform_params)
else
logger.warn("unknown resample op: '#{resample_op}'")
initialize: (options) ->
super(options)
@need_set_data = true
@glyph_props = @init_glyph(@mget('glyphspec'))
@x_range_name = @mget('x_range_name')
@y_range_name = @mget('y_range_name')
@xmapper = @plot_view.frame.get('x_mappers')[@x_range_name]
@ymapper = @plot_view.frame.get('y_mappers')[@y_range_name]
@have_selection_props = false
if @mget('selection_glyphspec')
spec = _.extend({}, @mget('glyphspec'), @mget('selection_glyphspec'))
@selection_glyphprops = @init_glyph(spec)
@have_selection_props = true
else
@selection_glyphprops = @glyph_props
if @mget('nonselection_glyphspec')
spec = _.extend({}, @mget('glyphspec'), @mget('nonselection_glyphspec'))
@nonselection_glyphprops = @init_glyph(spec)
@have_selection_props = true
else
@nonselection_glyphprops = @glyph_props
if @mget('server_data_source')
@setup_server_data()
@listenTo(this, 'change:server_data_source', @setup_server_data)
init_glyph: (glyphspec) ->
props = {}
if 'line' in @_properties
props['line_properties'] = new Properties.line_properties(@, glyphspec)
if 'fill' in @_properties
props['fill_properties'] = new Properties.fill_properties(@, glyphspec)
if 'text' in @_properties
props['text_properties'] = new Properties.text_properties(@, glyphspec)
glyph_props = new Properties.glyph_properties(@, glyphspec, @_fields, props)
return glyph_props
set_data: (request_render=true) ->
source = @mget('data_source')
for field in @_fields
if field.indexOf(":") > -1
[field, junk] = field.split(":")
@[field] = @glyph_props.source_v_select(field, source)
# special cases
if field == "direction"
values = new Uint8Array(@direction.length)
for i in [0...@direction.length]
dir = @direction[i]
if dir == 'clock' then values[i] = false
else if dir == 'anticlock' then values[i] = true
else values = NaN
@direction = values
if field.indexOf("angle") > -1
@[field] = (-x for x in @[field])
# any additional customization can happen here
if @_set_data?
t0 = Date.now()
@_set_data()
dt = Date.now() - t0
type = @mget('glyphspec').type
id = @mget("id")
logger.debug("#{type} glyph (#{id}): custom _set_data finished in #{dt}ms")
# just use the length of the last added field
len = @[field].length
@all_indices = [0...len]
@have_new_data = true
if request_render
@request_render()
render: () ->
if @need_set_data
@set_data(false)
@need_set_data = false
@_map_data()
if @_mask_data? and (@plot_view.x_range.type != "FactorRange") and (@plot_view.y_range.type != "FactorRange")
indices = @_mask_data()
else
indices = @all_indices
ctx = @plot_view.canvas_view.ctx
ctx.save()
do_render = (ctx, indices, glyph_props) =>
source = @mget('data_source')
if @have_new_data
if glyph_props.fill_properties? and glyph_props.fill_properties.do_fill
glyph_props.fill_properties.set_prop_cache(source)
if glyph_props.line_properties? and glyph_props.line_properties.do_stroke
glyph_props.line_properties.set_prop_cache(source)
if glyph_props.text_properties?
glyph_props.text_properties.set_prop_cache(source)
@_render(ctx, indices, glyph_props)
selected = @mget('data_source').get('selected')
t0 = Date.now()
if selected and selected.length and @have_selection_props
# reset the selection mask
selected_mask = (false for i in @all_indices)
for idx in selected
selected_mask[idx] = true
# intersect/different selection with render mask
selected = new Array()
nonselected = new Array()
for i in indices
if selected_mask[i]
selected.push(i)
else
nonselected.push(i)
do_render(ctx, selected, @selection_glyphprops)
do_render(ctx, nonselected, @nonselection_glyphprops)
else
do_render(ctx, indices, @glyph_props)
dt = Date.now() - t0
type = @mget('glyphspec').type
id = @mget("id")
logger.trace("#{type} glyph (#{id}): do_render calls finished in #{dt}ms")
@have_new_data = false
ctx.restore()
xrange: () ->
return @plot_view.x_range
yrange: () ->
return @plot_view.y_range
bind_bokeh_events: () ->
@listenTo(@model, 'change', @request_render)
@listenTo(@mget('data_source'), 'change', @set_data)
distance_vector: (pt, span_prop_name, position, dilate=false) ->
""" returns an array """
pt_units = @glyph_props[pt].units
span_units = @glyph_props[span_prop_name].units
if pt == 'x' then mapper = @xmapper
else if pt == 'y' then mapper = @ymapper
source = @mget('data_source')
local_select = (prop_name) =>
return @glyph_props.source_v_select(prop_name, source)
span = local_select(span_prop_name)
if span_units == 'screen'
return span
if position == 'center'
halfspan = (d / 2 for d in span)
ptc = local_select(pt)
if pt_units == 'screen'
ptc = mapper.v_map_from_target(ptc)
if typeof(ptc[0]) == 'string'
ptc = mapper.v_map_to_target(ptc)
pt0 = (ptc[i] - halfspan[i] for i in [0...ptc.length])
pt1 = (ptc[i] + halfspan[i] for i in [0...ptc.length])
else
pt0 = local_select(pt)
if pt_units == 'screen'
pt0 = mapper.v_map_from_target(pt0)
pt1 = (pt0[i] + span[i] for i in [0...pt0.length])
spt0 = mapper.v_map_to_target(pt0)
spt1 = mapper.v_map_to_target(pt1)
if dilate
return (Math.ceil(Math.abs(spt1[i] - spt0[i])) for i in [0...spt0.length])
else
return (Math.abs(spt1[i] - spt0[i]) for i in [0...spt0.length])
get_reference_point: () ->
reference_point = @mget('reference_point')
if _.isNumber(reference_point)
return @data[reference_point]
else
return reference_point
draw_legend: (ctx, x0, x1, y0, y1) ->
null
_generic_line_legend: (ctx, x0, x1, y0, y1) ->
reference_point = @get_reference_point() ? 0
line_props = @glyph_props.line_properties
ctx.save()
ctx.beginPath()
ctx.moveTo(x0, (y0 + y1) /2)
ctx.lineTo(x1, (y0 + y1) /2)
if line_props.do_stroke
line_props.set_vectorize(ctx, reference_point)
ctx.stroke()
ctx.restore()
_generic_area_legend: (ctx, x0, x1, y0, y1) ->
reference_point = @get_reference_point() ? 0
indices = [reference_point]
w = Math.abs(x1-x0)
dw = w*0.1
h = Math.abs(y1-y0)
dh = h*0.1
sx0 = x0 + dw
sx1 = x1 - dw
sy0 = y0 + dh
sy1 = y1 - dh
if @glyph_props.fill_properties.do_fill
@glyph_props.fill_properties.set_vectorize(ctx, reference_point)
ctx.fillRect(sx0, sy0, sx1-sx0, sy1-sy0)
if @glyph_props.line_properties.do_stroke
ctx.beginPath()
ctx.rect(sx0, sy0, sx1-sx0, sy1-sy0)
@glyph_props.line_properties.set_vectorize(ctx, reference_point)
ctx.stroke()
hit_test: (geometry) ->
result = null
if geometry.type == "point"
if @_hit_point?
result = @_hit_point(geometry)
else if not @_point_hit_warned?
type = @mget('glyphspec').type
logger.warn("'point' selection not available on #{type} renderer")
@_point_hit_warned = true
else if geometry.type == "rect"
if @_hit_rect?
result = @_hit_rect(geometry)
else if not @_rect_hit_warned?
type = @mget('glyphspec').type
logger.warn("'rect' selection not available on #{type} renderer")
@_rect_hit_warned = true
else
logger.error("unrecognized selection geometry type '#{ geometry.type }'")
return result
class Glyph extends HasParent
defaults: ->
return _.extend {}, super(), {
x_range_name: "default"
y_range_name: "default"
data_source: null
}
display_defaults: ->
return _.extend {}, super(), {
level: 'glyph'
radius_units: 'data'
length_units: 'screen'
angle_units: 'deg'
start_angle_units: 'deg'
end_angle_units: 'deg'
}
return {
"Model": Glyph,
"View": GlyphView
}
| true |
define [
"underscore",
"common/has_parent",
"common/logging",
"common/plot_widget",
"renderer/properties"
], (_, HasParent, Logging, PlotWidget, Properties) ->
logger = Logging.logger
class GlyphView extends PlotWidget
#TODO: There are glyph sub-type-vs-resample_op concordance issues...
setup_server_data : () ->
serversource = @mget('server_data_source')
# hack, call set data, becuase there are some attrs that we need
# that are in it
data = _.extend({}, @mget('data_source').get('data'), serversource.get('data'))
@mget('data_source').set('data', data)
@set_data(false)
transform_params = serversource.attributes['transform']
resample_op = transform_params['resample']
x_range = @plot_view.frame.get('h_range')
y_range = @plot_view.frame.get('v_range')
#TODO: This is weird. For example, h_range is passed in twice. PI:NAME:<NAME>END_PI or Joseph should clean it up
if (resample_op == 'line1d')
domain = transform_params['domain']
if domain == 'x'
serversource.listen_for_line1d_updates(
@mget('data_source'),
x_range, y_range,
@plot_view.x_range, @plot_view.y_range,
x_range,
@glyph_props.y.field,
@glyph_props.x.field,
[@glyph_props.y.field],
transform_params
)
else
throw new Error("Domains other than 'x' not supported yet.")
else if (resample_op == 'heatmap')
serversource.listen_for_heatmap_updates(
@mget('data_source'),
x_range, y_range,
@plot_view.x_range,
@plot_view.y_range,
transform_params
)
else if (resample_op == 'abstract rendering')
serversource.listen_for_ar_updates(
@plot_view
@mget('data_source'),
#TODO: PI:NAME:<NAME>END_PI -- Get rid of the next four params because we're passing in the plot_view
x_range, y_range,
@plot_view.x_range,
@plot_view.y_range,
transform_params)
else
logger.warn("unknown resample op: '#{resample_op}'")
initialize: (options) ->
super(options)
@need_set_data = true
@glyph_props = @init_glyph(@mget('glyphspec'))
@x_range_name = @mget('x_range_name')
@y_range_name = @mget('y_range_name')
@xmapper = @plot_view.frame.get('x_mappers')[@x_range_name]
@ymapper = @plot_view.frame.get('y_mappers')[@y_range_name]
@have_selection_props = false
if @mget('selection_glyphspec')
spec = _.extend({}, @mget('glyphspec'), @mget('selection_glyphspec'))
@selection_glyphprops = @init_glyph(spec)
@have_selection_props = true
else
@selection_glyphprops = @glyph_props
if @mget('nonselection_glyphspec')
spec = _.extend({}, @mget('glyphspec'), @mget('nonselection_glyphspec'))
@nonselection_glyphprops = @init_glyph(spec)
@have_selection_props = true
else
@nonselection_glyphprops = @glyph_props
if @mget('server_data_source')
@setup_server_data()
@listenTo(this, 'change:server_data_source', @setup_server_data)
init_glyph: (glyphspec) ->
props = {}
if 'line' in @_properties
props['line_properties'] = new Properties.line_properties(@, glyphspec)
if 'fill' in @_properties
props['fill_properties'] = new Properties.fill_properties(@, glyphspec)
if 'text' in @_properties
props['text_properties'] = new Properties.text_properties(@, glyphspec)
glyph_props = new Properties.glyph_properties(@, glyphspec, @_fields, props)
return glyph_props
set_data: (request_render=true) ->
source = @mget('data_source')
for field in @_fields
if field.indexOf(":") > -1
[field, junk] = field.split(":")
@[field] = @glyph_props.source_v_select(field, source)
# special cases
if field == "direction"
values = new Uint8Array(@direction.length)
for i in [0...@direction.length]
dir = @direction[i]
if dir == 'clock' then values[i] = false
else if dir == 'anticlock' then values[i] = true
else values = NaN
@direction = values
if field.indexOf("angle") > -1
@[field] = (-x for x in @[field])
# any additional customization can happen here
if @_set_data?
t0 = Date.now()
@_set_data()
dt = Date.now() - t0
type = @mget('glyphspec').type
id = @mget("id")
logger.debug("#{type} glyph (#{id}): custom _set_data finished in #{dt}ms")
# just use the length of the last added field
len = @[field].length
@all_indices = [0...len]
@have_new_data = true
if request_render
@request_render()
render: () ->
if @need_set_data
@set_data(false)
@need_set_data = false
@_map_data()
if @_mask_data? and (@plot_view.x_range.type != "FactorRange") and (@plot_view.y_range.type != "FactorRange")
indices = @_mask_data()
else
indices = @all_indices
ctx = @plot_view.canvas_view.ctx
ctx.save()
do_render = (ctx, indices, glyph_props) =>
source = @mget('data_source')
if @have_new_data
if glyph_props.fill_properties? and glyph_props.fill_properties.do_fill
glyph_props.fill_properties.set_prop_cache(source)
if glyph_props.line_properties? and glyph_props.line_properties.do_stroke
glyph_props.line_properties.set_prop_cache(source)
if glyph_props.text_properties?
glyph_props.text_properties.set_prop_cache(source)
@_render(ctx, indices, glyph_props)
selected = @mget('data_source').get('selected')
t0 = Date.now()
if selected and selected.length and @have_selection_props
# reset the selection mask
selected_mask = (false for i in @all_indices)
for idx in selected
selected_mask[idx] = true
# intersect/different selection with render mask
selected = new Array()
nonselected = new Array()
for i in indices
if selected_mask[i]
selected.push(i)
else
nonselected.push(i)
do_render(ctx, selected, @selection_glyphprops)
do_render(ctx, nonselected, @nonselection_glyphprops)
else
do_render(ctx, indices, @glyph_props)
dt = Date.now() - t0
type = @mget('glyphspec').type
id = @mget("id")
logger.trace("#{type} glyph (#{id}): do_render calls finished in #{dt}ms")
@have_new_data = false
ctx.restore()
xrange: () ->
return @plot_view.x_range
yrange: () ->
return @plot_view.y_range
bind_bokeh_events: () ->
@listenTo(@model, 'change', @request_render)
@listenTo(@mget('data_source'), 'change', @set_data)
distance_vector: (pt, span_prop_name, position, dilate=false) ->
""" returns an array """
pt_units = @glyph_props[pt].units
span_units = @glyph_props[span_prop_name].units
if pt == 'x' then mapper = @xmapper
else if pt == 'y' then mapper = @ymapper
source = @mget('data_source')
local_select = (prop_name) =>
return @glyph_props.source_v_select(prop_name, source)
span = local_select(span_prop_name)
if span_units == 'screen'
return span
if position == 'center'
halfspan = (d / 2 for d in span)
ptc = local_select(pt)
if pt_units == 'screen'
ptc = mapper.v_map_from_target(ptc)
if typeof(ptc[0]) == 'string'
ptc = mapper.v_map_to_target(ptc)
pt0 = (ptc[i] - halfspan[i] for i in [0...ptc.length])
pt1 = (ptc[i] + halfspan[i] for i in [0...ptc.length])
else
pt0 = local_select(pt)
if pt_units == 'screen'
pt0 = mapper.v_map_from_target(pt0)
pt1 = (pt0[i] + span[i] for i in [0...pt0.length])
spt0 = mapper.v_map_to_target(pt0)
spt1 = mapper.v_map_to_target(pt1)
if dilate
return (Math.ceil(Math.abs(spt1[i] - spt0[i])) for i in [0...spt0.length])
else
return (Math.abs(spt1[i] - spt0[i]) for i in [0...spt0.length])
get_reference_point: () ->
reference_point = @mget('reference_point')
if _.isNumber(reference_point)
return @data[reference_point]
else
return reference_point
draw_legend: (ctx, x0, x1, y0, y1) ->
null
_generic_line_legend: (ctx, x0, x1, y0, y1) ->
reference_point = @get_reference_point() ? 0
line_props = @glyph_props.line_properties
ctx.save()
ctx.beginPath()
ctx.moveTo(x0, (y0 + y1) /2)
ctx.lineTo(x1, (y0 + y1) /2)
if line_props.do_stroke
line_props.set_vectorize(ctx, reference_point)
ctx.stroke()
ctx.restore()
_generic_area_legend: (ctx, x0, x1, y0, y1) ->
reference_point = @get_reference_point() ? 0
indices = [reference_point]
w = Math.abs(x1-x0)
dw = w*0.1
h = Math.abs(y1-y0)
dh = h*0.1
sx0 = x0 + dw
sx1 = x1 - dw
sy0 = y0 + dh
sy1 = y1 - dh
if @glyph_props.fill_properties.do_fill
@glyph_props.fill_properties.set_vectorize(ctx, reference_point)
ctx.fillRect(sx0, sy0, sx1-sx0, sy1-sy0)
if @glyph_props.line_properties.do_stroke
ctx.beginPath()
ctx.rect(sx0, sy0, sx1-sx0, sy1-sy0)
@glyph_props.line_properties.set_vectorize(ctx, reference_point)
ctx.stroke()
hit_test: (geometry) ->
result = null
if geometry.type == "point"
if @_hit_point?
result = @_hit_point(geometry)
else if not @_point_hit_warned?
type = @mget('glyphspec').type
logger.warn("'point' selection not available on #{type} renderer")
@_point_hit_warned = true
else if geometry.type == "rect"
if @_hit_rect?
result = @_hit_rect(geometry)
else if not @_rect_hit_warned?
type = @mget('glyphspec').type
logger.warn("'rect' selection not available on #{type} renderer")
@_rect_hit_warned = true
else
logger.error("unrecognized selection geometry type '#{ geometry.type }'")
return result
# Model half of a glyph renderer: configuration shared by all glyph
# types. The rendering logic lives in GlyphView above.
class Glyph extends HasParent
  # Model attribute defaults: which plot ranges to map coordinates
  # against, and the data source to render from.
  defaults: ->
    return _.extend {}, super(), {
      x_range_name: "default"
      y_range_name: "default"
      data_source: null
    }
  # Display/styling defaults: render level and the units used to
  # interpret distance and angle fields.
  display_defaults: ->
    return _.extend {}, super(), {
      level: 'glyph'
      radius_units: 'data'
      length_units: 'screen'
      angle_units: 'deg'
      start_angle_units: 'deg'
      end_angle_units: 'deg'
    }
# Module exports: the Glyph model and its corresponding view.
return {
  "Model": Glyph,
  "View": GlyphView
}
|
[
{
"context": " appId: process.env.MICROSOFT_APP_ID, appPassword: process.env.MICROSOFT_APP_PASSWORD }\n \n @server.post '/api/messages', @connect",
"end": 1049,
"score": 0.9980911612510681,
"start": 1015,
"tag": "PASSWORD",
"value": "process.env.MICROSOFT_APP_PASSWORD"
}
] | src/botframework.coffee | jasoncylam/hubot-botframework | 0 | {Robot, Adapter, TextMessage, User} = require.main.require 'hubot'
restify = require 'restify'
builder = require 'botbuilder'
class BotFrameworkBot extends Adapter
constructor: ->
super
@server = restify.createServer()
send: (envelope, strings...) ->
session = @robot.brain.get envelope.message.id
session.send strings...
return
reply: (envelope, strings...) ->
session = @robot.brain.get envelope.message.id
session.send strings...
return
run: ->
@robot.logger.info "Run"
return @robot.logger.error "No Microsoft App ID provided to Hubot" unless process.env.MICROSOFT_APP_ID
return @robot.logger.error "No Microsoft App Password provided to Hubot" unless process.env.MICROSOFT_APP_PASSWORD
@server.listen process.env.port or process.env.PORT or 3978, =>
@robot.logger.info "#{@server.name} listening to #{@server.url}"
return
@connector = new builder.ChatConnector { appId: process.env.MICROSOFT_APP_ID, appPassword: process.env.MICROSOFT_APP_PASSWORD }
@server.post '/api/messages', @connector.listen()
@emit "connected"
bot = new builder.UniversalBot @connector, (session) =>
@robot.brain.set session.message.address.id, session
user = new User session.message.user.id, { name : session.message.user.name }
message = new TextMessage user, session.message.text, session.message.address.id
@robot.receive message
return
return
exports.use = (robot) ->
new BotFrameworkBot robot | 27116 | {Robot, Adapter, TextMessage, User} = require.main.require 'hubot'
restify = require 'restify'
builder = require 'botbuilder'
class BotFrameworkBot extends Adapter
constructor: ->
super
@server = restify.createServer()
send: (envelope, strings...) ->
session = @robot.brain.get envelope.message.id
session.send strings...
return
reply: (envelope, strings...) ->
session = @robot.brain.get envelope.message.id
session.send strings...
return
run: ->
@robot.logger.info "Run"
return @robot.logger.error "No Microsoft App ID provided to Hubot" unless process.env.MICROSOFT_APP_ID
return @robot.logger.error "No Microsoft App Password provided to Hubot" unless process.env.MICROSOFT_APP_PASSWORD
@server.listen process.env.port or process.env.PORT or 3978, =>
@robot.logger.info "#{@server.name} listening to #{@server.url}"
return
@connector = new builder.ChatConnector { appId: process.env.MICROSOFT_APP_ID, appPassword: <PASSWORD> }
@server.post '/api/messages', @connector.listen()
@emit "connected"
bot = new builder.UniversalBot @connector, (session) =>
@robot.brain.set session.message.address.id, session
user = new User session.message.user.id, { name : session.message.user.name }
message = new TextMessage user, session.message.text, session.message.address.id
@robot.receive message
return
return
exports.use = (robot) ->
new BotFrameworkBot robot | true | {Robot, Adapter, TextMessage, User} = require.main.require 'hubot'
restify = require 'restify'
builder = require 'botbuilder'
class BotFrameworkBot extends Adapter
constructor: ->
super
@server = restify.createServer()
send: (envelope, strings...) ->
session = @robot.brain.get envelope.message.id
session.send strings...
return
reply: (envelope, strings...) ->
session = @robot.brain.get envelope.message.id
session.send strings...
return
run: ->
@robot.logger.info "Run"
return @robot.logger.error "No Microsoft App ID provided to Hubot" unless process.env.MICROSOFT_APP_ID
return @robot.logger.error "No Microsoft App Password provided to Hubot" unless process.env.MICROSOFT_APP_PASSWORD
@server.listen process.env.port or process.env.PORT or 3978, =>
@robot.logger.info "#{@server.name} listening to #{@server.url}"
return
@connector = new builder.ChatConnector { appId: process.env.MICROSOFT_APP_ID, appPassword: PI:PASSWORD:<PASSWORD>END_PI }
@server.post '/api/messages', @connector.listen()
@emit "connected"
bot = new builder.UniversalBot @connector, (session) =>
@robot.brain.set session.message.address.id, session
user = new User session.message.user.id, { name : session.message.user.name }
message = new TextMessage user, session.message.text, session.message.address.id
@robot.receive message
return
return
exports.use = (robot) ->
new BotFrameworkBot robot |
[
{
"context": "roduct(id: 10)\n product.updateAttributes {name: \"foobar\", id: 20}\n equal product.get('id'), 20\n equal p",
"end": 794,
"score": 0.9936458468437195,
"start": 788,
"tag": "NAME",
"value": "foobar"
},
{
"context": "oduct.get('id'), 20\n equal product.get('name'), \"foobar\"\n\ntest \"updateAttributes will returns the updated",
"end": 871,
"score": 0.9572513699531555,
"start": 865,
"tag": "NAME",
"value": "foobar"
},
{
"context": " equal product, product.updateAttributes {name: \"foobar\", id: 20}\n\ntest \"primary key can be changed by se",
"end": 1023,
"score": 0.9957185983657837,
"start": 1017,
"tag": "NAME",
"value": "foobar"
}
] | tests/batman/model/model_test.coffee | nickjs/batman | 1 | QUnit.module "Batman.Model",
setup: ->
class @Product extends Batman.Model
test "constructors should always be called with new", ->
Product = @Product
raises (-> product = Product()),
(message) -> ok message; true
Namespace = Product: Product
raises (-> product = Namespace.Product()),
(message) -> ok message; true
product = new Namespace.Product()
ok product instanceof Product
test "primary key is undefined on new models", ->
product = new @Product
ok product.isNew()
equal typeof product.get('id'), 'undefined'
test "primary key is 'id' by default", ->
product = new @Product(id: 10)
equal product.get('id'), 10
test "updateAttributes will update a model's attributes", ->
product = new @Product(id: 10)
product.updateAttributes {name: "foobar", id: 20}
equal product.get('id'), 20
equal product.get('name'), "foobar"
test "updateAttributes will returns the updated record", ->
product = new @Product(id: 10)
equal product, product.updateAttributes {name: "foobar", id: 20}
test "primary key can be changed by setting primary key on the model class", ->
@Product.primaryKey = 'uuid'
product = new @Product(uuid: "abc123")
equal product.get('id'), 'abc123'
test 'the \'state\' key should be a valid attribute name', ->
p = new @Product(state: "silly")
equal p.get('state'), "silly"
equal p.state(), "dirty"
test 'the \'batmanState\' key should be gettable and report the internal state', ->
p = new @Product(state: "silly")
equal p.state(), "dirty"
equal p.get('batmanState'), "dirty"
test 'the instantiated storage adapter should be returned when persisting', ->
returned = false
class TestStorageAdapter extends Batman.StorageAdapter
isTestStorageAdapter: true
class Product extends Batman.Model
returned = @persist TestStorageAdapter
ok returned.isTestStorageAdapter
test 'the array of instantiated storage adapters should be returned when persisting', ->
[a, b, c] = [false, false, false]
class TestStorageAdapter extends Batman.StorageAdapter
isTestStorageAdapter: true
class Product extends Batman.Model
[a,b,c] = @persist TestStorageAdapter, TestStorageAdapter, TestStorageAdapter
for instance in [a,b,c]
ok instance.isTestStorageAdapter
| 163420 | QUnit.module "Batman.Model",
setup: ->
class @Product extends Batman.Model
test "constructors should always be called with new", ->
Product = @Product
raises (-> product = Product()),
(message) -> ok message; true
Namespace = Product: Product
raises (-> product = Namespace.Product()),
(message) -> ok message; true
product = new Namespace.Product()
ok product instanceof Product
test "primary key is undefined on new models", ->
product = new @Product
ok product.isNew()
equal typeof product.get('id'), 'undefined'
test "primary key is 'id' by default", ->
product = new @Product(id: 10)
equal product.get('id'), 10
test "updateAttributes will update a model's attributes", ->
product = new @Product(id: 10)
product.updateAttributes {name: "<NAME>", id: 20}
equal product.get('id'), 20
equal product.get('name'), "<NAME>"
test "updateAttributes will returns the updated record", ->
product = new @Product(id: 10)
equal product, product.updateAttributes {name: "<NAME>", id: 20}
test "primary key can be changed by setting primary key on the model class", ->
@Product.primaryKey = 'uuid'
product = new @Product(uuid: "abc123")
equal product.get('id'), 'abc123'
test 'the \'state\' key should be a valid attribute name', ->
p = new @Product(state: "silly")
equal p.get('state'), "silly"
equal p.state(), "dirty"
test 'the \'batmanState\' key should be gettable and report the internal state', ->
p = new @Product(state: "silly")
equal p.state(), "dirty"
equal p.get('batmanState'), "dirty"
test 'the instantiated storage adapter should be returned when persisting', ->
returned = false
class TestStorageAdapter extends Batman.StorageAdapter
isTestStorageAdapter: true
class Product extends Batman.Model
returned = @persist TestStorageAdapter
ok returned.isTestStorageAdapter
test 'the array of instantiated storage adapters should be returned when persisting', ->
[a, b, c] = [false, false, false]
class TestStorageAdapter extends Batman.StorageAdapter
isTestStorageAdapter: true
class Product extends Batman.Model
[a,b,c] = @persist TestStorageAdapter, TestStorageAdapter, TestStorageAdapter
for instance in [a,b,c]
ok instance.isTestStorageAdapter
| true | QUnit.module "Batman.Model",
setup: ->
class @Product extends Batman.Model
test "constructors should always be called with new", ->
Product = @Product
raises (-> product = Product()),
(message) -> ok message; true
Namespace = Product: Product
raises (-> product = Namespace.Product()),
(message) -> ok message; true
product = new Namespace.Product()
ok product instanceof Product
test "primary key is undefined on new models", ->
product = new @Product
ok product.isNew()
equal typeof product.get('id'), 'undefined'
test "primary key is 'id' by default", ->
product = new @Product(id: 10)
equal product.get('id'), 10
test "updateAttributes will update a model's attributes", ->
product = new @Product(id: 10)
product.updateAttributes {name: "PI:NAME:<NAME>END_PI", id: 20}
equal product.get('id'), 20
equal product.get('name'), "PI:NAME:<NAME>END_PI"
test "updateAttributes will returns the updated record", ->
product = new @Product(id: 10)
equal product, product.updateAttributes {name: "PI:NAME:<NAME>END_PI", id: 20}
test "primary key can be changed by setting primary key on the model class", ->
@Product.primaryKey = 'uuid'
product = new @Product(uuid: "abc123")
equal product.get('id'), 'abc123'
test 'the \'state\' key should be a valid attribute name', ->
p = new @Product(state: "silly")
equal p.get('state'), "silly"
equal p.state(), "dirty"
test 'the \'batmanState\' key should be gettable and report the internal state', ->
p = new @Product(state: "silly")
equal p.state(), "dirty"
equal p.get('batmanState'), "dirty"
test 'the instantiated storage adapter should be returned when persisting', ->
returned = false
class TestStorageAdapter extends Batman.StorageAdapter
isTestStorageAdapter: true
class Product extends Batman.Model
returned = @persist TestStorageAdapter
ok returned.isTestStorageAdapter
test 'the array of instantiated storage adapters should be returned when persisting', ->
[a, b, c] = [false, false, false]
class TestStorageAdapter extends Batman.StorageAdapter
isTestStorageAdapter: true
class Product extends Batman.Model
[a,b,c] = @persist TestStorageAdapter, TestStorageAdapter, TestStorageAdapter
for instance in [a,b,c]
ok instance.isTestStorageAdapter
|
[
{
"context": "\n * @namespace KINOUT\n * @class Step\n *\n * @author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\nKINOUT.Step = ",
"end": 111,
"score": 0.9998881816864014,
"start": 90,
"tag": "NAME",
"value": "Javier Jimenez Villar"
},
{
"context": " @class Step\n *\n * @author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\nKINOUT.Step = ((knt, undefined_",
"end": 128,
"score": 0.9999361634254456,
"start": 113,
"tag": "EMAIL",
"value": "javi@tapquo.com"
},
{
"context": "@author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\nKINOUT.Step = ((knt, undefined_) ->\n\n",
"end": 132,
"score": 0.5753704905509949,
"start": 132,
"tag": "USERNAME",
"value": ""
},
{
"context": "hor Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\nKINOUT.Step = ((knt, undefined_) ->\n\n ###",
"end": 141,
"score": 0.7734768390655518,
"start": 136,
"tag": "USERNAME",
"value": "yjavi"
}
] | components/Kinout/src/Kinout.Step.coffee | biojazzard/kirbout | 2 | ###
* Description or Responsability
*
* @namespace KINOUT
* @class Step
*
* @author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi
###
KINOUT.Step = ((knt, undefined_) ->
###
###
show = -> @
###
###
hide = (horizontal_idx, vertical_idx) -> #
show: show
hide: hide
)(KINOUT) | 51751 | ###
* Description or Responsability
*
* @namespace KINOUT
* @class Step
*
* @author <NAME> <<EMAIL>> || @soyjavi
###
KINOUT.Step = ((knt, undefined_) ->
###
###
show = -> @
###
###
hide = (horizontal_idx, vertical_idx) -> #
show: show
hide: hide
)(KINOUT) | true | ###
* Description or Responsability
*
* @namespace KINOUT
* @class Step
*
* @author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> || @soyjavi
###
KINOUT.Step = ((knt, undefined_) ->
###
###
show = -> @
###
###
hide = (horizontal_idx, vertical_idx) -> #
show: show
hide: hide
)(KINOUT) |
[
{
"context": "# Simple Diff for Python v0.1\r\n# (C) Paul Butler 2008 <http://www.paulbutler.org/>\r\ndiff = (before",
"end": 48,
"score": 0.9998023509979248,
"start": 37,
"tag": "NAME",
"value": "Paul Butler"
}
] | coffeescript/simplediff.coffee | djmattyg007/simplediff | 247 | # Simple Diff for Python v0.1
# (C) Paul Butler 2008 <http://www.paulbutler.org/>
diff = (before, after) ->
# Find the differences between two lists. Returns a list of pairs, where the first value
# is in ['+','-','='] and represents an insertion, deletion, or no change for that list.
# The second value of the pair is the element.
# Build a hash map with elements from before as keys, and
# a list of indexes as values
ohash = {}
for val, i in before
if val not of ohash
ohash[val] = []
ohash[val].push i
# Find the largest substring common to before and after
lastRow = (0 for i in [0 ... before.length])
subStartBefore = subStartAfter = subLength = 0
for val, j in after
thisRow = (0 for i in [0 ... before.length])
for k in ohash[val] ? []
thisRow[k] = (if k and lastRow[k - 1] then 1 else 0) + 1
if thisRow[k] > subLength
subLength = thisRow[k]
subStartBefore = k - subLength + 1
subStartAfter = j - subLength + 1
lastRow = thisRow
# If no common substring is found, assume that an insert and
# delete has taken place
if subLength == 0
[].concat(
(if before.length then [['-', before]] else []),
(if after.length then [['+', after]] else []),
)
# Otherwise, the common substring is considered to have no change, and we recurse
# on the text before and after the substring
else
[].concat(
diff(before[...subStartBefore], after[...subStartAfter]),
[['=', after[subStartAfter...subStartAfter + subLength]]],
diff(before[subStartBefore + subLength...], after[subStartAfter + subLength...])
)
# The below functions are intended for simple tests and experimentation; you will want to write more sophisticated wrapper functions for real use
stringDiff = (before, after) ->
# Returns the difference between the before and after strings when split on whitespace. Considers punctuation a part of the word
diff(before.split(/[ ]+/), after.split(/[ ]+/))
htmlDiff = (before, after) ->
# Returns the difference between two strings (as in stringDiff) in HTML format.
con =
'=': ((x) -> x),
'+': ((x) -> '<ins>' + x + '</ins>'),
'-': ((x) -> '<del>' + x + '</del>')
((con[a])(b.join ' ') for [a, b] in stringDiff(before, after)).join ' '
#Examples:
#console.log htmlDiff('The world is a tragedy to those who feel, but a comedy to those who think',
# 'Life is a tragedy for those who feel, and a comedy to those who think') # Horace Walpole
#console.log htmlDiff('I have often regretted my speech, never my silence',
# 'I have regretted my speech often, my silence never') # Xenocrates
| 60365 | # Simple Diff for Python v0.1
# (C) <NAME> 2008 <http://www.paulbutler.org/>
diff = (before, after) ->
# Find the differences between two lists. Returns a list of pairs, where the first value
# is in ['+','-','='] and represents an insertion, deletion, or no change for that list.
# The second value of the pair is the element.
# Build a hash map with elements from before as keys, and
# a list of indexes as values
ohash = {}
for val, i in before
if val not of ohash
ohash[val] = []
ohash[val].push i
# Find the largest substring common to before and after
lastRow = (0 for i in [0 ... before.length])
subStartBefore = subStartAfter = subLength = 0
for val, j in after
thisRow = (0 for i in [0 ... before.length])
for k in ohash[val] ? []
thisRow[k] = (if k and lastRow[k - 1] then 1 else 0) + 1
if thisRow[k] > subLength
subLength = thisRow[k]
subStartBefore = k - subLength + 1
subStartAfter = j - subLength + 1
lastRow = thisRow
# If no common substring is found, assume that an insert and
# delete has taken place
if subLength == 0
[].concat(
(if before.length then [['-', before]] else []),
(if after.length then [['+', after]] else []),
)
# Otherwise, the common substring is considered to have no change, and we recurse
# on the text before and after the substring
else
[].concat(
diff(before[...subStartBefore], after[...subStartAfter]),
[['=', after[subStartAfter...subStartAfter + subLength]]],
diff(before[subStartBefore + subLength...], after[subStartAfter + subLength...])
)
# The below functions are intended for simple tests and experimentation; you will want to write more sophisticated wrapper functions for real use
stringDiff = (before, after) ->
# Returns the difference between the before and after strings when split on whitespace. Considers punctuation a part of the word
diff(before.split(/[ ]+/), after.split(/[ ]+/))
htmlDiff = (before, after) ->
# Returns the difference between two strings (as in stringDiff) in HTML format.
con =
'=': ((x) -> x),
'+': ((x) -> '<ins>' + x + '</ins>'),
'-': ((x) -> '<del>' + x + '</del>')
((con[a])(b.join ' ') for [a, b] in stringDiff(before, after)).join ' '
#Examples:
#console.log htmlDiff('The world is a tragedy to those who feel, but a comedy to those who think',
# 'Life is a tragedy for those who feel, and a comedy to those who think') # Horace Walpole
#console.log htmlDiff('I have often regretted my speech, never my silence',
# 'I have regretted my speech often, my silence never') # Xenocrates
| true | # Simple Diff for Python v0.1
# (C) PI:NAME:<NAME>END_PI 2008 <http://www.paulbutler.org/>
diff = (before, after) ->
# Find the differences between two lists. Returns a list of pairs, where the first value
# is in ['+','-','='] and represents an insertion, deletion, or no change for that list.
# The second value of the pair is the element.
# Build a hash map with elements from before as keys, and
# a list of indexes as values
ohash = {}
for val, i in before
if val not of ohash
ohash[val] = []
ohash[val].push i
# Find the largest substring common to before and after
lastRow = (0 for i in [0 ... before.length])
subStartBefore = subStartAfter = subLength = 0
for val, j in after
thisRow = (0 for i in [0 ... before.length])
for k in ohash[val] ? []
thisRow[k] = (if k and lastRow[k - 1] then 1 else 0) + 1
if thisRow[k] > subLength
subLength = thisRow[k]
subStartBefore = k - subLength + 1
subStartAfter = j - subLength + 1
lastRow = thisRow
# If no common substring is found, assume that an insert and
# delete has taken place
if subLength == 0
[].concat(
(if before.length then [['-', before]] else []),
(if after.length then [['+', after]] else []),
)
# Otherwise, the common substring is considered to have no change, and we recurse
# on the text before and after the substring
else
[].concat(
diff(before[...subStartBefore], after[...subStartAfter]),
[['=', after[subStartAfter...subStartAfter + subLength]]],
diff(before[subStartBefore + subLength...], after[subStartAfter + subLength...])
)
# The below functions are intended for simple tests and experimentation; you will want to write more sophisticated wrapper functions for real use
stringDiff = (before, after) ->
# Returns the difference between the before and after strings when split on whitespace. Considers punctuation a part of the word
diff(before.split(/[ ]+/), after.split(/[ ]+/))
htmlDiff = (before, after) ->
# Returns the difference between two strings (as in stringDiff) in HTML format.
con =
'=': ((x) -> x),
'+': ((x) -> '<ins>' + x + '</ins>'),
'-': ((x) -> '<del>' + x + '</del>')
((con[a])(b.join ' ') for [a, b] in stringDiff(before, after)).join ' '
#Examples:
#console.log htmlDiff('The world is a tragedy to those who feel, but a comedy to those who think',
# 'Life is a tragedy for those who feel, and a comedy to those who think') # Horace Walpole
#console.log htmlDiff('I have often regretted my speech, never my silence',
# 'I have regretted my speech often, my silence never') # Xenocrates
|
[
{
"context": "e\n banner: \"/* Chrome YouTube Resizer (c) Daniel Davison & Contributors (http://github.com/ddavison/chrome",
"end": 1618,
"score": 0.9995551109313965,
"start": 1604,
"tag": "NAME",
"value": "Daniel Davison"
},
{
"context": " Daniel Davison & Contributors (http://github.com/ddavison/chrome-youtube-resizer) */\"\n\n\n # zip up everyt",
"end": 1661,
"score": 0.8349013328552246,
"start": 1653,
"tag": "USERNAME",
"value": "ddavison"
}
] | Gruntfile.coffee | ddavison/chrome-youtube-resizer | 3 | module.exports = (grunt) ->
grunt.initConfig(
pkg: grunt.file.readJSON("package.json")
srcDir: "./src"
srcDirScss: "<%= srcDir %>/scss"
srcDirCoffee: "<%= srcDir %>/coffee"
srcDirImages: "<%= srcDir %>/images"
outputDir: "./dist"
cssOutput: "<%= outputDir %>/css"
jsOutput: "<%= outputDir %>/js"
imagesOutput: "<%= outputDir %>/images"
cssRequestPath: "/css"
jsRequestPath: "/js"
compass:
dist:
options:
sassDir: "<%= srcDirScss %>"
cssDir: "<%= cssOutput %>"
outputStyle: 'compact'
coffee:
production:
expand:true
cwd: "<%= srcDir %>"
src: ["**/*.coffee"]
dest: "<%= jsOutput %>"
ext: ".js"
watch:
coffee:
files: "<%= srcDirCoffee %>/**/*.coffee"
tasks: ["coffee:development"]
css:
files: "<%= srcDirScss %>/**/*.scss"
tasks: ["compass:dist"]
copy:
manifest:
files: [{
expand: true,
src: ['manifest.json'],
dest: '<%= outputDir %>'
}
]
images:
files: [{
expand: true
cwd: '<%= srcDirImages %>/',
src: ['*'],
dest: '<%= imagesOutput %>/'
}]
# third_party:
# files: [{
# expand: true,
# src: ['third-party/**'],
# dest: '<%= outputDir %>'
# }]
uglify:
minify:
files: [
"<%= outputDir %>/js/main.js"
]
options:
compress: true
banner: "/* Chrome YouTube Resizer (c) Daniel Davison & Contributors (http://github.com/ddavison/chrome-youtube-resizer) */"
# zip up everything for release
compress:
extension:
options:
mode: 'zip'
archive: '<%= pkg.name %>-<%= pkg.version %>.zip'
expand: true
src: ['**/*']
cwd: 'dist/'
clean: ["<%= outputDir %>"]
)
grunt.loadNpmTasks('grunt-contrib-haml')
grunt.loadNpmTasks('grunt-contrib-compass')
grunt.loadNpmTasks('grunt-contrib-coffee')
grunt.loadNpmTasks('grunt-contrib-clean')
grunt.loadNpmTasks('grunt-contrib-copy')
grunt.loadNpmTasks('grunt-contrib-uglify')
grunt.loadNpmTasks('grunt-shell')
grunt.loadNpmTasks('grunt-contrib-compress')
grunt.registerTask('default', [
'clean', # clean the distribution directory
'coffee:production', # compile the coffeescript
'compass:dist', # compile the sass
'copy:manifest', # copy the chrome manifest
'copy:images', # copy the png resize button
'copy:third_party', # copy all third party sources that are needed
])
grunt.registerTask('release', ->
grunt.task.run('default')
grunt.task.run('compress:extension')
)
| 8212 | module.exports = (grunt) ->
grunt.initConfig(
pkg: grunt.file.readJSON("package.json")
srcDir: "./src"
srcDirScss: "<%= srcDir %>/scss"
srcDirCoffee: "<%= srcDir %>/coffee"
srcDirImages: "<%= srcDir %>/images"
outputDir: "./dist"
cssOutput: "<%= outputDir %>/css"
jsOutput: "<%= outputDir %>/js"
imagesOutput: "<%= outputDir %>/images"
cssRequestPath: "/css"
jsRequestPath: "/js"
compass:
dist:
options:
sassDir: "<%= srcDirScss %>"
cssDir: "<%= cssOutput %>"
outputStyle: 'compact'
coffee:
production:
expand:true
cwd: "<%= srcDir %>"
src: ["**/*.coffee"]
dest: "<%= jsOutput %>"
ext: ".js"
watch:
coffee:
files: "<%= srcDirCoffee %>/**/*.coffee"
tasks: ["coffee:development"]
css:
files: "<%= srcDirScss %>/**/*.scss"
tasks: ["compass:dist"]
copy:
manifest:
files: [{
expand: true,
src: ['manifest.json'],
dest: '<%= outputDir %>'
}
]
images:
files: [{
expand: true
cwd: '<%= srcDirImages %>/',
src: ['*'],
dest: '<%= imagesOutput %>/'
}]
# third_party:
# files: [{
# expand: true,
# src: ['third-party/**'],
# dest: '<%= outputDir %>'
# }]
uglify:
minify:
files: [
"<%= outputDir %>/js/main.js"
]
options:
compress: true
banner: "/* Chrome YouTube Resizer (c) <NAME> & Contributors (http://github.com/ddavison/chrome-youtube-resizer) */"
# zip up everything for release
compress:
extension:
options:
mode: 'zip'
archive: '<%= pkg.name %>-<%= pkg.version %>.zip'
expand: true
src: ['**/*']
cwd: 'dist/'
clean: ["<%= outputDir %>"]
)
grunt.loadNpmTasks('grunt-contrib-haml')
grunt.loadNpmTasks('grunt-contrib-compass')
grunt.loadNpmTasks('grunt-contrib-coffee')
grunt.loadNpmTasks('grunt-contrib-clean')
grunt.loadNpmTasks('grunt-contrib-copy')
grunt.loadNpmTasks('grunt-contrib-uglify')
grunt.loadNpmTasks('grunt-shell')
grunt.loadNpmTasks('grunt-contrib-compress')
grunt.registerTask('default', [
'clean', # clean the distribution directory
'coffee:production', # compile the coffeescript
'compass:dist', # compile the sass
'copy:manifest', # copy the chrome manifest
'copy:images', # copy the png resize button
'copy:third_party', # copy all third party sources that are needed
])
grunt.registerTask('release', ->
grunt.task.run('default')
grunt.task.run('compress:extension')
)
| true | module.exports = (grunt) ->
grunt.initConfig(
pkg: grunt.file.readJSON("package.json")
srcDir: "./src"
srcDirScss: "<%= srcDir %>/scss"
srcDirCoffee: "<%= srcDir %>/coffee"
srcDirImages: "<%= srcDir %>/images"
outputDir: "./dist"
cssOutput: "<%= outputDir %>/css"
jsOutput: "<%= outputDir %>/js"
imagesOutput: "<%= outputDir %>/images"
cssRequestPath: "/css"
jsRequestPath: "/js"
compass:
dist:
options:
sassDir: "<%= srcDirScss %>"
cssDir: "<%= cssOutput %>"
outputStyle: 'compact'
coffee:
production:
expand:true
cwd: "<%= srcDir %>"
src: ["**/*.coffee"]
dest: "<%= jsOutput %>"
ext: ".js"
watch:
coffee:
files: "<%= srcDirCoffee %>/**/*.coffee"
tasks: ["coffee:development"]
css:
files: "<%= srcDirScss %>/**/*.scss"
tasks: ["compass:dist"]
copy:
manifest:
files: [{
expand: true,
src: ['manifest.json'],
dest: '<%= outputDir %>'
}
]
images:
files: [{
expand: true
cwd: '<%= srcDirImages %>/',
src: ['*'],
dest: '<%= imagesOutput %>/'
}]
# third_party:
# files: [{
# expand: true,
# src: ['third-party/**'],
# dest: '<%= outputDir %>'
# }]
uglify:
minify:
files: [
"<%= outputDir %>/js/main.js"
]
options:
compress: true
banner: "/* Chrome YouTube Resizer (c) PI:NAME:<NAME>END_PI & Contributors (http://github.com/ddavison/chrome-youtube-resizer) */"
# zip up everything for release
compress:
extension:
options:
mode: 'zip'
archive: '<%= pkg.name %>-<%= pkg.version %>.zip'
expand: true
src: ['**/*']
cwd: 'dist/'
clean: ["<%= outputDir %>"]
)
grunt.loadNpmTasks('grunt-contrib-haml')
grunt.loadNpmTasks('grunt-contrib-compass')
grunt.loadNpmTasks('grunt-contrib-coffee')
grunt.loadNpmTasks('grunt-contrib-clean')
grunt.loadNpmTasks('grunt-contrib-copy')
grunt.loadNpmTasks('grunt-contrib-uglify')
grunt.loadNpmTasks('grunt-shell')
grunt.loadNpmTasks('grunt-contrib-compress')
grunt.registerTask('default', [
'clean', # clean the distribution directory
'coffee:production', # compile the coffeescript
'compass:dist', # compile the sass
'copy:manifest', # copy the chrome manifest
'copy:images', # copy the png resize button
'copy:third_party', # copy all third party sources that are needed
])
grunt.registerTask('release', ->
grunt.task.run('default')
grunt.task.run('compress:extension')
)
|
[
{
"context": ": @authenticityToken\n 'user[username]': @user\n 'user[password]': @password\n '",
"end": 6390,
"score": 0.9994869232177734,
"start": 6385,
"tag": "USERNAME",
"value": "@user"
},
{
"context": "sername]': @user\n 'user[password]': @password\n 'user[remember_me]': 0\n 'utf8': ",
"end": 6430,
"score": 0.9957892894744873,
"start": 6421,
"tag": "PASSWORD",
"value": "@password"
}
] | src/diasp_agent.coffee | oz/diasp_agent | 1 | http = require 'http'
https = require 'https'
querystring = require 'querystring'
jsdom = require 'jsdom'
_ = require 'underscore'
fs = require 'fs'
path = require 'path'
bundleDir = path.dirname(module.filename)
bundle = fs.readFileSync("#{bundleDir}/ender.min.js").toString()
# EvilAgent
# =========
#
# EvilAgent performs HTTP requests like a true villain.
#
# It (mostly) wraps `jsdom` around node's native `http` module to
# automatically parse the HTTP responses (when they're some kind of HTML
# soup), and frees one from the pain of managing boring things like;
#
# * Cookies
# * DOM parsing (using jsdom)
# * parameter encoding
# * etc.
#
class EvilAgent
constructor: (server, options = {}) ->
server.ssl = server.port == 443
@cookies = {}
# Clone the received server param, and extend w/ other options.
@options = _.extend (_.extend {}, server), options
# Performs the HTTP request, receives a callback called when the
# request is completed. The callback in turn must receive 3 parameters that
# are:
#
# * the native `HTTPResponse` object,
# * the response body, a `String`,
# * the parsed DOM, from jsdom.
domReady: (fun) ->
@performQuery @options, (res, body) ->
jsdom.env
html: body
src: [ bundle ]
done: (errors, window) ->
fun res, body, window
# Performs the HTTP request, receives a callback called when the
# request is completed. The callback in turn must receive 2 parameters
# that are:
#
# * the native `HTTPResponse` object,
# * the response body, a `String`.
#
# Note that the received callback will be called *regardless of the
# HTTP status code*!
complete: (fun) -> @performQuery @options, fun
# Update @options
update: (opts) -> @options = _.extend @options, opts
# This is where the HTTP requests is made.
performQuery: (options, cb) =>
options.headers ?= {}
body = ''
payload = null
client = if options.ssl is true then https else http
# Fetch every cookies from the cookie jar.
cookie = @__cookiesToString()
if cookie then options.headers['Cookie'] = cookie
# When sending a query with data, set the content-length, and
# content-type headers: e.g. POST requests.
if options.payload and options.method != 'GET'
_.extend options.headers,
'Content-Type': 'application/x-www-form-urlencoded'
'Content-Length': options.payload.length
req = client.request options, (res) =>
res.setEncoding 'utf8'
# Update the agent's cookie jar
@__updateCookieJar res.headers['set-cookie']
# Store response in a String (not a buffer)
res.on 'data', (chunk) -> body += chunk
res.on 'end', () -> cb res, body
req.on 'error', (e) -> throw e
# Send POST data if it was set.
req.write options.payload if options.payload?
req.end()
# Set POST data
postData: (data) ->
if typeof data == 'string'
@options.payload = data
else
@options.payload = querystring.stringify data
# Cookie jar accessors
setCookie: (key, value) -> @cookies[key] = value
getCookie: (name) -> @cookies[name]
# Updates the internal cookie jar with the cookies received in an
# HTTPRequest response.
__updateCookieJar: (cookies) ->
_.extend @cookies, @__cookiesToObject cookies
# Get all the cookies received by the latest response, and format them
# to be sent in a 'Cookie' header.
__cookiesToString: () -> _.map(@cookies, (v, k) -> "#{ k }=#{ v }").join ';'
# Save each received cookie in a response to the @cookies jar.
__cookiesToObject: (cookies) =>
iterator = (memo, cookie) ->
[key, value] = cookie.split(';')[0].split '=', 2
memo[key] = value
memo
_.reduce cookies, iterator, {}
# DiasporaAgent
# =============
#
# DiasporaAgent is a boring web scraper using EvilAgent to do things
# like signing-in a D* node using... usernames and passwords. My bad.
#
# However, DiasporaAgent can pretty much act like a normal web client:
#
# * post public messages,
# * fetch your stream,
# * etc.
#
# Limitations, evolutions
# -----------------------
#
# There is no public resource on D* to fetch a complete list of one's
# existing aspects ; as a consequence DiasporaAgent does not provide you
# with the tools to post a message to a specific aspect. So far!
#
class DiasporaAgent
constructor: (@address, @password, @opts = {}) ->
[@user, @host] = @address.split '@', 2
port = if @opts.ssl? and @opts.ssl == false then 80 else 443
port = @opts.port if @opts.port
@server =
host: @host
port: port
# Rails' anti-CSRF token
@authenticityToken = null
# Session cookie name
@__sessionCookieName = '_diaspora_session'
# Session cookie value
@sessionCookie = null
# Poor man's flag to indicate the need to call startSession
@__sessionInitialized = false
# Start the D* session: grab an authentication-token from an HTML
# form, and register a session cookie from the HTTP responses...
startSession: (cb) ->
# Avoid initializing twice: if we've initialized our agent once,
# then just call the received callback.
return cb() if @__sessionInitialized
@__sessionInitialized = true
# Get the sign-in page...
req = @get '/users/sign_in'
# The HTML is parsed, and injected with Qwery and Bonzo to allow DOM
# searches & other manipulations.
req.domReady (res, body, dom) =>
@authenticityToken = dom.$("meta[name=csrf-token]").attr 'content'
@sessionCookie = req.getCookie @__sessionCookieName
throw "Failed to find authenticity-token" unless @authenticityToken?
throw "Failed to init. session cookie" unless @sessionCookie?
# Once the page has been parsed, and we have saved the current
# authenticity token, and session-cookie, call the received
# callback.
cb()
# Sign in against the D* node, before calling the received callback.
login: (cb) ->
# Ensure we have a running session to get the initial anti-CSRF
# token, and a session cookie.
@startSession =>
# POST the username and password to the D* sign-in URL in order to
# create a new user-session.
req = @post '/users/sign_in'
req.postData
'authenticity_token': @authenticityToken
'user[username]': @user
'user[password]': @password
'user[remember_me]': 0
'utf8': '✓'
req.complete (res, body) =>
@sessionCookie = req.getCookie @__sessionCookieName
# After sign-up, we should get redirected to the homepage...
# XXX check res.location
return cb message: 'Login failed' unless res.statusCode == 302
cb null
# ... which in turn redirects to the `/stream` page... Let's try
# to load a faster page instead, because we need to update the
# authenticity token & session cookie anyway.
#redir = @get '/activity'
#redir.domReady (res, body, dom) =>
#@sessionCookie = redir.getCookie @__sessionCookieName
#@authenticityToken = dom.$("meta[name=csrf-token]").attr 'content'
#cb null
# Post a public message to the connected D* account
publicMessage: (text, cb) ->
# Build a POST request for `/status_messages`.
req = @post '/status_messages', 'Accept': 'application/json, text/javascript'
req.postData 'status_message[text]': text, 'aspect_ids': 'public'
req.complete (res, body) ->
@sessionCookie = req.getCookie @__sessionCookieName
# When a message resource is successfuly created, the response
# code should be 201...
if res.statusCode == 201
cb null, JSON.parse body
else
cb {message: 'Message posting failed'}, null
# `get` and `post` are wrappers for `__createRequest` to build GET and
# POST HTTP requests.
get: (path, headers = {}) -> @__createRequest 'GET', path, headers
post: (path, headers = {}) -> @__createRequest 'POST', path, headers
# Get the connected user's stream page
stream: (cb) ->
@get('/stream.json').complete (res, body) ->
cb {message: "Could not get the stream (#{ res.statusCode }) "} unless res.statusCode is 200
cb null, JSON.parse body
# Get the connected user's aspects page
aspects: (cb) ->
@get('/aspects.json').complete (res, body) ->
cb {message: "Could not get the aspects (#{ res.statusCode }) "} unless res.statusCode is 200
cb null, JSON.parse body
# Create a new HTTP Request, whith the CSRF-Token and session cookie set.
__createRequest: (method, path, headers={}) =>
opts =
method: method
path: path
opts.headers = 'X-CSRF-Token': @authenticityToken if @authenticityToken
_.extend opts.headers, headers
req = new EvilAgent @server, opts
req.setCookie @__sessionCookieName, @sessionCookie
# Return directly a new EvilAgent instance.
req
# Extract the value of a cookie against a list of received cookies. If
# the cookie is not found, null is returned.
__extractCookieValue: (cookies, name) ->
find_name = (m, x) ->
[key, value] = x.split(';')[0].split '=', 2
m = value if name == key
_.reduce cookies, find_name, null
module.exports = DiasporaAgent
| 153191 | http = require 'http'
https = require 'https'
querystring = require 'querystring'
jsdom = require 'jsdom'
_ = require 'underscore'
fs = require 'fs'
path = require 'path'
bundleDir = path.dirname(module.filename)
bundle = fs.readFileSync("#{bundleDir}/ender.min.js").toString()
# EvilAgent
# =========
#
# EvilAgent performs HTTP requests like a true villain.
#
# It (mostly) wraps `jsdom` around node's native `http` module to
# automatically parse the HTTP responses (when they're some kind of HTML
# soup), and frees one from the pain of managing boring things like;
#
# * Cookies
# * DOM parsing (using jsdom)
# * parameter encoding
# * etc.
#
class EvilAgent
constructor: (server, options = {}) ->
server.ssl = server.port == 443
@cookies = {}
# Clone the received server param, and extend w/ other options.
@options = _.extend (_.extend {}, server), options
# Performs the HTTP request, receives a callback called when the
# request is completed. The callback in turn must receive 3 parameters that
# are:
#
# * the native `HTTPResponse` object,
# * the response body, a `String`,
# * the parsed DOM, from jsdom.
domReady: (fun) ->
@performQuery @options, (res, body) ->
jsdom.env
html: body
src: [ bundle ]
done: (errors, window) ->
fun res, body, window
# Performs the HTTP request, receives a callback called when the
# request is completed. The callback in turn must receive 2 parameters
# that are:
#
# * the native `HTTPResponse` object,
# * the response body, a `String`.
#
# Note that the received callback will be called *regardless of the
# HTTP status code*!
complete: (fun) -> @performQuery @options, fun
# Update @options
update: (opts) -> @options = _.extend @options, opts
# This is where the HTTP requests is made.
performQuery: (options, cb) =>
options.headers ?= {}
body = ''
payload = null
client = if options.ssl is true then https else http
# Fetch every cookies from the cookie jar.
cookie = @__cookiesToString()
if cookie then options.headers['Cookie'] = cookie
# When sending a query with data, set the content-length, and
# content-type headers: e.g. POST requests.
if options.payload and options.method != 'GET'
_.extend options.headers,
'Content-Type': 'application/x-www-form-urlencoded'
'Content-Length': options.payload.length
req = client.request options, (res) =>
res.setEncoding 'utf8'
# Update the agent's cookie jar
@__updateCookieJar res.headers['set-cookie']
# Store response in a String (not a buffer)
res.on 'data', (chunk) -> body += chunk
res.on 'end', () -> cb res, body
req.on 'error', (e) -> throw e
# Send POST data if it was set.
req.write options.payload if options.payload?
req.end()
# Set POST data
postData: (data) ->
if typeof data == 'string'
@options.payload = data
else
@options.payload = querystring.stringify data
# Cookie jar accessors
setCookie: (key, value) -> @cookies[key] = value
getCookie: (name) -> @cookies[name]
# Updates the internal cookie jar with the cookies received in an
# HTTPRequest response.
__updateCookieJar: (cookies) ->
_.extend @cookies, @__cookiesToObject cookies
# Get all the cookies received by the latest response, and format them
# to be sent in a 'Cookie' header.
__cookiesToString: () -> _.map(@cookies, (v, k) -> "#{ k }=#{ v }").join ';'
# Save each received cookie in a response to the @cookies jar.
__cookiesToObject: (cookies) =>
iterator = (memo, cookie) ->
[key, value] = cookie.split(';')[0].split '=', 2
memo[key] = value
memo
_.reduce cookies, iterator, {}
# DiasporaAgent
# =============
#
# DiasporaAgent is a boring web scraper using EvilAgent to do things
# like signing-in a D* node using... usernames and passwords. My bad.
#
# However, DiasporaAgent can pretty much act like a normal web client:
#
# * post public messages,
# * fetch your stream,
# * etc.
#
# Limitations, evolutions
# -----------------------
#
# There is no public resource on D* to fetch a complete list of one's
# existing aspects ; as a consequence DiasporaAgent does not provide you
# with the tools to post a message to a specific aspect. So far!
#
class DiasporaAgent
constructor: (@address, @password, @opts = {}) ->
[@user, @host] = @address.split '@', 2
port = if @opts.ssl? and @opts.ssl == false then 80 else 443
port = @opts.port if @opts.port
@server =
host: @host
port: port
# Rails' anti-CSRF token
@authenticityToken = null
# Session cookie name
@__sessionCookieName = '_diaspora_session'
# Session cookie value
@sessionCookie = null
# Poor man's flag to indicate the need to call startSession
@__sessionInitialized = false
# Start the D* session: grab an authentication-token from an HTML
# form, and register a session cookie from the HTTP responses...
startSession: (cb) ->
# Avoid initializing twice: if we've initialized our agent once,
# then just call the received callback.
return cb() if @__sessionInitialized
@__sessionInitialized = true
# Get the sign-in page...
req = @get '/users/sign_in'
# The HTML is parsed, and injected with Qwery and Bonzo to allow DOM
# searches & other manipulations.
req.domReady (res, body, dom) =>
@authenticityToken = dom.$("meta[name=csrf-token]").attr 'content'
@sessionCookie = req.getCookie @__sessionCookieName
throw "Failed to find authenticity-token" unless @authenticityToken?
throw "Failed to init. session cookie" unless @sessionCookie?
# Once the page has been parsed, and we have saved the current
# authenticity token, and session-cookie, call the received
# callback.
cb()
# Sign in against the D* node, before calling the received callback.
login: (cb) ->
# Ensure we have a running session to get the initial anti-CSRF
# token, and a session cookie.
@startSession =>
# POST the username and password to the D* sign-in URL in order to
# create a new user-session.
req = @post '/users/sign_in'
req.postData
'authenticity_token': @authenticityToken
'user[username]': @user
'user[password]': <PASSWORD>
'user[remember_me]': 0
'utf8': '✓'
req.complete (res, body) =>
@sessionCookie = req.getCookie @__sessionCookieName
# After sign-up, we should get redirected to the homepage...
# XXX check res.location
return cb message: 'Login failed' unless res.statusCode == 302
cb null
# ... which in turn redirects to the `/stream` page... Let's try
# to load a faster page instead, because we need to update the
# authenticity token & session cookie anyway.
#redir = @get '/activity'
#redir.domReady (res, body, dom) =>
#@sessionCookie = redir.getCookie @__sessionCookieName
#@authenticityToken = dom.$("meta[name=csrf-token]").attr 'content'
#cb null
# Post a public message to the connected D* account
publicMessage: (text, cb) ->
# Build a POST request for `/status_messages`.
req = @post '/status_messages', 'Accept': 'application/json, text/javascript'
req.postData 'status_message[text]': text, 'aspect_ids': 'public'
req.complete (res, body) ->
@sessionCookie = req.getCookie @__sessionCookieName
# When a message resource is successfuly created, the response
# code should be 201...
if res.statusCode == 201
cb null, JSON.parse body
else
cb {message: 'Message posting failed'}, null
# `get` and `post` are wrappers for `__createRequest` to build GET and
# POST HTTP requests.
get: (path, headers = {}) -> @__createRequest 'GET', path, headers
post: (path, headers = {}) -> @__createRequest 'POST', path, headers
# Get the connected user's stream page
stream: (cb) ->
@get('/stream.json').complete (res, body) ->
cb {message: "Could not get the stream (#{ res.statusCode }) "} unless res.statusCode is 200
cb null, JSON.parse body
# Get the connected user's aspects page
aspects: (cb) ->
@get('/aspects.json').complete (res, body) ->
cb {message: "Could not get the aspects (#{ res.statusCode }) "} unless res.statusCode is 200
cb null, JSON.parse body
# Create a new HTTP Request, whith the CSRF-Token and session cookie set.
__createRequest: (method, path, headers={}) =>
opts =
method: method
path: path
opts.headers = 'X-CSRF-Token': @authenticityToken if @authenticityToken
_.extend opts.headers, headers
req = new EvilAgent @server, opts
req.setCookie @__sessionCookieName, @sessionCookie
# Return directly a new EvilAgent instance.
req
# Extract the value of a cookie against a list of received cookies. If
# the cookie is not found, null is returned.
__extractCookieValue: (cookies, name) ->
find_name = (m, x) ->
[key, value] = x.split(';')[0].split '=', 2
m = value if name == key
_.reduce cookies, find_name, null
module.exports = DiasporaAgent
| true | http = require 'http'
https = require 'https'
querystring = require 'querystring'
jsdom = require 'jsdom'
_ = require 'underscore'
fs = require 'fs'
path = require 'path'
bundleDir = path.dirname(module.filename)
bundle = fs.readFileSync("#{bundleDir}/ender.min.js").toString()
# EvilAgent
# =========
#
# EvilAgent performs HTTP requests like a true villain.
#
# It (mostly) wraps `jsdom` around node's native `http` module to
# automatically parse the HTTP responses (when they're some kind of HTML
# soup), and frees one from the pain of managing boring things like;
#
# * Cookies
# * DOM parsing (using jsdom)
# * parameter encoding
# * etc.
#
class EvilAgent
constructor: (server, options = {}) ->
server.ssl = server.port == 443
@cookies = {}
# Clone the received server param, and extend w/ other options.
@options = _.extend (_.extend {}, server), options
# Performs the HTTP request, receives a callback called when the
# request is completed. The callback in turn must receive 3 parameters that
# are:
#
# * the native `HTTPResponse` object,
# * the response body, a `String`,
# * the parsed DOM, from jsdom.
domReady: (fun) ->
@performQuery @options, (res, body) ->
jsdom.env
html: body
src: [ bundle ]
done: (errors, window) ->
fun res, body, window
# Performs the HTTP request, receives a callback called when the
# request is completed. The callback in turn must receive 2 parameters
# that are:
#
# * the native `HTTPResponse` object,
# * the response body, a `String`.
#
# Note that the received callback will be called *regardless of the
# HTTP status code*!
complete: (fun) -> @performQuery @options, fun
# Update @options
update: (opts) -> @options = _.extend @options, opts
# This is where the HTTP requests is made.
performQuery: (options, cb) =>
options.headers ?= {}
body = ''
payload = null
client = if options.ssl is true then https else http
# Fetch every cookies from the cookie jar.
cookie = @__cookiesToString()
if cookie then options.headers['Cookie'] = cookie
# When sending a query with data, set the content-length, and
# content-type headers: e.g. POST requests.
if options.payload and options.method != 'GET'
_.extend options.headers,
'Content-Type': 'application/x-www-form-urlencoded'
'Content-Length': options.payload.length
req = client.request options, (res) =>
res.setEncoding 'utf8'
# Update the agent's cookie jar
@__updateCookieJar res.headers['set-cookie']
# Store response in a String (not a buffer)
res.on 'data', (chunk) -> body += chunk
res.on 'end', () -> cb res, body
req.on 'error', (e) -> throw e
# Send POST data if it was set.
req.write options.payload if options.payload?
req.end()
# Set POST data
postData: (data) ->
if typeof data == 'string'
@options.payload = data
else
@options.payload = querystring.stringify data
# Cookie jar accessors
setCookie: (key, value) -> @cookies[key] = value
getCookie: (name) -> @cookies[name]
# Updates the internal cookie jar with the cookies received in an
# HTTPRequest response.
__updateCookieJar: (cookies) ->
_.extend @cookies, @__cookiesToObject cookies
# Get all the cookies received by the latest response, and format them
# to be sent in a 'Cookie' header.
__cookiesToString: () -> _.map(@cookies, (v, k) -> "#{ k }=#{ v }").join ';'
# Save each received cookie in a response to the @cookies jar.
__cookiesToObject: (cookies) =>
iterator = (memo, cookie) ->
[key, value] = cookie.split(';')[0].split '=', 2
memo[key] = value
memo
_.reduce cookies, iterator, {}
# DiasporaAgent
# =============
#
# DiasporaAgent is a boring web scraper using EvilAgent to do things
# like signing-in a D* node using... usernames and passwords. My bad.
#
# However, DiasporaAgent can pretty much act like a normal web client:
#
# * post public messages,
# * fetch your stream,
# * etc.
#
# Limitations, evolutions
# -----------------------
#
# There is no public resource on D* to fetch a complete list of one's
# existing aspects ; as a consequence DiasporaAgent does not provide you
# with the tools to post a message to a specific aspect. So far!
#
class DiasporaAgent
constructor: (@address, @password, @opts = {}) ->
[@user, @host] = @address.split '@', 2
port = if @opts.ssl? and @opts.ssl == false then 80 else 443
port = @opts.port if @opts.port
@server =
host: @host
port: port
# Rails' anti-CSRF token
@authenticityToken = null
# Session cookie name
@__sessionCookieName = '_diaspora_session'
# Session cookie value
@sessionCookie = null
# Poor man's flag to indicate the need to call startSession
@__sessionInitialized = false
# Start the D* session: grab an authentication-token from an HTML
# form, and register a session cookie from the HTTP responses...
startSession: (cb) ->
# Avoid initializing twice: if we've initialized our agent once,
# then just call the received callback.
return cb() if @__sessionInitialized
@__sessionInitialized = true
# Get the sign-in page...
req = @get '/users/sign_in'
# The HTML is parsed, and injected with Qwery and Bonzo to allow DOM
# searches & other manipulations.
req.domReady (res, body, dom) =>
@authenticityToken = dom.$("meta[name=csrf-token]").attr 'content'
@sessionCookie = req.getCookie @__sessionCookieName
throw "Failed to find authenticity-token" unless @authenticityToken?
throw "Failed to init. session cookie" unless @sessionCookie?
# Once the page has been parsed, and we have saved the current
# authenticity token, and session-cookie, call the received
# callback.
cb()
# Sign in against the D* node, before calling the received callback.
login: (cb) ->
# Ensure we have a running session to get the initial anti-CSRF
# token, and a session cookie.
@startSession =>
# POST the username and password to the D* sign-in URL in order to
# create a new user-session.
req = @post '/users/sign_in'
req.postData
'authenticity_token': @authenticityToken
'user[username]': @user
'user[password]': PI:PASSWORD:<PASSWORD>END_PI
'user[remember_me]': 0
'utf8': '✓'
req.complete (res, body) =>
@sessionCookie = req.getCookie @__sessionCookieName
# After sign-up, we should get redirected to the homepage...
# XXX check res.location
return cb message: 'Login failed' unless res.statusCode == 302
cb null
# ... which in turn redirects to the `/stream` page... Let's try
# to load a faster page instead, because we need to update the
# authenticity token & session cookie anyway.
#redir = @get '/activity'
#redir.domReady (res, body, dom) =>
#@sessionCookie = redir.getCookie @__sessionCookieName
#@authenticityToken = dom.$("meta[name=csrf-token]").attr 'content'
#cb null
# Post a public message to the connected D* account
publicMessage: (text, cb) ->
# Build a POST request for `/status_messages`.
req = @post '/status_messages', 'Accept': 'application/json, text/javascript'
req.postData 'status_message[text]': text, 'aspect_ids': 'public'
req.complete (res, body) ->
@sessionCookie = req.getCookie @__sessionCookieName
# When a message resource is successfuly created, the response
# code should be 201...
if res.statusCode == 201
cb null, JSON.parse body
else
cb {message: 'Message posting failed'}, null
# `get` and `post` are wrappers for `__createRequest` to build GET and
# POST HTTP requests.
get: (path, headers = {}) -> @__createRequest 'GET', path, headers
post: (path, headers = {}) -> @__createRequest 'POST', path, headers
# Get the connected user's stream page
stream: (cb) ->
@get('/stream.json').complete (res, body) ->
cb {message: "Could not get the stream (#{ res.statusCode }) "} unless res.statusCode is 200
cb null, JSON.parse body
# Get the connected user's aspects page
aspects: (cb) ->
@get('/aspects.json').complete (res, body) ->
cb {message: "Could not get the aspects (#{ res.statusCode }) "} unless res.statusCode is 200
cb null, JSON.parse body
# Create a new HTTP Request, whith the CSRF-Token and session cookie set.
__createRequest: (method, path, headers={}) =>
opts =
method: method
path: path
opts.headers = 'X-CSRF-Token': @authenticityToken if @authenticityToken
_.extend opts.headers, headers
req = new EvilAgent @server, opts
req.setCookie @__sessionCookieName, @sessionCookie
# Return directly a new EvilAgent instance.
req
# Extract the value of a cookie against a list of received cookies. If
# the cookie is not found, null is returned.
__extractCookieValue: (cookies, name) ->
find_name = (m, x) ->
[key, value] = x.split(';')[0].split '=', 2
m = value if name == key
_.reduce cookies, find_name, null
module.exports = DiasporaAgent
|
[
{
"context": "tle', ->\n artist = new Backbone.Model name: 'Foo Bar'\n @lot.toPageTitle().should.equal 'Auction R",
"end": 603,
"score": 0.9994366765022278,
"start": 596,
"tag": "NAME",
"value": "Foo Bar"
},
{
"context": "ay 23, 2012 | Artsy'\n @lot.set artist_name: 'Foo Bar'\n @lot.toPageTitle(artist).should.equal 'Auc",
"end": 761,
"score": 0.9994670152664185,
"start": 754,
"tag": "NAME",
"value": "Foo Bar"
}
] | src/desktop/test/models/auction_lot.test.coffee | jo-rs/force | 0 | _ = require 'underscore'
sinon = require 'sinon'
should = require 'should'
Backbone = require 'backbone'
{ fabricate } = require '@artsy/antigravity'
AuctionLot = require '../../models/auction_lot'
describe 'AuctionLot', ->
before ->
@lot = new AuctionLot fabricate 'auction_result'
describe '#href', ->
it 'returns a URL to the auction lot', ->
@lot.href(new Backbone.Model(id: 'foo-bar')).should.equal "/artist/foo-bar/auction-result/#{@lot.id}"
describe '#toPageTitle', ->
it 'returns a string usable for the page title', ->
artist = new Backbone.Model name: 'Foo Bar'
@lot.toPageTitle().should.equal 'Auction Result for \"MADONNA PAINTING\" (1985) | Lempertz, May 23, 2012 | Artsy'
@lot.set artist_name: 'Foo Bar'
@lot.toPageTitle(artist).should.equal 'Auction Result for \"MADONNA PAINTING\" (1985) by Foo Bar | Lempertz, May 23, 2012 | Artsy'
describe '#toPageDescription', ->
it 'returns a string usable for the page description', ->
@lot.toPageDescription().should.equal 'Screenprint on canvas, 20.1 × 15.9 in. Estimate €120,000 - 160,000 from Lempertz on May 23, 2012. Find auction estimate and sale price, and research more auction results from top auction houses.'
describe '#hasDimensions', ->
it 'returns true if there is any dimension attributes present', ->
@lot.hasDimensions().should.be.ok()
@lot.unset 'dimensions'
@lot.hasDimensions().should.not.be.ok()
@lot.set 'dimensions', 'foobar'
@lot.hasDimensions().should.not.be.ok()
@lot.set 'dimensions', in: 'foo'
@lot.hasDimensions().should.be.ok()
@lot.set 'dimensions', cm: 'foo'
@lot.hasDimensions().should.be.ok()
| 79133 | _ = require 'underscore'
sinon = require 'sinon'
should = require 'should'
Backbone = require 'backbone'
{ fabricate } = require '@artsy/antigravity'
AuctionLot = require '../../models/auction_lot'
describe 'AuctionLot', ->
before ->
@lot = new AuctionLot fabricate 'auction_result'
describe '#href', ->
it 'returns a URL to the auction lot', ->
@lot.href(new Backbone.Model(id: 'foo-bar')).should.equal "/artist/foo-bar/auction-result/#{@lot.id}"
describe '#toPageTitle', ->
it 'returns a string usable for the page title', ->
artist = new Backbone.Model name: '<NAME>'
@lot.toPageTitle().should.equal 'Auction Result for \"MADONNA PAINTING\" (1985) | Lempertz, May 23, 2012 | Artsy'
@lot.set artist_name: '<NAME>'
@lot.toPageTitle(artist).should.equal 'Auction Result for \"MADONNA PAINTING\" (1985) by Foo Bar | Lempertz, May 23, 2012 | Artsy'
describe '#toPageDescription', ->
it 'returns a string usable for the page description', ->
@lot.toPageDescription().should.equal 'Screenprint on canvas, 20.1 × 15.9 in. Estimate €120,000 - 160,000 from Lempertz on May 23, 2012. Find auction estimate and sale price, and research more auction results from top auction houses.'
describe '#hasDimensions', ->
it 'returns true if there is any dimension attributes present', ->
@lot.hasDimensions().should.be.ok()
@lot.unset 'dimensions'
@lot.hasDimensions().should.not.be.ok()
@lot.set 'dimensions', 'foobar'
@lot.hasDimensions().should.not.be.ok()
@lot.set 'dimensions', in: 'foo'
@lot.hasDimensions().should.be.ok()
@lot.set 'dimensions', cm: 'foo'
@lot.hasDimensions().should.be.ok()
| true | _ = require 'underscore'
sinon = require 'sinon'
should = require 'should'
Backbone = require 'backbone'
{ fabricate } = require '@artsy/antigravity'
AuctionLot = require '../../models/auction_lot'
describe 'AuctionLot', ->
before ->
@lot = new AuctionLot fabricate 'auction_result'
describe '#href', ->
it 'returns a URL to the auction lot', ->
@lot.href(new Backbone.Model(id: 'foo-bar')).should.equal "/artist/foo-bar/auction-result/#{@lot.id}"
describe '#toPageTitle', ->
it 'returns a string usable for the page title', ->
artist = new Backbone.Model name: 'PI:NAME:<NAME>END_PI'
@lot.toPageTitle().should.equal 'Auction Result for \"MADONNA PAINTING\" (1985) | Lempertz, May 23, 2012 | Artsy'
@lot.set artist_name: 'PI:NAME:<NAME>END_PI'
@lot.toPageTitle(artist).should.equal 'Auction Result for \"MADONNA PAINTING\" (1985) by Foo Bar | Lempertz, May 23, 2012 | Artsy'
describe '#toPageDescription', ->
it 'returns a string usable for the page description', ->
@lot.toPageDescription().should.equal 'Screenprint on canvas, 20.1 × 15.9 in. Estimate €120,000 - 160,000 from Lempertz on May 23, 2012. Find auction estimate and sale price, and research more auction results from top auction houses.'
describe '#hasDimensions', ->
it 'returns true if there is any dimension attributes present', ->
@lot.hasDimensions().should.be.ok()
@lot.unset 'dimensions'
@lot.hasDimensions().should.not.be.ok()
@lot.set 'dimensions', 'foobar'
@lot.hasDimensions().should.not.be.ok()
@lot.set 'dimensions', in: 'foo'
@lot.hasDimensions().should.be.ok()
@lot.set 'dimensions', cm: 'foo'
@lot.hasDimensions().should.be.ok()
|
[
{
"context": "te in a clustered environ\n * @author Valiton GmbH, Bastian \"hereandnow\" Behrens\n###\n\n###*\n * Standard librar",
"end": 123,
"score": 0.9998583793640137,
"start": 116,
"tag": "NAME",
"value": "Bastian"
},
{
"context": "ustered environ\n * @author Valiton GmbH, Bastian \"hereandnow\" Behrens\n###\n\n###*\n * Standard library imports\n##",
"end": 135,
"score": 0.9011754989624023,
"start": 125,
"tag": "USERNAME",
"value": "hereandnow"
},
{
"context": "viron\n * @author Valiton GmbH, Bastian \"hereandnow\" Behrens\n###\n\n###*\n * Standard library imports\n###\nEventEm",
"end": 144,
"score": 0.9000790119171143,
"start": 137,
"tag": "NAME",
"value": "Behrens"
}
] | src/msghub.coffee | valiton/node-msghub | 1 | ###*
* @name msghub
* @description A simple msghub to communicate in a clustered environ
* @author Valiton GmbH, Bastian "hereandnow" Behrens
###
###*
* Standard library imports
###
EventEmitter = require('events').EventEmitter
cluster = require 'cluster'
###*
* 3rd library imports
###
SimplePool = require 'simple-pool'
class Msghub extends EventEmitter
###*
* send to the master
*
* @param {array} args where args[0] is the event and args[1] is the message
* @param {string} to which is one of all|get|random
* @private
###
_send = (args, to) ->
process.nextTick ->
msg = if args.length is 2 then args[1] else args[1..]
process.send type: 'msghub', event: args[0], msg: msg, to: to
_distribute = (msg) ->
to = @pool[msg.event][msg.to]()
return to.send msg unless Array.isArray to
to.forEach (_worker) ->
_worker.send msg
###*
* when a new Listener is added, put it into the SimplePool
*
* @param {array} args where args[0] is the event and args[1] is the message
* @param {string} to which is one of all|get|random
* @private
###
_addListener = (msg) ->
for id, worker of cluster.workers
if worker.process.pid is msg.pid
@pool[msg.listener] or= new SimplePool()
@pool[msg.listener].add worker
###*
* create a new Msghub instance
*
* @memberOf global
*
* @constructor
* @this {Msghub}
###
constructor: ->
if cluster.isMaster
@listeners = {}
@pool = {}
process.nextTick =>
for id, worker of cluster.workers
worker.on 'message', (msg) =>
return _addListener.call this, msg if msg.type is 'msghubListener'
if msg.type is 'msghub'
unless @pool[msg.event]?
return setTimeout =>
_distribute.call this, msg
, 2000
_distribute.call this, msg
else
@setMaxListeners 0
@on 'newListener', (listener) ->
process.send type: 'msghubListener', listener: listener, pid: process.pid
process.on 'message', (msg) =>
return if msg.type isnt 'msghub'
@emit msg.event, msg.msg
###*
* send a message to all workers who binded to the given event
*
* @param {string} event send only to workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
send: (args...) -> _send args, 'all'
###*
* send a message to a worker who binded to the given event in a roundrobin manner
*
* @param {string} event consider only workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
roundrobin: (args...) -> _send args, 'get'
###*
* send a message to a worker who binded to the given event in a random manner
*
* @param {string} event consider only workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
random: (args...) -> _send args, 'random'
module.exports = new Msghub() | 178292 | ###*
* @name msghub
* @description A simple msghub to communicate in a clustered environ
* @author Valiton GmbH, <NAME> "hereandnow" <NAME>
###
###*
* Standard library imports
###
EventEmitter = require('events').EventEmitter
cluster = require 'cluster'
###*
* 3rd library imports
###
SimplePool = require 'simple-pool'
class Msghub extends EventEmitter
###*
* send to the master
*
* @param {array} args where args[0] is the event and args[1] is the message
* @param {string} to which is one of all|get|random
* @private
###
_send = (args, to) ->
process.nextTick ->
msg = if args.length is 2 then args[1] else args[1..]
process.send type: 'msghub', event: args[0], msg: msg, to: to
_distribute = (msg) ->
to = @pool[msg.event][msg.to]()
return to.send msg unless Array.isArray to
to.forEach (_worker) ->
_worker.send msg
###*
* when a new Listener is added, put it into the SimplePool
*
* @param {array} args where args[0] is the event and args[1] is the message
* @param {string} to which is one of all|get|random
* @private
###
_addListener = (msg) ->
for id, worker of cluster.workers
if worker.process.pid is msg.pid
@pool[msg.listener] or= new SimplePool()
@pool[msg.listener].add worker
###*
* create a new Msghub instance
*
* @memberOf global
*
* @constructor
* @this {Msghub}
###
constructor: ->
if cluster.isMaster
@listeners = {}
@pool = {}
process.nextTick =>
for id, worker of cluster.workers
worker.on 'message', (msg) =>
return _addListener.call this, msg if msg.type is 'msghubListener'
if msg.type is 'msghub'
unless @pool[msg.event]?
return setTimeout =>
_distribute.call this, msg
, 2000
_distribute.call this, msg
else
@setMaxListeners 0
@on 'newListener', (listener) ->
process.send type: 'msghubListener', listener: listener, pid: process.pid
process.on 'message', (msg) =>
return if msg.type isnt 'msghub'
@emit msg.event, msg.msg
###*
* send a message to all workers who binded to the given event
*
* @param {string} event send only to workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
send: (args...) -> _send args, 'all'
###*
* send a message to a worker who binded to the given event in a roundrobin manner
*
* @param {string} event consider only workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
roundrobin: (args...) -> _send args, 'get'
###*
* send a message to a worker who binded to the given event in a random manner
*
* @param {string} event consider only workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
random: (args...) -> _send args, 'random'
module.exports = new Msghub() | true | ###*
* @name msghub
* @description A simple msghub to communicate in a clustered environ
* @author Valiton GmbH, PI:NAME:<NAME>END_PI "hereandnow" PI:NAME:<NAME>END_PI
###
###*
* Standard library imports
###
EventEmitter = require('events').EventEmitter
cluster = require 'cluster'
###*
* 3rd library imports
###
SimplePool = require 'simple-pool'
class Msghub extends EventEmitter
###*
* send to the master
*
* @param {array} args where args[0] is the event and args[1] is the message
* @param {string} to which is one of all|get|random
* @private
###
_send = (args, to) ->
process.nextTick ->
msg = if args.length is 2 then args[1] else args[1..]
process.send type: 'msghub', event: args[0], msg: msg, to: to
_distribute = (msg) ->
to = @pool[msg.event][msg.to]()
return to.send msg unless Array.isArray to
to.forEach (_worker) ->
_worker.send msg
###*
* when a new Listener is added, put it into the SimplePool
*
* @param {array} args where args[0] is the event and args[1] is the message
* @param {string} to which is one of all|get|random
* @private
###
_addListener = (msg) ->
for id, worker of cluster.workers
if worker.process.pid is msg.pid
@pool[msg.listener] or= new SimplePool()
@pool[msg.listener].add worker
###*
* create a new Msghub instance
*
* @memberOf global
*
* @constructor
* @this {Msghub}
###
constructor: ->
if cluster.isMaster
@listeners = {}
@pool = {}
process.nextTick =>
for id, worker of cluster.workers
worker.on 'message', (msg) =>
return _addListener.call this, msg if msg.type is 'msghubListener'
if msg.type is 'msghub'
unless @pool[msg.event]?
return setTimeout =>
_distribute.call this, msg
, 2000
_distribute.call this, msg
else
@setMaxListeners 0
@on 'newListener', (listener) ->
process.send type: 'msghubListener', listener: listener, pid: process.pid
process.on 'message', (msg) =>
return if msg.type isnt 'msghub'
@emit msg.event, msg.msg
###*
* send a message to all workers who binded to the given event
*
* @param {string} event send only to workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
send: (args...) -> _send args, 'all'
###*
* send a message to a worker who binded to the given event in a roundrobin manner
*
* @param {string} event consider only workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
roundrobin: (args...) -> _send args, 'get'
###*
* send a message to a worker who binded to the given event in a random manner
*
* @param {string} event consider only workers which binded to this event
* @param {*} message the message which should be sent
* @function global.Msghub.prototype.send
###
random: (args...) -> _send args, 'random'
module.exports = new Msghub() |
[
{
"context": "onform Bundle specification\n <https://github.com/fhirbase/fhirbase-plv8/issues/124>\n\n Hi!\n\n Issue connect",
"end": 274,
"score": 0.9991170167922974,
"start": 266,
"tag": "USERNAME",
"value": "fhirbase"
},
{
"context": "e\":[{\"use\":\"official\",\"family\":[\"Snow\"],\"given\":[\"John\"]}],\"gender\":\"male\",\"birthDate\":\"2001-01-01\"},\"re",
"end": 660,
"score": 0.9998051524162292,
"start": 656,
"tag": "NAME",
"value": "John"
},
{
"context": " {\"use\":\"official\",\"family\":[\"Snow\"],\"given\":[\"John\"]}\n ],\n \"ge",
"end": 1898,
"score": 0.9998276233673096,
"start": 1894,
"tag": "NAME",
"value": "John"
}
] | test/integration/issues/124_failed_transaction_response_doesnt_conform_bundle_specification_spec.coffee | micabe/fhirbase | 0 | plv8 = require('../../../plpl/src/plv8')
assert = require('assert')
describe 'Issues', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
###
Issue #124
Failed transaction response doesn't conform Bundle specification
<https://github.com/fhirbase/fhirbase-plv8/issues/124>
Hi!
Issue connected with #119, but my case is related to errors during transaction:
Trying to create resource of non-existing type:
```
fhirbase=# SELECT fhir_transaction('{"resourceType":"Bundle","type":"transaction","entry":[{"resource":{"resourceType":"UnknownResource","active":true,"name":[{"use":"official","family":["Snow"],"given":["John"]}],"gender":"male","birthDate":"2001-01-01"},"request":{"method":"POST","url":"UnknownResource"}}]}');
```
a response
```
fhir_transaction | {"resourceType":"Bundle","type":"transaction-response","entry":[{"resourceType":"OperationOutcome","text":{"div":"<div>Storage for UnknownResource does not exist</div>"},"issue":[{"severity":"error","code":"not-supported"}]}]}
```
The response doesn't conform specification at: https://www.hl7.org/fhir/bundle-response.json.html
Tested on v.1.3.0.15
###
it "#124 Failed transaction response doesn't conform Bundle specification", ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
transaction =
JSON.parse(
plv8.execute('''
SELECT fhir_transaction('
{
"resourceType":"Bundle",
"type":"transaction",
"entry":[
{
"resource":{
"resourceType":"UnknownResource",
"active":true,
"name":[
{"use":"official","family":["Snow"],"given":["John"]}
],
"gender":"male",
"birthDate":"2001-01-01"
},
"request":{
"method":"POST","url":"UnknownResource"
}
}
]
}
');
''')[0].fhir_transaction
)
assert.equal(transaction.resourceType, 'OperationOutcome')
assert.equal(
transaction.issue[0].diagnostics,
'Storage for UnknownResource does not exist'
)
| 216403 | plv8 = require('../../../plpl/src/plv8')
assert = require('assert')
describe 'Issues', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
###
Issue #124
Failed transaction response doesn't conform Bundle specification
<https://github.com/fhirbase/fhirbase-plv8/issues/124>
Hi!
Issue connected with #119, but my case is related to errors during transaction:
Trying to create resource of non-existing type:
```
fhirbase=# SELECT fhir_transaction('{"resourceType":"Bundle","type":"transaction","entry":[{"resource":{"resourceType":"UnknownResource","active":true,"name":[{"use":"official","family":["Snow"],"given":["<NAME>"]}],"gender":"male","birthDate":"2001-01-01"},"request":{"method":"POST","url":"UnknownResource"}}]}');
```
a response
```
fhir_transaction | {"resourceType":"Bundle","type":"transaction-response","entry":[{"resourceType":"OperationOutcome","text":{"div":"<div>Storage for UnknownResource does not exist</div>"},"issue":[{"severity":"error","code":"not-supported"}]}]}
```
The response doesn't conform specification at: https://www.hl7.org/fhir/bundle-response.json.html
Tested on v.1.3.0.15
###
it "#124 Failed transaction response doesn't conform Bundle specification", ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
transaction =
JSON.parse(
plv8.execute('''
SELECT fhir_transaction('
{
"resourceType":"Bundle",
"type":"transaction",
"entry":[
{
"resource":{
"resourceType":"UnknownResource",
"active":true,
"name":[
{"use":"official","family":["Snow"],"given":["<NAME>"]}
],
"gender":"male",
"birthDate":"2001-01-01"
},
"request":{
"method":"POST","url":"UnknownResource"
}
}
]
}
');
''')[0].fhir_transaction
)
assert.equal(transaction.resourceType, 'OperationOutcome')
assert.equal(
transaction.issue[0].diagnostics,
'Storage for UnknownResource does not exist'
)
| true | plv8 = require('../../../plpl/src/plv8')
assert = require('assert')
describe 'Issues', ->
before ->
plv8.execute("SET plv8.start_proc = 'plv8_init'")
###
Issue #124
Failed transaction response doesn't conform Bundle specification
<https://github.com/fhirbase/fhirbase-plv8/issues/124>
Hi!
Issue connected with #119, but my case is related to errors during transaction:
Trying to create resource of non-existing type:
```
fhirbase=# SELECT fhir_transaction('{"resourceType":"Bundle","type":"transaction","entry":[{"resource":{"resourceType":"UnknownResource","active":true,"name":[{"use":"official","family":["Snow"],"given":["PI:NAME:<NAME>END_PI"]}],"gender":"male","birthDate":"2001-01-01"},"request":{"method":"POST","url":"UnknownResource"}}]}');
```
a response
```
fhir_transaction | {"resourceType":"Bundle","type":"transaction-response","entry":[{"resourceType":"OperationOutcome","text":{"div":"<div>Storage for UnknownResource does not exist</div>"},"issue":[{"severity":"error","code":"not-supported"}]}]}
```
The response doesn't conform specification at: https://www.hl7.org/fhir/bundle-response.json.html
Tested on v.1.3.0.15
###
it "#124 Failed transaction response doesn't conform Bundle specification", ->
plv8.execute('''
SELECT fhir_create_storage('{"resourceType": "Patient"}');
''')
plv8.execute('''
SELECT fhir_truncate_storage('{"resourceType": "Patient"}');
''')
transaction =
JSON.parse(
plv8.execute('''
SELECT fhir_transaction('
{
"resourceType":"Bundle",
"type":"transaction",
"entry":[
{
"resource":{
"resourceType":"UnknownResource",
"active":true,
"name":[
{"use":"official","family":["Snow"],"given":["PI:NAME:<NAME>END_PI"]}
],
"gender":"male",
"birthDate":"2001-01-01"
},
"request":{
"method":"POST","url":"UnknownResource"
}
}
]
}
');
''')[0].fhir_transaction
)
assert.equal(transaction.resourceType, 'OperationOutcome')
assert.equal(
transaction.issue[0].diagnostics,
'Storage for UnknownResource does not exist'
)
|
[
{
"context": "'use strict'\n#\n# Ethan Mick\n# 2015\n#\n\nwinston = require 'winston'\nlevel = pro",
"end": 27,
"score": 0.9997780323028564,
"start": 17,
"tag": "NAME",
"value": "Ethan Mick"
}
] | lib/models/log.coffee | ethanmick/metrics | 0 | 'use strict'
#
# Ethan Mick
# 2015
#
winston = require 'winston'
level = process.env.WINSTON or 'debug'
winston.remove(winston.transports.Console)
winston.add(winston.transports.Console, {
level: level
colorize: yes
json: no
timestamp: yes
prettyPrint: yes
label: process.pid
})
module.exports = winston
| 20088 | 'use strict'
#
# <NAME>
# 2015
#
winston = require 'winston'
level = process.env.WINSTON or 'debug'
winston.remove(winston.transports.Console)
winston.add(winston.transports.Console, {
level: level
colorize: yes
json: no
timestamp: yes
prettyPrint: yes
label: process.pid
})
module.exports = winston
| true | 'use strict'
#
# PI:NAME:<NAME>END_PI
# 2015
#
winston = require 'winston'
level = process.env.WINSTON or 'debug'
winston.remove(winston.transports.Console)
winston.add(winston.transports.Console, {
level: level
colorize: yes
json: no
timestamp: yes
prettyPrint: yes
label: process.pid
})
module.exports = winston
|
[
{
"context": "\n\t\tif req.query.password and req.query.password is password\n\t\t\tnext()\n\t\telse\n\t\t\tres.send(401)\nmongoose.connec",
"end": 930,
"score": 0.6762269139289856,
"start": 922,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "@endpoint.addMiddleware('fetch', requirePassword('asdf'))\n\t\t\t@endpoint.register(@app)\n\n\t\t\t\n\t\t\trequest(@a",
"end": 3810,
"score": 0.9128537774085999,
"start": 3806,
"tag": "PASSWORD",
"value": "asdf"
},
{
"context": "get('/api/posts/' + @mod._id).query\n\t\t\t\tpassword:'asdf'\n\t\t\t.end (err, res) ->\n\t\t\t\tres.status.should.equa",
"end": 3918,
"score": 0.9981894493103027,
"start": 3914,
"tag": "PASSWORD",
"value": "asdf"
},
{
"context": "@endpoint.addMiddleware('fetch', requirePassword('asdf'))\n\t\t\t@endpoint.register(@app)\n\n\t\t\t\n\t\t\trequest(@a",
"end": 4177,
"score": 0.9101380109786987,
"start": 4173,
"tag": "PASSWORD",
"value": "asdf"
},
{
"context": "get('/api/posts/' + @mod._id).query\n\t\t\t\tpassword:'ffff'\n\t\t\t.end (err, res) ->\n\t\t\t\tres.status.should.equa",
"end": 4285,
"score": 0.988987147808075,
"start": 4281,
"tag": "PASSWORD",
"value": "ffff"
}
] | coffee/test/fetch.coffee | lbacas/mongoose-rest-endpoints | 1 | express = require 'express'
request = require 'supertest'
should = require 'should'
Q = require 'q'
mongoose = require 'mongoose'
moment = require 'moment'
require('../lib/log').verbose(true)
tracker = require '../lib/tracker'
mre = require '../lib/endpoint'
# Custom "Post" and "Comment" documents
commentSchema = new mongoose.Schema
comment:String
otherField:Number
_post:
type:mongoose.Schema.Types.ObjectId
ref:'Post'
_author:
type:mongoose.Schema.Types.ObjectId
ref:'Author'
postSchema = new mongoose.Schema
date:Date
number:Number
string:
type:String
required:true
_comments:[
type:mongoose.Schema.Types.ObjectId
ref:'Comment'
$through:'_post'
]
otherField:mongoose.Schema.Types.Mixed
authorSchema = new mongoose.Schema
name:'String'
# Custom middleware for testing
requirePassword = (password) ->
return (req, res, next) ->
if req.query.password and req.query.password is password
next()
else
res.send(401)
mongoose.connect('mongodb://localhost/mre_test')
cascade = require 'cascading-relations'
postSchema.plugin(cascade)
commentSchema.plugin(cascade)
authorSchema.plugin(cascade)
mongoose.model('Post', postSchema)
mongoose.model('Comment', commentSchema)
mongoose.model('Author', authorSchema)
mongoose.set 'debug', true
describe 'Fetch', ->
describe 'Basic object', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should retrieve with no hooks', (done) ->
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
console.log res.text
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
done()
it 'should honor bad pre_filter hook', (done) ->
@endpoint.tap 'pre_filter', 'fetch', (args, data, next) ->
data.number = 6
next(data)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(404)
done()
it 'should honor good pre_filter hook', (done) ->
@endpoint.tap 'pre_filter', 'fetch', (args, data, next) ->
data.number = 5
next(data)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
done()
it 'should honor pre_response hook', (done) ->
@endpoint.tap 'pre_response', 'fetch', (args, model, next) ->
delete model.number
next(model)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
should.not.exist(res.body.number)
done()
it 'should honor pre_response_error hook', (done) ->
@endpoint.tap 'pre_response_error', 'fetch', (args, err, next) ->
err.message = 'Foo'
next(err)
.register(@app)
# ID must be acceptable otherwise we'll get a 400 instead of 404
request(@app).get('/api/posts/abcdabcdabcdabcdabcdabcd').end (err, res) ->
res.status.should.equal(404)
res.text.should.equal('Foo')
done()
describe 'With middleware', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should retrieve with middleware', (done) ->
@endpoint.addMiddleware('fetch', requirePassword('asdf'))
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).query
password:'asdf'
.end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
done()
it 'should give a 401 with wrong password', (done) ->
@endpoint.addMiddleware('fetch', requirePassword('asdf'))
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).query
password:'ffff'
.end (err, res) ->
res.status.should.equal(401)
done()
describe 'Populate', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
_related:
_comments:[
comment:'Asdf1234'
otherField:5
]
mod.cascadeSave (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should populate on _related', (done) ->
@endpoint.populate('_comments').register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
res.body._related._comments.length.should.equal(1)
res.body._comments.length.should.equal(1)
res.body._related._comments[0].comment.should.equal('Asdf1234')
res.body._related._comments[0].otherField.should.equal(5)
done()
it 'should populate when specifying fields', (done) ->
@endpoint.populate('_comments', 'comment').register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
res.body._related._comments.length.should.equal(1)
res.body._comments.length.should.equal(1)
res.body._related._comments[0].comment.should.equal('Asdf1234')
should.not.exist(res.body._related._comments[0].otherField)
done()
describe 'Tracking interface', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
done()
afterEach (done) ->
if @mod
@mod.remove ->
done()
else
done()
it 'should run tracking interface on success', (done) ->
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
tracker.interface =
track: (params) ->
console.log 'Tracking params', params
params.response.code.should.equal(200)
(params.time < 50).should.equal(true)
done()
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
console.log 'Ended'
it 'should run tracking interface on error', (done) ->
tracker.interface =
track: (params) ->
console.log 'Tracking params:', params
params.response.code.should.equal(400)
(params.time < 50).should.equal(true)
done()
@endpoint.register(@app)
request(@app).get('/api/posts/asdf').end (err, res) ->
console.log 'Ended'
it 'should calculate time based on X-Request-Start header', (done) ->
tracker.interface =
track: (params) ->
params.response.code.should.equal(400)
params.time.should.be.greaterThan(100)
params.time.should.be.lessThan(200)
done()
@endpoint.register(@app)
requestStart = moment().valueOf() - 100
request(@app).get('/api/posts/asdf').set('X-Request-Start', requestStart.toString()).end (err, res) ->
console.log 'Ended'
| 224828 | express = require 'express'
request = require 'supertest'
should = require 'should'
Q = require 'q'
mongoose = require 'mongoose'
moment = require 'moment'
require('../lib/log').verbose(true)
tracker = require '../lib/tracker'
mre = require '../lib/endpoint'
# Custom "Post" and "Comment" documents
commentSchema = new mongoose.Schema
comment:String
otherField:Number
_post:
type:mongoose.Schema.Types.ObjectId
ref:'Post'
_author:
type:mongoose.Schema.Types.ObjectId
ref:'Author'
postSchema = new mongoose.Schema
date:Date
number:Number
string:
type:String
required:true
_comments:[
type:mongoose.Schema.Types.ObjectId
ref:'Comment'
$through:'_post'
]
otherField:mongoose.Schema.Types.Mixed
authorSchema = new mongoose.Schema
name:'String'
# Custom middleware for testing
requirePassword = (password) ->
return (req, res, next) ->
if req.query.password and req.query.password is <PASSWORD>
next()
else
res.send(401)
mongoose.connect('mongodb://localhost/mre_test')
cascade = require 'cascading-relations'
postSchema.plugin(cascade)
commentSchema.plugin(cascade)
authorSchema.plugin(cascade)
mongoose.model('Post', postSchema)
mongoose.model('Comment', commentSchema)
mongoose.model('Author', authorSchema)
mongoose.set 'debug', true
describe 'Fetch', ->
describe 'Basic object', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should retrieve with no hooks', (done) ->
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
console.log res.text
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
done()
it 'should honor bad pre_filter hook', (done) ->
@endpoint.tap 'pre_filter', 'fetch', (args, data, next) ->
data.number = 6
next(data)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(404)
done()
it 'should honor good pre_filter hook', (done) ->
@endpoint.tap 'pre_filter', 'fetch', (args, data, next) ->
data.number = 5
next(data)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
done()
it 'should honor pre_response hook', (done) ->
@endpoint.tap 'pre_response', 'fetch', (args, model, next) ->
delete model.number
next(model)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
should.not.exist(res.body.number)
done()
it 'should honor pre_response_error hook', (done) ->
@endpoint.tap 'pre_response_error', 'fetch', (args, err, next) ->
err.message = 'Foo'
next(err)
.register(@app)
# ID must be acceptable otherwise we'll get a 400 instead of 404
request(@app).get('/api/posts/abcdabcdabcdabcdabcdabcd').end (err, res) ->
res.status.should.equal(404)
res.text.should.equal('Foo')
done()
describe 'With middleware', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should retrieve with middleware', (done) ->
@endpoint.addMiddleware('fetch', requirePassword('<PASSWORD>'))
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).query
password:'<PASSWORD>'
.end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
done()
it 'should give a 401 with wrong password', (done) ->
@endpoint.addMiddleware('fetch', requirePassword('<PASSWORD>'))
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).query
password:'<PASSWORD>'
.end (err, res) ->
res.status.should.equal(401)
done()
describe 'Populate', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
_related:
_comments:[
comment:'Asdf1234'
otherField:5
]
mod.cascadeSave (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should populate on _related', (done) ->
@endpoint.populate('_comments').register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
res.body._related._comments.length.should.equal(1)
res.body._comments.length.should.equal(1)
res.body._related._comments[0].comment.should.equal('Asdf1234')
res.body._related._comments[0].otherField.should.equal(5)
done()
it 'should populate when specifying fields', (done) ->
@endpoint.populate('_comments', 'comment').register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
res.body._related._comments.length.should.equal(1)
res.body._comments.length.should.equal(1)
res.body._related._comments[0].comment.should.equal('Asdf1234')
should.not.exist(res.body._related._comments[0].otherField)
done()
describe 'Tracking interface', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
done()
afterEach (done) ->
if @mod
@mod.remove ->
done()
else
done()
it 'should run tracking interface on success', (done) ->
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
tracker.interface =
track: (params) ->
console.log 'Tracking params', params
params.response.code.should.equal(200)
(params.time < 50).should.equal(true)
done()
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
console.log 'Ended'
it 'should run tracking interface on error', (done) ->
tracker.interface =
track: (params) ->
console.log 'Tracking params:', params
params.response.code.should.equal(400)
(params.time < 50).should.equal(true)
done()
@endpoint.register(@app)
request(@app).get('/api/posts/asdf').end (err, res) ->
console.log 'Ended'
it 'should calculate time based on X-Request-Start header', (done) ->
tracker.interface =
track: (params) ->
params.response.code.should.equal(400)
params.time.should.be.greaterThan(100)
params.time.should.be.lessThan(200)
done()
@endpoint.register(@app)
requestStart = moment().valueOf() - 100
request(@app).get('/api/posts/asdf').set('X-Request-Start', requestStart.toString()).end (err, res) ->
console.log 'Ended'
| true | express = require 'express'
request = require 'supertest'
should = require 'should'
Q = require 'q'
mongoose = require 'mongoose'
moment = require 'moment'
require('../lib/log').verbose(true)
tracker = require '../lib/tracker'
mre = require '../lib/endpoint'
# Custom "Post" and "Comment" documents
commentSchema = new mongoose.Schema
comment:String
otherField:Number
_post:
type:mongoose.Schema.Types.ObjectId
ref:'Post'
_author:
type:mongoose.Schema.Types.ObjectId
ref:'Author'
postSchema = new mongoose.Schema
date:Date
number:Number
string:
type:String
required:true
_comments:[
type:mongoose.Schema.Types.ObjectId
ref:'Comment'
$through:'_post'
]
otherField:mongoose.Schema.Types.Mixed
authorSchema = new mongoose.Schema
name:'String'
# Custom middleware for testing
requirePassword = (password) ->
return (req, res, next) ->
if req.query.password and req.query.password is PI:PASSWORD:<PASSWORD>END_PI
next()
else
res.send(401)
mongoose.connect('mongodb://localhost/mre_test')
cascade = require 'cascading-relations'
postSchema.plugin(cascade)
commentSchema.plugin(cascade)
authorSchema.plugin(cascade)
mongoose.model('Post', postSchema)
mongoose.model('Comment', commentSchema)
mongoose.model('Author', authorSchema)
mongoose.set 'debug', true
describe 'Fetch', ->
describe 'Basic object', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should retrieve with no hooks', (done) ->
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
console.log res.text
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
done()
it 'should honor bad pre_filter hook', (done) ->
@endpoint.tap 'pre_filter', 'fetch', (args, data, next) ->
data.number = 6
next(data)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(404)
done()
it 'should honor good pre_filter hook', (done) ->
@endpoint.tap 'pre_filter', 'fetch', (args, data, next) ->
data.number = 5
next(data)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
done()
it 'should honor pre_response hook', (done) ->
@endpoint.tap 'pre_response', 'fetch', (args, model, next) ->
delete model.number
next(model)
.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
should.not.exist(res.body.number)
done()
it 'should honor pre_response_error hook', (done) ->
@endpoint.tap 'pre_response_error', 'fetch', (args, err, next) ->
err.message = 'Foo'
next(err)
.register(@app)
# ID must be acceptable otherwise we'll get a 400 instead of 404
request(@app).get('/api/posts/abcdabcdabcdabcdabcdabcd').end (err, res) ->
res.status.should.equal(404)
res.text.should.equal('Foo')
done()
describe 'With middleware', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should retrieve with middleware', (done) ->
@endpoint.addMiddleware('fetch', requirePassword('PI:PASSWORD:<PASSWORD>END_PI'))
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).query
password:'PI:PASSWORD:<PASSWORD>END_PI'
.end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
done()
it 'should give a 401 with wrong password', (done) ->
@endpoint.addMiddleware('fetch', requirePassword('PI:PASSWORD:<PASSWORD>END_PI'))
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).query
password:'PI:PASSWORD:<PASSWORD>END_PI'
.end (err, res) ->
res.status.should.equal(401)
done()
describe 'Populate', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
_related:
_comments:[
comment:'Asdf1234'
otherField:5
]
mod.cascadeSave (err, res) =>
@mod = res
done()
afterEach (done) ->
@mod.remove ->
done()
it 'should populate on _related', (done) ->
@endpoint.populate('_comments').register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
res.body._related._comments.length.should.equal(1)
res.body._comments.length.should.equal(1)
res.body._related._comments[0].comment.should.equal('Asdf1234')
res.body._related._comments[0].otherField.should.equal(5)
done()
it 'should populate when specifying fields', (done) ->
@endpoint.populate('_comments', 'comment').register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
res.status.should.equal(200)
res.body.number.should.equal(5)
res.body.string.should.equal('Test')
res.body._related._comments.length.should.equal(1)
res.body._comments.length.should.equal(1)
res.body._related._comments[0].comment.should.equal('Asdf1234')
should.not.exist(res.body._related._comments[0].otherField)
done()
describe 'Tracking interface', ->
beforeEach (done) ->
@endpoint = new mre('/api/posts', 'Post')
@app = express()
@app.use(express.bodyParser())
@app.use(express.methodOverride())
done()
afterEach (done) ->
if @mod
@mod.remove ->
done()
else
done()
it 'should run tracking interface on success', (done) ->
modClass = mongoose.model('Post')
mod = modClass
date:Date.now()
number:5
string:'Test'
mod.save (err, res) =>
@mod = res
tracker.interface =
track: (params) ->
console.log 'Tracking params', params
params.response.code.should.equal(200)
(params.time < 50).should.equal(true)
done()
@endpoint.register(@app)
request(@app).get('/api/posts/' + @mod._id).end (err, res) ->
console.log 'Ended'
it 'should run tracking interface on error', (done) ->
tracker.interface =
track: (params) ->
console.log 'Tracking params:', params
params.response.code.should.equal(400)
(params.time < 50).should.equal(true)
done()
@endpoint.register(@app)
request(@app).get('/api/posts/asdf').end (err, res) ->
console.log 'Ended'
it 'should calculate time based on X-Request-Start header', (done) ->
tracker.interface =
track: (params) ->
params.response.code.should.equal(400)
params.time.should.be.greaterThan(100)
params.time.should.be.lessThan(200)
done()
@endpoint.register(@app)
requestStart = moment().valueOf() - 100
request(@app).get('/api/posts/asdf').set('X-Request-Start', requestStart.toString()).end (err, res) ->
console.log 'Ended'
|
[
{
"context": " * BracketsExtensionTweetBot\n * http://github.com/ingorichter/BracketsExtensionTweetBot\n *\n * Copyright (c) 201",
"end": 65,
"score": 0.9996655583381653,
"start": 54,
"tag": "USERNAME",
"value": "ingorichter"
},
{
"context": "BracketsExtensionTweetBot\n *\n * Copyright (c) 2014 Ingo Richter\n * Licensed under the MIT license.\n###\n# jslint v",
"end": 129,
"score": 0.9998800158500671,
"start": 117,
"tag": "NAME",
"value": "Ingo Richter"
},
{
"context": "changeRecord.homepage} #{changeRecord.downloadUrl} @brackets\"\n\n#\n# dryRunTwitterClient for debugging and dry r",
"end": 2307,
"score": 0.9779350161552429,
"start": 2298,
"tag": "USERNAME",
"value": "@brackets"
}
] | src/bracketsextensiontweetbot.coffee | ingorichter/BracketsExtensionTweetBot | 0 | ###
* BracketsExtensionTweetBot
* http://github.com/ingorichter/BracketsExtensionTweetBot
*
* Copyright (c) 2014 Ingo Richter
* Licensed under the MIT license.
###
# jslint vars: true, plusplus: true, devel: true, node: true, nomen: true, indent: 4, maxerr: 50
'use strict'
Promise = require 'bluebird'
TwitterPublisher = require './TwitterPublisher'
RegistryUtils = require './RegistryUtils'
dotenv = require 'dotenv-safe'
process = require 'process'
dotenv.config()
NOTIFICATION_TYPE = {
'UPDATE': 'UPDATE',
'NEW': 'NEW'
}
dryRun = false
if process.argv.length == 3 && process.argv[2] == 'dryRun'
dryRun = true
createChangeset = (oldRegistry, newRegistry) ->
changesets = []
for own extensionName, extension of newRegistry
previousExtension = oldRegistry?[extensionName]
if previousExtension
previousVersionsCount = previousExtension.versions.length
type = NOTIFICATION_TYPE.UPDATE if extension.versions.length > previousVersionsCount
type = undefined if extension.versions.length is previousVersionsCount
else type = NOTIFICATION_TYPE.NEW
if type is NOTIFICATION_TYPE.UPDATE or type is NOTIFICATION_TYPE.NEW
# determine what to provide for homepage if the homepage isn't available
_homepage = extension.metadata.homepage
if not _homepage
_homepage = extension.metadata.repository?.url
changeRecord = {
type: type,
title: extension.metadata.title ? extension.metadata.name,
version: extension.metadata.version,
downloadUrl: RegistryUtils.extensionDownloadURL(extension),
description: extension.metadata.description,
homepage: _homepage ? ""
}
changesets.push changeRecord
changesets
createTwitterConfig = ->
twitterConf = {}
twitterConf.consumer_key = process.env.TWITTER_CONSUMER_KEY
twitterConf.consumer_secret = process.env.TWITTER_CONSUMER_SECRET
twitterConf.access_token = process.env.TWITTER_ACCESS_TOKEN
twitterConf.access_token_secret = process.env.TWITTER_ACCESS_TOKEN_SECRET
twitterConf
#
# createNotification
#
createNotification = (changeRecord) ->
"#{changeRecord.title} - #{changeRecord.version}
(#{changeRecord.type}) #{changeRecord.homepage} #{changeRecord.downloadUrl} @brackets"
#
# dryRunTwitterClient for debugging and dry run testing
#
dryRunTwitterClient = ->
dryRunTwitterClient = {
post: (endpoint, tweet) ->
# TODO(Ingo): replace with logging infrastructure
# console.log tweet.status
Promise.resolve(tweet.status)
}
# This is the main function
rockAndRoll = ->
new Promise (resolve, reject) ->
Promise.join(RegistryUtils.loadLocalRegistry(), RegistryUtils.downloadExtensionRegistry(),
(oldRegistry, newRegistry) ->
notifications = createChangeset(oldRegistry, newRegistry).map (changeRecord) ->
createNotification changeRecord
twitterConf = createTwitterConfig()
twitterPublisher = new TwitterPublisher twitterConf
twitterPublisher.setClient dryRunTwitterClient() if dryRun
twitterPublisher.post notification for notification in notifications
RegistryUtils.swapRegistryFiles(newRegistry).then ->
resolve()
)
# API
exports.createChangeset = createChangeset
exports.createNotification = createNotification
exports.createTwitterConfig = createTwitterConfig
exports.rockAndRoll = rockAndRoll | 34865 | ###
* BracketsExtensionTweetBot
* http://github.com/ingorichter/BracketsExtensionTweetBot
*
* Copyright (c) 2014 <NAME>
* Licensed under the MIT license.
###
# jslint vars: true, plusplus: true, devel: true, node: true, nomen: true, indent: 4, maxerr: 50
'use strict'
Promise = require 'bluebird'
TwitterPublisher = require './TwitterPublisher'
RegistryUtils = require './RegistryUtils'
dotenv = require 'dotenv-safe'
process = require 'process'
dotenv.config()
NOTIFICATION_TYPE = {
'UPDATE': 'UPDATE',
'NEW': 'NEW'
}
dryRun = false
if process.argv.length == 3 && process.argv[2] == 'dryRun'
dryRun = true
createChangeset = (oldRegistry, newRegistry) ->
changesets = []
for own extensionName, extension of newRegistry
previousExtension = oldRegistry?[extensionName]
if previousExtension
previousVersionsCount = previousExtension.versions.length
type = NOTIFICATION_TYPE.UPDATE if extension.versions.length > previousVersionsCount
type = undefined if extension.versions.length is previousVersionsCount
else type = NOTIFICATION_TYPE.NEW
if type is NOTIFICATION_TYPE.UPDATE or type is NOTIFICATION_TYPE.NEW
# determine what to provide for homepage if the homepage isn't available
_homepage = extension.metadata.homepage
if not _homepage
_homepage = extension.metadata.repository?.url
changeRecord = {
type: type,
title: extension.metadata.title ? extension.metadata.name,
version: extension.metadata.version,
downloadUrl: RegistryUtils.extensionDownloadURL(extension),
description: extension.metadata.description,
homepage: _homepage ? ""
}
changesets.push changeRecord
changesets
createTwitterConfig = ->
twitterConf = {}
twitterConf.consumer_key = process.env.TWITTER_CONSUMER_KEY
twitterConf.consumer_secret = process.env.TWITTER_CONSUMER_SECRET
twitterConf.access_token = process.env.TWITTER_ACCESS_TOKEN
twitterConf.access_token_secret = process.env.TWITTER_ACCESS_TOKEN_SECRET
twitterConf
#
# createNotification
#
createNotification = (changeRecord) ->
"#{changeRecord.title} - #{changeRecord.version}
(#{changeRecord.type}) #{changeRecord.homepage} #{changeRecord.downloadUrl} @brackets"
#
# dryRunTwitterClient for debugging and dry run testing
#
dryRunTwitterClient = ->
dryRunTwitterClient = {
post: (endpoint, tweet) ->
# TODO(Ingo): replace with logging infrastructure
# console.log tweet.status
Promise.resolve(tweet.status)
}
# This is the main function
rockAndRoll = ->
new Promise (resolve, reject) ->
Promise.join(RegistryUtils.loadLocalRegistry(), RegistryUtils.downloadExtensionRegistry(),
(oldRegistry, newRegistry) ->
notifications = createChangeset(oldRegistry, newRegistry).map (changeRecord) ->
createNotification changeRecord
twitterConf = createTwitterConfig()
twitterPublisher = new TwitterPublisher twitterConf
twitterPublisher.setClient dryRunTwitterClient() if dryRun
twitterPublisher.post notification for notification in notifications
RegistryUtils.swapRegistryFiles(newRegistry).then ->
resolve()
)
# API
exports.createChangeset = createChangeset
exports.createNotification = createNotification
exports.createTwitterConfig = createTwitterConfig
exports.rockAndRoll = rockAndRoll | true | ###
* BracketsExtensionTweetBot
* http://github.com/ingorichter/BracketsExtensionTweetBot
*
* Copyright (c) 2014 PI:NAME:<NAME>END_PI
* Licensed under the MIT license.
###
# jslint vars: true, plusplus: true, devel: true, node: true, nomen: true, indent: 4, maxerr: 50
'use strict'
Promise = require 'bluebird'
TwitterPublisher = require './TwitterPublisher'
RegistryUtils = require './RegistryUtils'
dotenv = require 'dotenv-safe'
process = require 'process'
dotenv.config()
NOTIFICATION_TYPE = {
'UPDATE': 'UPDATE',
'NEW': 'NEW'
}
dryRun = false
if process.argv.length == 3 && process.argv[2] == 'dryRun'
dryRun = true
createChangeset = (oldRegistry, newRegistry) ->
changesets = []
for own extensionName, extension of newRegistry
previousExtension = oldRegistry?[extensionName]
if previousExtension
previousVersionsCount = previousExtension.versions.length
type = NOTIFICATION_TYPE.UPDATE if extension.versions.length > previousVersionsCount
type = undefined if extension.versions.length is previousVersionsCount
else type = NOTIFICATION_TYPE.NEW
if type is NOTIFICATION_TYPE.UPDATE or type is NOTIFICATION_TYPE.NEW
# determine what to provide for homepage if the homepage isn't available
_homepage = extension.metadata.homepage
if not _homepage
_homepage = extension.metadata.repository?.url
changeRecord = {
type: type,
title: extension.metadata.title ? extension.metadata.name,
version: extension.metadata.version,
downloadUrl: RegistryUtils.extensionDownloadURL(extension),
description: extension.metadata.description,
homepage: _homepage ? ""
}
changesets.push changeRecord
changesets
createTwitterConfig = ->
twitterConf = {}
twitterConf.consumer_key = process.env.TWITTER_CONSUMER_KEY
twitterConf.consumer_secret = process.env.TWITTER_CONSUMER_SECRET
twitterConf.access_token = process.env.TWITTER_ACCESS_TOKEN
twitterConf.access_token_secret = process.env.TWITTER_ACCESS_TOKEN_SECRET
twitterConf
#
# createNotification
#
createNotification = (changeRecord) ->
"#{changeRecord.title} - #{changeRecord.version}
(#{changeRecord.type}) #{changeRecord.homepage} #{changeRecord.downloadUrl} @brackets"
#
# dryRunTwitterClient for debugging and dry run testing
#
dryRunTwitterClient = ->
dryRunTwitterClient = {
post: (endpoint, tweet) ->
# TODO(Ingo): replace with logging infrastructure
# console.log tweet.status
Promise.resolve(tweet.status)
}
# This is the main function
rockAndRoll = ->
new Promise (resolve, reject) ->
Promise.join(RegistryUtils.loadLocalRegistry(), RegistryUtils.downloadExtensionRegistry(),
(oldRegistry, newRegistry) ->
notifications = createChangeset(oldRegistry, newRegistry).map (changeRecord) ->
createNotification changeRecord
twitterConf = createTwitterConfig()
twitterPublisher = new TwitterPublisher twitterConf
twitterPublisher.setClient dryRunTwitterClient() if dryRun
twitterPublisher.post notification for notification in notifications
RegistryUtils.swapRegistryFiles(newRegistry).then ->
resolve()
)
# API
exports.createChangeset = createChangeset
exports.createNotification = createNotification
exports.createTwitterConfig = createTwitterConfig
exports.rockAndRoll = rockAndRoll |
[
{
"context": "\n\ndnschain\nhttp://dnschain.net\n\nCopyright (c) 2013 Greg Slepak\nLicensed under the BSD 3-Clause license.\n\n###\n\n# ",
"end": 65,
"score": 0.9998666048049927,
"start": 54,
"tag": "NAME",
"value": "Greg Slepak"
},
{
"context": "of NODE_DNS method\n # dns.setServers ['8.8.8.8']\n \n if dns.getServers? and",
"end": 828,
"score": 0.9997374415397644,
"start": 821,
"tag": "IP_ADDRESS",
"value": "8.8.8.8"
},
{
"context": "hod'\n blacklist = _.intersection ['127.0.0.1', '::1', 'localhost'], dns.getServers()\n ",
"end": 989,
"score": 0.9997561573982239,
"start": 980,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": " blacklist = _.intersection ['127.0.0.1', '::1', 'localhost'], dns.getServers()\n ",
"end": 996,
"score": 0.9992016553878784,
"start": 992,
"tag": "IP_ADDRESS",
"value": "'::1"
},
{
"context": " # https://github.com/namecoin/wiki/blob/master/Domain-Name-Specification-2.0.md",
"end": 6190,
"score": 0.9992771148681641,
"start": 6182,
"tag": "USERNAME",
"value": "namecoin"
}
] | src/lib/dns.coffee | wartron/dnschain | 1 | ###
dnschain
http://dnschain.net
Copyright (c) 2013 Greg Slepak
Licensed under the BSD 3-Clause license.
###
# TODO: go through 'TODO's!
#
# TODO: check if we're missing any edns support
module.exports = (dnschain) ->
# expose these into our namespace
for k of dnschain.globals
eval "var #{k} = dnschain.globals.#{k};"
ResolverStream = require('./resolver-stream')(dnschain)
QTYPE_NAME = dns2.consts.QTYPE_TO_NAME
NAME_QTYPE = dns2.consts.NAME_TO_QTYPE
NAME_RCODE = dns2.consts.NAME_TO_RCODE
RCODE_NAME = dns2.consts.RCODE_TO_NAME
class DNSServer
constructor: (@dnschain) ->
@log = newLogger 'DNS'
@log.debug "Loading DNSServer..."
# this is just for development testing of NODE_DNS method
# dns.setServers ['8.8.8.8']
if dns.getServers? and consts.oldDNS.NODE_DNS is config.get 'dns:oldDNSMethod'
blacklist = _.intersection ['127.0.0.1', '::1', 'localhost'], dns.getServers()
if blacklist.length > 0
tErr "Cannot use NODE_DNS method when system DNS lists %j as a resolver! Would lead to infinite loop!", blacklist
@server = dns2.createServer() or tErr "dns2 create"
@server.on 'socketError', (err) -> tErr err
@server.on 'request', @callback.bind(@)
@server.serve config.get('dns:port'), config.get('dns:host')
@log.info 'started DNS', config.get 'dns'
shutdown: ->
@log.debug 'shutting down!'
@server.close()
namecoinizeDomain: (domain) ->
nmcDomain = S(domain).chompRight('.bit').s
if (dotIdx = nmcDomain.indexOf('.')) != -1
nmcDomain = nmcDomain.slice(dotIdx+1) # rm subdomain
'd/' + nmcDomain # add 'd/' namespace
oldDNSLookup: (q, res) ->
method = config.get 'dns:oldDNSMethod'
sig = "oldDNS{#{method}}"
@log.debug {fn:sig+':start', q:q}
if method is consts.oldDNS.NATIVE_DNS
req = new dns2.Request
question: q
server : config.get 'dns:oldDNS'
try_edns: q.type is NAME_QTYPE.ANY
success = false
req.on 'message', (err, answer) =>
if err?
@log.error "should not have an error here!", {fn:sig+':error', err:err, answer:answer}
req.DNSErr ?= err
else
success = true
res.edns_version = 0 if req.try_edns
res.header.ra = answer.header.ra
for a in ['answer', 'authority', 'additional'] when answer[a].length?
res[a].push answer[a]...
@log.debug {fn:sig+':message', answer:answer}
req.on 'error', (err) =>
@log.error {fn:sig+':error', err:err, answer:answer}
req.DNSErr = err
req.on 'end', =>
if success
@log.debug {fn:sig+':success', q:q, res: _.omit(res, '_socket')}
res.send()
else
# TODO: this is noisy.
# also make log output look good in journalctl
@log.warn {fn:sig+':fail', q:q, err:req.DNSErr, res:_.omit(res, '_socket')}
@sendErr res
req.send()
else
dns.resolve q.name, QTYPE_NAME[q.type], (err, addrs) =>
if err
@log.debug {fn:sig+':fail', q:q, err:err}
@sendErr res
else
# USING THIS METHOD IS DISCOURAGED BECAUSE IT DOESN'T
# PROVIDE US WITH CORRECT TTL VALUES!!
# TODO: pick an appropriate TTL value!
ttl = Math.floor(Math.random() * 3600) + 30
res.answer.push (addrs.map ip2type(q.name, ttl, QTYPE_NAME[q.type]))...
@log.debug {fn:sig+':success', answer:res.answer, q:q.name}
res.send()
sendErr: (res, code) ->
res.header.rcode = code ? NAME_RCODE.SERVFAIL
@log.debug {fn:'sendErr', code:RCODE_NAME[code]}
res.send()
callback: (req, res) ->
# answering multiple questions in a query appears to be problematic,
# and few servers do it, so we only answer the first question:
# https://stackoverflow.com/questions/4082081/requesting-a-and-aaaa-records-in-single-dns-query
q = req.question[0]
ttl = Math.floor(Math.random() * 3600) + 30 # TODO: pick an appropriate TTL value!
@log.debug "received question", q
# TODO: make sure we correctly handle AAAA
# if q.type != NAME_QTYPE.A
# @log.debug "only support 'A' types ATM, deferring request!", {q:q}
# @oldDNSLookup(q, res)
if S(q.name).endsWith '.bit'
nmcDomain = @namecoinizeDomain q.name
@log.debug {fn: 'cb|.bit', nmcDomain:nmcDomain, q:q}
@dnschain.nmc.resolve nmcDomain, (err, result) =>
@log.debug {fn: 'nmc_show|cb'}
if err
@log.error {fn:'nmc_show', err:err, result:result, q:q}
@sendErr res
else
@log.debug {fn:'nmc_show', q:q, result:result}
try
info = JSON.parse result.value
catch e
@log.warn "bad JSON!", {err:e, result:result, q:q}
return @sendErr res, NAME_RCODE.FORMERR
# TODO: handle all the types specified in the specification!
# https://github.com/namecoin/wiki/blob/master/Domain-Name-Specification-2.0.md
# TODO: handle other info outside of the specification!
# - GNS support
# - DNSSEC/DANE support?
# According to NMC specification, specifying 'ns'
# overrules 'ip' value, so check it here and resolve using
# old-style DNS.
if info.ns?.length > 0
# 1. Create a stream of nameserver IP addresses out of info.ns
# 2. Send request to each of the servers, separated by a two
# second delay. On receiving the first answer from any of
# them, cancel all other pending requests and respond to
# our client.
#
# TODO: handle ns = IPv6 addr!
[nsIPs, nsCNAMEs] = [[],[]]
for ip in info.ns
(if net.isIP(ip) then nsIPs else nsCNAMEs).push(ip)
# ResolverStream will clone 'resolvOpts' in the constructor
nsCNAME2IP = new ResolverStream(resolvOpts = log:@log)
nsIPs = es.merge(sa(nsIPs), sa(nsCNAMEs).pipe(nsCNAME2IP))
# safe to do becase ResolverStream clones the opts
resolvOpts.stackedDelay = 1000
resolvOpts.reqMaker = (nsIP) =>
req = dns2.Request
question: q
server: {address: nsIP}
stackedQuery = new ResolverStream resolvOpts
stackedQuery.errors = 0
nsIPs.on 'data', (nsIP) ->
stackedQuery.write nsIP
stackedQuery.on 'error', (err) =>
if ++stackedQuery.errors == info.ns.length
@log.warn "errors on all NS!", {fn:'nmc_show', q:q, err:err}
@sendErr(res)
stackedQuery.on 'answers', (answers) =>
nsCNAME2IP.cancelRequests(true)
stackedQuery.cancelRequests(true)
res.answer.push answers...
@log.debug "sending answers!", {fn:'nmc_show', answers:answers, q:q}
res.send()
else if info.ip
# we have its IP! send reply to client
# TODO: pick an appropriate 'ttl' for the response!
# TODO: handle more info! send the rest of the
# stuff in 'info', and all the IPs!
info.ip = [info.ip] if typeof info.ip is 'string'
# info.ip.forEach (a)-> res.answer.push ip2type(q.name, ttl)(a)
res.answer.push (info.ip.map ip2type(q.name, ttl, QTYPE_NAME[q.type]))...
@log.debug {fn:'nmc_show|ip', q:q, answer:res.answer}
res.send()
else
@log.warn {fn: 'nmc_show|404', q:q}
@sendErr res, NAME_RCODE.NOTFOUND
else if S(q.name).endsWith '.dns'
# TODO: right now we're doing a catch-all and pretending they asked
# for namecoin.dns...
res.answer.push ip2type(q.name,ttl,QTYPE_NAME[q.type])(config.get 'dns:externalIP')
@log.debug {fn:'cb|.dns', q:q, answer:res.answer}
res.send()
else
@log.debug "deferring question", {fn: "cb|else", q:q}
@oldDNSLookup q, res
| 61977 | ###
dnschain
http://dnschain.net
Copyright (c) 2013 <NAME>
Licensed under the BSD 3-Clause license.
###
# TODO: go through 'TODO's!
#
# TODO: check if we're missing any edns support
module.exports = (dnschain) ->
# expose these into our namespace
for k of dnschain.globals
eval "var #{k} = dnschain.globals.#{k};"
ResolverStream = require('./resolver-stream')(dnschain)
QTYPE_NAME = dns2.consts.QTYPE_TO_NAME
NAME_QTYPE = dns2.consts.NAME_TO_QTYPE
NAME_RCODE = dns2.consts.NAME_TO_RCODE
RCODE_NAME = dns2.consts.RCODE_TO_NAME
class DNSServer
constructor: (@dnschain) ->
@log = newLogger 'DNS'
@log.debug "Loading DNSServer..."
# this is just for development testing of NODE_DNS method
# dns.setServers ['8.8.8.8']
if dns.getServers? and consts.oldDNS.NODE_DNS is config.get 'dns:oldDNSMethod'
blacklist = _.intersection ['127.0.0.1', '::1', 'localhost'], dns.getServers()
if blacklist.length > 0
tErr "Cannot use NODE_DNS method when system DNS lists %j as a resolver! Would lead to infinite loop!", blacklist
@server = dns2.createServer() or tErr "dns2 create"
@server.on 'socketError', (err) -> tErr err
@server.on 'request', @callback.bind(@)
@server.serve config.get('dns:port'), config.get('dns:host')
@log.info 'started DNS', config.get 'dns'
shutdown: ->
@log.debug 'shutting down!'
@server.close()
namecoinizeDomain: (domain) ->
nmcDomain = S(domain).chompRight('.bit').s
if (dotIdx = nmcDomain.indexOf('.')) != -1
nmcDomain = nmcDomain.slice(dotIdx+1) # rm subdomain
'd/' + nmcDomain # add 'd/' namespace
oldDNSLookup: (q, res) ->
method = config.get 'dns:oldDNSMethod'
sig = "oldDNS{#{method}}"
@log.debug {fn:sig+':start', q:q}
if method is consts.oldDNS.NATIVE_DNS
req = new dns2.Request
question: q
server : config.get 'dns:oldDNS'
try_edns: q.type is NAME_QTYPE.ANY
success = false
req.on 'message', (err, answer) =>
if err?
@log.error "should not have an error here!", {fn:sig+':error', err:err, answer:answer}
req.DNSErr ?= err
else
success = true
res.edns_version = 0 if req.try_edns
res.header.ra = answer.header.ra
for a in ['answer', 'authority', 'additional'] when answer[a].length?
res[a].push answer[a]...
@log.debug {fn:sig+':message', answer:answer}
req.on 'error', (err) =>
@log.error {fn:sig+':error', err:err, answer:answer}
req.DNSErr = err
req.on 'end', =>
if success
@log.debug {fn:sig+':success', q:q, res: _.omit(res, '_socket')}
res.send()
else
# TODO: this is noisy.
# also make log output look good in journalctl
@log.warn {fn:sig+':fail', q:q, err:req.DNSErr, res:_.omit(res, '_socket')}
@sendErr res
req.send()
else
dns.resolve q.name, QTYPE_NAME[q.type], (err, addrs) =>
if err
@log.debug {fn:sig+':fail', q:q, err:err}
@sendErr res
else
# USING THIS METHOD IS DISCOURAGED BECAUSE IT DOESN'T
# PROVIDE US WITH CORRECT TTL VALUES!!
# TODO: pick an appropriate TTL value!
ttl = Math.floor(Math.random() * 3600) + 30
res.answer.push (addrs.map ip2type(q.name, ttl, QTYPE_NAME[q.type]))...
@log.debug {fn:sig+':success', answer:res.answer, q:q.name}
res.send()
sendErr: (res, code) ->
res.header.rcode = code ? NAME_RCODE.SERVFAIL
@log.debug {fn:'sendErr', code:RCODE_NAME[code]}
res.send()
callback: (req, res) ->
# answering multiple questions in a query appears to be problematic,
# and few servers do it, so we only answer the first question:
# https://stackoverflow.com/questions/4082081/requesting-a-and-aaaa-records-in-single-dns-query
q = req.question[0]
ttl = Math.floor(Math.random() * 3600) + 30 # TODO: pick an appropriate TTL value!
@log.debug "received question", q
# TODO: make sure we correctly handle AAAA
# if q.type != NAME_QTYPE.A
# @log.debug "only support 'A' types ATM, deferring request!", {q:q}
# @oldDNSLookup(q, res)
if S(q.name).endsWith '.bit'
nmcDomain = @namecoinizeDomain q.name
@log.debug {fn: 'cb|.bit', nmcDomain:nmcDomain, q:q}
@dnschain.nmc.resolve nmcDomain, (err, result) =>
@log.debug {fn: 'nmc_show|cb'}
if err
@log.error {fn:'nmc_show', err:err, result:result, q:q}
@sendErr res
else
@log.debug {fn:'nmc_show', q:q, result:result}
try
info = JSON.parse result.value
catch e
@log.warn "bad JSON!", {err:e, result:result, q:q}
return @sendErr res, NAME_RCODE.FORMERR
# TODO: handle all the types specified in the specification!
# https://github.com/namecoin/wiki/blob/master/Domain-Name-Specification-2.0.md
# TODO: handle other info outside of the specification!
# - GNS support
# - DNSSEC/DANE support?
# According to NMC specification, specifying 'ns'
# overrules 'ip' value, so check it here and resolve using
# old-style DNS.
if info.ns?.length > 0
# 1. Create a stream of nameserver IP addresses out of info.ns
# 2. Send request to each of the servers, separated by a two
# second delay. On receiving the first answer from any of
# them, cancel all other pending requests and respond to
# our client.
#
# TODO: handle ns = IPv6 addr!
[nsIPs, nsCNAMEs] = [[],[]]
for ip in info.ns
(if net.isIP(ip) then nsIPs else nsCNAMEs).push(ip)
# ResolverStream will clone 'resolvOpts' in the constructor
nsCNAME2IP = new ResolverStream(resolvOpts = log:@log)
nsIPs = es.merge(sa(nsIPs), sa(nsCNAMEs).pipe(nsCNAME2IP))
# safe to do becase ResolverStream clones the opts
resolvOpts.stackedDelay = 1000
resolvOpts.reqMaker = (nsIP) =>
req = dns2.Request
question: q
server: {address: nsIP}
stackedQuery = new ResolverStream resolvOpts
stackedQuery.errors = 0
nsIPs.on 'data', (nsIP) ->
stackedQuery.write nsIP
stackedQuery.on 'error', (err) =>
if ++stackedQuery.errors == info.ns.length
@log.warn "errors on all NS!", {fn:'nmc_show', q:q, err:err}
@sendErr(res)
stackedQuery.on 'answers', (answers) =>
nsCNAME2IP.cancelRequests(true)
stackedQuery.cancelRequests(true)
res.answer.push answers...
@log.debug "sending answers!", {fn:'nmc_show', answers:answers, q:q}
res.send()
else if info.ip
# we have its IP! send reply to client
# TODO: pick an appropriate 'ttl' for the response!
# TODO: handle more info! send the rest of the
# stuff in 'info', and all the IPs!
info.ip = [info.ip] if typeof info.ip is 'string'
# info.ip.forEach (a)-> res.answer.push ip2type(q.name, ttl)(a)
res.answer.push (info.ip.map ip2type(q.name, ttl, QTYPE_NAME[q.type]))...
@log.debug {fn:'nmc_show|ip', q:q, answer:res.answer}
res.send()
else
@log.warn {fn: 'nmc_show|404', q:q}
@sendErr res, NAME_RCODE.NOTFOUND
else if S(q.name).endsWith '.dns'
# TODO: right now we're doing a catch-all and pretending they asked
# for namecoin.dns...
res.answer.push ip2type(q.name,ttl,QTYPE_NAME[q.type])(config.get 'dns:externalIP')
@log.debug {fn:'cb|.dns', q:q, answer:res.answer}
res.send()
else
@log.debug "deferring question", {fn: "cb|else", q:q}
@oldDNSLookup q, res
| true | ###
dnschain
http://dnschain.net
Copyright (c) 2013 PI:NAME:<NAME>END_PI
Licensed under the BSD 3-Clause license.
###
# TODO: go through 'TODO's!
#
# TODO: check if we're missing any edns support
module.exports = (dnschain) ->
# expose these into our namespace
for k of dnschain.globals
eval "var #{k} = dnschain.globals.#{k};"
ResolverStream = require('./resolver-stream')(dnschain)
QTYPE_NAME = dns2.consts.QTYPE_TO_NAME
NAME_QTYPE = dns2.consts.NAME_TO_QTYPE
NAME_RCODE = dns2.consts.NAME_TO_RCODE
RCODE_NAME = dns2.consts.RCODE_TO_NAME
class DNSServer
constructor: (@dnschain) ->
@log = newLogger 'DNS'
@log.debug "Loading DNSServer..."
# this is just for development testing of NODE_DNS method
# dns.setServers ['8.8.8.8']
if dns.getServers? and consts.oldDNS.NODE_DNS is config.get 'dns:oldDNSMethod'
blacklist = _.intersection ['127.0.0.1', '::1', 'localhost'], dns.getServers()
if blacklist.length > 0
tErr "Cannot use NODE_DNS method when system DNS lists %j as a resolver! Would lead to infinite loop!", blacklist
@server = dns2.createServer() or tErr "dns2 create"
@server.on 'socketError', (err) -> tErr err
@server.on 'request', @callback.bind(@)
@server.serve config.get('dns:port'), config.get('dns:host')
@log.info 'started DNS', config.get 'dns'
shutdown: ->
@log.debug 'shutting down!'
@server.close()
namecoinizeDomain: (domain) ->
nmcDomain = S(domain).chompRight('.bit').s
if (dotIdx = nmcDomain.indexOf('.')) != -1
nmcDomain = nmcDomain.slice(dotIdx+1) # rm subdomain
'd/' + nmcDomain # add 'd/' namespace
oldDNSLookup: (q, res) ->
method = config.get 'dns:oldDNSMethod'
sig = "oldDNS{#{method}}"
@log.debug {fn:sig+':start', q:q}
if method is consts.oldDNS.NATIVE_DNS
req = new dns2.Request
question: q
server : config.get 'dns:oldDNS'
try_edns: q.type is NAME_QTYPE.ANY
success = false
req.on 'message', (err, answer) =>
if err?
@log.error "should not have an error here!", {fn:sig+':error', err:err, answer:answer}
req.DNSErr ?= err
else
success = true
res.edns_version = 0 if req.try_edns
res.header.ra = answer.header.ra
for a in ['answer', 'authority', 'additional'] when answer[a].length?
res[a].push answer[a]...
@log.debug {fn:sig+':message', answer:answer}
req.on 'error', (err) =>
@log.error {fn:sig+':error', err:err, answer:answer}
req.DNSErr = err
req.on 'end', =>
if success
@log.debug {fn:sig+':success', q:q, res: _.omit(res, '_socket')}
res.send()
else
# TODO: this is noisy.
# also make log output look good in journalctl
@log.warn {fn:sig+':fail', q:q, err:req.DNSErr, res:_.omit(res, '_socket')}
@sendErr res
req.send()
else
dns.resolve q.name, QTYPE_NAME[q.type], (err, addrs) =>
if err
@log.debug {fn:sig+':fail', q:q, err:err}
@sendErr res
else
# USING THIS METHOD IS DISCOURAGED BECAUSE IT DOESN'T
# PROVIDE US WITH CORRECT TTL VALUES!!
# TODO: pick an appropriate TTL value!
ttl = Math.floor(Math.random() * 3600) + 30
res.answer.push (addrs.map ip2type(q.name, ttl, QTYPE_NAME[q.type]))...
@log.debug {fn:sig+':success', answer:res.answer, q:q.name}
res.send()
sendErr: (res, code) ->
res.header.rcode = code ? NAME_RCODE.SERVFAIL
@log.debug {fn:'sendErr', code:RCODE_NAME[code]}
res.send()
callback: (req, res) ->
# answering multiple questions in a query appears to be problematic,
# and few servers do it, so we only answer the first question:
# https://stackoverflow.com/questions/4082081/requesting-a-and-aaaa-records-in-single-dns-query
q = req.question[0]
ttl = Math.floor(Math.random() * 3600) + 30 # TODO: pick an appropriate TTL value!
@log.debug "received question", q
# TODO: make sure we correctly handle AAAA
# if q.type != NAME_QTYPE.A
# @log.debug "only support 'A' types ATM, deferring request!", {q:q}
# @oldDNSLookup(q, res)
if S(q.name).endsWith '.bit'
nmcDomain = @namecoinizeDomain q.name
@log.debug {fn: 'cb|.bit', nmcDomain:nmcDomain, q:q}
@dnschain.nmc.resolve nmcDomain, (err, result) =>
@log.debug {fn: 'nmc_show|cb'}
if err
@log.error {fn:'nmc_show', err:err, result:result, q:q}
@sendErr res
else
@log.debug {fn:'nmc_show', q:q, result:result}
try
info = JSON.parse result.value
catch e
@log.warn "bad JSON!", {err:e, result:result, q:q}
return @sendErr res, NAME_RCODE.FORMERR
# TODO: handle all the types specified in the specification!
# https://github.com/namecoin/wiki/blob/master/Domain-Name-Specification-2.0.md
# TODO: handle other info outside of the specification!
# - GNS support
# - DNSSEC/DANE support?
# According to NMC specification, specifying 'ns'
# overrules 'ip' value, so check it here and resolve using
# old-style DNS.
if info.ns?.length > 0
# 1. Create a stream of nameserver IP addresses out of info.ns
# 2. Send request to each of the servers, separated by a two
# second delay. On receiving the first answer from any of
# them, cancel all other pending requests and respond to
# our client.
#
# TODO: handle ns = IPv6 addr!
[nsIPs, nsCNAMEs] = [[],[]]
for ip in info.ns
(if net.isIP(ip) then nsIPs else nsCNAMEs).push(ip)
# ResolverStream will clone 'resolvOpts' in the constructor
nsCNAME2IP = new ResolverStream(resolvOpts = log:@log)
nsIPs = es.merge(sa(nsIPs), sa(nsCNAMEs).pipe(nsCNAME2IP))
# safe to do becase ResolverStream clones the opts
resolvOpts.stackedDelay = 1000
resolvOpts.reqMaker = (nsIP) =>
req = dns2.Request
question: q
server: {address: nsIP}
stackedQuery = new ResolverStream resolvOpts
stackedQuery.errors = 0
nsIPs.on 'data', (nsIP) ->
stackedQuery.write nsIP
stackedQuery.on 'error', (err) =>
if ++stackedQuery.errors == info.ns.length
@log.warn "errors on all NS!", {fn:'nmc_show', q:q, err:err}
@sendErr(res)
stackedQuery.on 'answers', (answers) =>
nsCNAME2IP.cancelRequests(true)
stackedQuery.cancelRequests(true)
res.answer.push answers...
@log.debug "sending answers!", {fn:'nmc_show', answers:answers, q:q}
res.send()
else if info.ip
# we have its IP! send reply to client
# TODO: pick an appropriate 'ttl' for the response!
# TODO: handle more info! send the rest of the
# stuff in 'info', and all the IPs!
info.ip = [info.ip] if typeof info.ip is 'string'
# info.ip.forEach (a)-> res.answer.push ip2type(q.name, ttl)(a)
res.answer.push (info.ip.map ip2type(q.name, ttl, QTYPE_NAME[q.type]))...
@log.debug {fn:'nmc_show|ip', q:q, answer:res.answer}
res.send()
else
@log.warn {fn: 'nmc_show|404', q:q}
@sendErr res, NAME_RCODE.NOTFOUND
else if S(q.name).endsWith '.dns'
# TODO: right now we're doing a catch-all and pretending they asked
# for namecoin.dns...
res.answer.push ip2type(q.name,ttl,QTYPE_NAME[q.type])(config.get 'dns:externalIP')
@log.debug {fn:'cb|.dns', q:q, answer:res.answer}
res.send()
else
@log.debug "deferring question", {fn: "cb|else", q:q}
@oldDNSLookup q, res
|
[
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com> \n\nLicensed under the A",
"end": 39,
"score": 0.9998924136161804,
"start": 26,
"tag": "NAME",
"value": "Stephan Jorek"
},
{
"context": "###\n© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com> \n\nLicensed under the Apache License, Version 2.",
"end": 64,
"score": 0.9999321699142456,
"start": 41,
"tag": "EMAIL",
"value": "stephan.jorek@gmail.com"
}
] | src/Action/Scope/Decorator/WithStatementEvaluation.coffee | sjorek/goatee.js | 0 | ###
© Copyright 2013-2014 Stephan Jorek <stephan.jorek@gmail.com>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
CHAR_comma
}} = require '../../Core/Constants'
{WithStatement} = require './WithStatement'
exports = module?.exports ? this
exports.WithStatementEvaluation = \
class WithStatementEvaluation extends WithStatement
compile: (args, code) ->
fn = null
eval "fn = function(#{args.implode(CHAR_comma)}) { #{code} }"
| 95126 | ###
© Copyright 2013-2014 <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
CHAR_comma
}} = require '../../Core/Constants'
{WithStatement} = require './WithStatement'
exports = module?.exports ? this
exports.WithStatementEvaluation = \
class WithStatementEvaluation extends WithStatement
compile: (args, code) ->
fn = null
eval "fn = function(#{args.implode(CHAR_comma)}) { #{code} }"
| true | ###
© Copyright 2013-2014 PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<http://www.apache.org/licenses/LICENSE-2.0>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing
permissions and limitations under the License.
###
{Constants:{
CHAR_comma
}} = require '../../Core/Constants'
{WithStatement} = require './WithStatement'
exports = module?.exports ? this
exports.WithStatementEvaluation = \
class WithStatementEvaluation extends WithStatement
compile: (args, code) ->
fn = null
eval "fn = function(#{args.implode(CHAR_comma)}) { #{code} }"
|
[
{
"context": "\n @sandbox.stub(git, \"_getAuthor\").resolves(\"brian\")\n @sandbox.stub(git, \"_getEmail\").resol",
"end": 2071,
"score": 0.5556120872497559,
"start": 2070,
"tag": "USERNAME",
"value": "b"
},
{
"context": " @sandbox.stub(git, \"_getAuthor\").resolves(\"brian\")\n @sandbox.stub(git, \"_getEmail\").resolves(",
"end": 2075,
"score": 0.9208471179008484,
"start": 2071,
"tag": "NAME",
"value": "rian"
},
{
"context": ")\n @sandbox.stub(git, \"_getEmail\").resolves(\"brian@cypress.io\")\n @sandbox.stub(git, \"_getMessage\").resolve",
"end": 2142,
"score": 0.999912679195404,
"start": 2126,
"tag": "EMAIL",
"value": "brian@cypress.io"
},
{
"context": " \"_getRemoteOrigin\").resolves(\"https://github.com/foo/bar.git\")\n @sandbox.stub(api, \"createRun\")\n\n",
"end": 2339,
"score": 0.9930775165557861,
"start": 2336,
"tag": "USERNAME",
"value": "foo"
},
{
"context": "1\n\n api.createRun.rejects(err)\n\n key = \"3206e6d9-51b6-4766-b2a5-9d173f5158aa\"\n\n record.generateProjectBuildI",
"end": 3410,
"score": 0.9996946454048157,
"start": 3387,
"tag": "KEY",
"value": "3206e6d9-51b6-4766-b2a5"
},
{
"context": "ctPath: \"/_test-output/path/to/project\", key: \"key-foo\"})\n .then ->\n expect(record.generateP",
"end": 11436,
"score": 0.855259895324707,
"start": 11433,
"tag": "KEY",
"value": "foo"
}
] | packages/server/test/unit/modes/record_spec.coffee | smartmanru/cypress | 0 | require("../../spec_helper")
os = require("os")
api = require("#{root}../lib/api")
stdout = require("#{root}../lib/stdout")
errors = require("#{root}../lib/errors")
logger = require("#{root}../lib/logger")
Project = require("#{root}../lib/project")
terminal = require("#{root}../lib/util/terminal")
record = require("#{root}../lib/modes/record")
headless = require("#{root}../lib/modes/headless")
git = require("#{root}../lib/util/git")
ciProvider = require("#{root}../lib/util/ci_provider")
snapshot = require("snap-shot-it")
describe "lib/modes/record", ->
beforeEach ->
@sandbox.stub(ciProvider, "name").returns("circle")
@sandbox.stub(ciProvider, "params").returns({foo: "bar"})
@sandbox.stub(ciProvider, "buildNum").returns("build-123")
context ".getBranch", ->
beforeEach ->
@repo = @sandbox.stub({
getBranch: ->
})
afterEach ->
delete process.env.CIRCLE_BRANCH
delete process.env.TRAVIS_BRANCH
delete process.env.CI_BRANCH
it "gets branch from process.env.CIRCLE_BRANCH", ->
process.env.CIRCLE_BRANCH = "bem/circle"
process.env.TRAVIS_BRANCH = "bem/travis"
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/circle")
it "gets branch from process.env.TRAVIS_BRANCH", ->
process.env.TRAVIS_BRANCH = "bem/travis"
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/travis")
it "gets branch from process.env.CI_BRANCH", ->
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/ci")
it "gets branch from git", ->
@repo.getBranch.resolves("regular-branch")
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("regular-branch")
context ".generateProjectBuildId", ->
beforeEach ->
@sandbox.stub(git, "_getBranch").resolves("master")
@sandbox.stub(git, "_getAuthor").resolves("brian")
@sandbox.stub(git, "_getEmail").resolves("brian@cypress.io")
@sandbox.stub(git, "_getMessage").resolves("such hax")
@sandbox.stub(git, "_getSha").resolves("sha-123")
@sandbox.stub(git, "_getRemoteOrigin").resolves("https://github.com/foo/bar.git")
@sandbox.stub(api, "createRun")
it "calls api.createRun with args", ->
api.createRun.resolves()
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123").then ->
snapshot(api.createRun.firstCall.args)
it "passes groupId", ->
api.createRun.resolves()
group = true
groupId = "gr123"
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123", group, groupId).then ->
snapshot(api.createRun.firstCall.args)
it "figures out groupId from CI environment variables", ->
@sandbox.stub(ciProvider, "groupId").returns("ci-group-123")
api.createRun.resolves()
group = true
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123", group).then ->
snapshot(api.createRun.firstCall.args)
it "handles status code errors of 401", ->
err = new Error
err.statusCode = 401
api.createRun.rejects(err)
key = "3206e6d9-51b6-4766-b2a5-9d173f5158aa"
record.generateProjectBuildId("id-123", "path", "project", key)
.then ->
throw new Error("should have failed but did not")
.catch (err) ->
expect(err.type).to.eq("RECORD_KEY_NOT_VALID")
expect(err.message).to.include("Key")
expect(err.message).to.include("3206e...158aa")
expect(err.message).to.include("invalid")
it "handles status code errors of 404", ->
err = new Error
err.statusCode = 404
api.createRun.rejects(err)
record.generateProjectBuildId("id-123", "path", "project", "key-123")
.then ->
throw new Error("should have failed but did not")
.catch (err) ->
expect(err.type).to.eq("DASHBOARD_PROJECT_NOT_FOUND")
it "handles all other errors", ->
err = new Error("foo")
api.createRun.rejects(err)
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
## this should not throw
record.generateProjectBuildId(1,2,3,4)
.then (ret) ->
expect(ret).to.be.null
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("Warning: We encountered an error talking to our servers.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
context ".uploadStdout", ->
beforeEach ->
@sandbox.stub(api, "updateInstanceStdout")
it "calls api.updateInstanceStdout", ->
api.updateInstanceStdout.resolves()
record.uploadStdout("id-123", "foobarbaz\n")
expect(api.updateInstanceStdout).to.be.calledWith({
instanceId: "id-123"
stdout: "foobarbaz\n"
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.updateInstanceStdout.rejects(err)
record.uploadStdout("id-123", "asdf")
.then ->
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.updateInstanceStdout.rejects(err)
record.uploadStdout("id-123", "Asdfasd")
.then ->
expect(logger.createException).not.to.be.called
context ".uploadAssets", ->
beforeEach ->
@sandbox.stub(api, "updateInstance")
it "calls api.updateInstance", ->
api.updateInstance.resolves()
record.uploadAssets("id-123", {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
video: "path/to/video"
error: "err msg"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
failingTests: ["foo"]
config: {foo: "bar"}
}, "foobarbaz")
expect(api.updateInstance).to.be.calledWith({
instanceId: "id-123"
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: [{name: "foo"}]
failingTests: ["foo"]
cypressConfig: {foo: "bar"}
ciProvider: "circle"
stdout: "foobarbaz"
})
it "calls record.upload on success", ->
resp = {
videoUploadUrl: "https://s3.upload.video"
screenshotUploadUrls: [
{ clientId: 1, uploadUrl: "https://s3.upload.screenshot/1"}
{ clientId: 2, uploadUrl: "https://s3.upload.screenshot/2"}
]
}
api.updateInstance.resolves(resp)
@sandbox.stub(record, "upload").resolves()
record.uploadAssets("id-123", {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
video: "path/to/video"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
failingTests: ["foo"]
config: {foo: "bar"}
})
.then ->
expect(record.upload).to.be.calledWith({
video: "path/to/video"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
videoUrl: resp.videoUploadUrl
screenshotUrls: resp.screenshotUploadUrls
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.updateInstance.rejects(err)
record.uploadAssets("id-123", {})
.then ->
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.updateInstance.rejects(err)
record.uploadAssets("id-123", {})
.then ->
expect(logger.createException).not.to.be.called
context ".createInstance", ->
beforeEach ->
@sandbox.stub(api, "createInstance")
it "calls api.createInstance", ->
api.createInstance.resolves()
record.createInstance("id-123", "cypress/integration/app_spec.coffee")
expect(api.createInstance).to.be.calledWith({
buildId: "id-123"
spec: "cypress/integration/app_spec.coffee"
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.createInstance.rejects(err)
record.createInstance("id-123", null)
.then (ret) ->
expect(ret).to.be.null
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.createInstance.rejects(err)
record.createInstance("id-123", null)
.then (ret) ->
expect(ret).to.be.null
expect(logger.createException).not.to.be.called
context ".run", ->
beforeEach ->
@sandbox.stub(record, "generateProjectBuildId").resolves("build-id-123")
@sandbox.stub(record, "createInstance").resolves("instance-id-123")
@sandbox.stub(record, "uploadAssets").resolves()
@sandbox.stub(record, "uploadStdout").resolves()
@sandbox.stub(Project, "id").resolves("id-123")
@sandbox.stub(Project, "config").resolves({projectName: "projectName"})
@sandbox.stub(headless, "run").resolves({tests: 2, passes: 1})
@sandbox.spy(Project, "add")
it "ensures id", ->
record.run({projectPath: "/_test-output/path/to/project"})
.then ->
expect(Project.id).to.be.calledWith("/_test-output/path/to/project")
it "adds project with projectPath", ->
record.run({projectPath: "/_test-output/path/to/project"})
.then ->
expect(Project.add).to.be.calledWith("/_test-output/path/to/project")
it "passes id + projectPath + options.key to generateProjectBuildId", ->
record.run({projectPath: "/_test-output/path/to/project", key: "key-foo"})
.then ->
expect(record.generateProjectBuildId).to.be.calledWith("id-123", "/_test-output/path/to/project", "projectName", "key-foo")
it "passes buildId + options.spec to createInstance", ->
record.run({spec: "foo/bar/spec"})
.then ->
expect(record.createInstance).to.be.calledWith("build-id-123", "foo/bar/spec")
it "does not call record.createInstance or record.uploadAssets when no buildId", ->
record.generateProjectBuildId.resolves(null)
record.run({})
.then (stats) ->
expect(record.createInstance).not.to.be.called
expect(record.uploadAssets).not.to.be.called
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.run + ensureAuthToken + allDone into options", ->
opts = {foo: "bar"}
record.run(opts)
.then ->
expect(headless.run).to.be.calledWith({projectId: "id-123", foo: "bar", ensureAuthToken: false, allDone: false})
it "calls uploadAssets with instanceId, stats, and stdout", ->
@sandbox.stub(stdout, "capture").returns({
toString: -> "foobarbaz"
})
record.run({})
.then ->
expect(record.uploadAssets).to.be.calledWith("instance-id-123", {tests: 2, passes: 1}, "foobarbaz")
it "does not call uploadAssets with no instanceId", ->
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(record.uploadAssets).not.to.be.called
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "does not call uploadStdout with no instanceId", ->
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(record.uploadStdout).not.to.be.called
it "does not call uploadStdout on uploadAssets failure", ->
record.uploadAssets.restore()
@sandbox.stub(api, "updateInstance").rejects(new Error)
record.run({})
.then (stats) ->
expect(record.uploadStdout).not.to.be.called
it "calls record.uploadStdout on uploadAssets success", ->
@sandbox.stub(stdout, "capture").returns({
toString: -> "foobarbaz"
})
record.run({})
.then (stats) ->
expect(record.uploadStdout).to.be.calledWith("instance-id-123", "foobarbaz")
it "captures stdout from headless.run and headless.allDone", ->
fn = ->
console.log("foo")
console.log("bar")
process.stdout.write("baz")
Promise.resolve({failures: 0})
headless.run.restore()
@sandbox.stub(headless, "run", fn)
record.run({})
.then (stats) ->
str = record.uploadStdout.getCall(0).args[1]
expect(str).to.include("foo\nbar\nbaz")
expect(str).to.include("All Done")
it "calls headless.allDone on uploadAssets success", ->
@sandbox.spy(terminal, "header")
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.allDone on uploadAssets failure", ->
@sandbox.spy(terminal, "header")
@sandbox.stub(api, "updateInstance").rejects(new Error)
record.uploadAssets.restore()
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.allDone on createInstance failure", ->
@sandbox.spy(terminal, "header")
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
| 106156 | require("../../spec_helper")
os = require("os")
api = require("#{root}../lib/api")
stdout = require("#{root}../lib/stdout")
errors = require("#{root}../lib/errors")
logger = require("#{root}../lib/logger")
Project = require("#{root}../lib/project")
terminal = require("#{root}../lib/util/terminal")
record = require("#{root}../lib/modes/record")
headless = require("#{root}../lib/modes/headless")
git = require("#{root}../lib/util/git")
ciProvider = require("#{root}../lib/util/ci_provider")
snapshot = require("snap-shot-it")
describe "lib/modes/record", ->
beforeEach ->
@sandbox.stub(ciProvider, "name").returns("circle")
@sandbox.stub(ciProvider, "params").returns({foo: "bar"})
@sandbox.stub(ciProvider, "buildNum").returns("build-123")
context ".getBranch", ->
beforeEach ->
@repo = @sandbox.stub({
getBranch: ->
})
afterEach ->
delete process.env.CIRCLE_BRANCH
delete process.env.TRAVIS_BRANCH
delete process.env.CI_BRANCH
it "gets branch from process.env.CIRCLE_BRANCH", ->
process.env.CIRCLE_BRANCH = "bem/circle"
process.env.TRAVIS_BRANCH = "bem/travis"
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/circle")
it "gets branch from process.env.TRAVIS_BRANCH", ->
process.env.TRAVIS_BRANCH = "bem/travis"
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/travis")
it "gets branch from process.env.CI_BRANCH", ->
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/ci")
it "gets branch from git", ->
@repo.getBranch.resolves("regular-branch")
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("regular-branch")
context ".generateProjectBuildId", ->
beforeEach ->
@sandbox.stub(git, "_getBranch").resolves("master")
@sandbox.stub(git, "_getAuthor").resolves("b<NAME>")
@sandbox.stub(git, "_getEmail").resolves("<EMAIL>")
@sandbox.stub(git, "_getMessage").resolves("such hax")
@sandbox.stub(git, "_getSha").resolves("sha-123")
@sandbox.stub(git, "_getRemoteOrigin").resolves("https://github.com/foo/bar.git")
@sandbox.stub(api, "createRun")
it "calls api.createRun with args", ->
api.createRun.resolves()
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123").then ->
snapshot(api.createRun.firstCall.args)
it "passes groupId", ->
api.createRun.resolves()
group = true
groupId = "gr123"
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123", group, groupId).then ->
snapshot(api.createRun.firstCall.args)
it "figures out groupId from CI environment variables", ->
@sandbox.stub(ciProvider, "groupId").returns("ci-group-123")
api.createRun.resolves()
group = true
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123", group).then ->
snapshot(api.createRun.firstCall.args)
it "handles status code errors of 401", ->
err = new Error
err.statusCode = 401
api.createRun.rejects(err)
key = "<KEY>-9d173f5158aa"
record.generateProjectBuildId("id-123", "path", "project", key)
.then ->
throw new Error("should have failed but did not")
.catch (err) ->
expect(err.type).to.eq("RECORD_KEY_NOT_VALID")
expect(err.message).to.include("Key")
expect(err.message).to.include("3206e...158aa")
expect(err.message).to.include("invalid")
it "handles status code errors of 404", ->
err = new Error
err.statusCode = 404
api.createRun.rejects(err)
record.generateProjectBuildId("id-123", "path", "project", "key-123")
.then ->
throw new Error("should have failed but did not")
.catch (err) ->
expect(err.type).to.eq("DASHBOARD_PROJECT_NOT_FOUND")
it "handles all other errors", ->
err = new Error("foo")
api.createRun.rejects(err)
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
## this should not throw
record.generateProjectBuildId(1,2,3,4)
.then (ret) ->
expect(ret).to.be.null
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("Warning: We encountered an error talking to our servers.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
context ".uploadStdout", ->
beforeEach ->
@sandbox.stub(api, "updateInstanceStdout")
it "calls api.updateInstanceStdout", ->
api.updateInstanceStdout.resolves()
record.uploadStdout("id-123", "foobarbaz\n")
expect(api.updateInstanceStdout).to.be.calledWith({
instanceId: "id-123"
stdout: "foobarbaz\n"
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.updateInstanceStdout.rejects(err)
record.uploadStdout("id-123", "asdf")
.then ->
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.updateInstanceStdout.rejects(err)
record.uploadStdout("id-123", "Asdfasd")
.then ->
expect(logger.createException).not.to.be.called
context ".uploadAssets", ->
beforeEach ->
@sandbox.stub(api, "updateInstance")
it "calls api.updateInstance", ->
api.updateInstance.resolves()
record.uploadAssets("id-123", {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
video: "path/to/video"
error: "err msg"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
failingTests: ["foo"]
config: {foo: "bar"}
}, "foobarbaz")
expect(api.updateInstance).to.be.calledWith({
instanceId: "id-123"
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: [{name: "foo"}]
failingTests: ["foo"]
cypressConfig: {foo: "bar"}
ciProvider: "circle"
stdout: "foobarbaz"
})
it "calls record.upload on success", ->
resp = {
videoUploadUrl: "https://s3.upload.video"
screenshotUploadUrls: [
{ clientId: 1, uploadUrl: "https://s3.upload.screenshot/1"}
{ clientId: 2, uploadUrl: "https://s3.upload.screenshot/2"}
]
}
api.updateInstance.resolves(resp)
@sandbox.stub(record, "upload").resolves()
record.uploadAssets("id-123", {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
video: "path/to/video"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
failingTests: ["foo"]
config: {foo: "bar"}
})
.then ->
expect(record.upload).to.be.calledWith({
video: "path/to/video"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
videoUrl: resp.videoUploadUrl
screenshotUrls: resp.screenshotUploadUrls
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.updateInstance.rejects(err)
record.uploadAssets("id-123", {})
.then ->
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.updateInstance.rejects(err)
record.uploadAssets("id-123", {})
.then ->
expect(logger.createException).not.to.be.called
context ".createInstance", ->
beforeEach ->
@sandbox.stub(api, "createInstance")
it "calls api.createInstance", ->
api.createInstance.resolves()
record.createInstance("id-123", "cypress/integration/app_spec.coffee")
expect(api.createInstance).to.be.calledWith({
buildId: "id-123"
spec: "cypress/integration/app_spec.coffee"
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.createInstance.rejects(err)
record.createInstance("id-123", null)
.then (ret) ->
expect(ret).to.be.null
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.createInstance.rejects(err)
record.createInstance("id-123", null)
.then (ret) ->
expect(ret).to.be.null
expect(logger.createException).not.to.be.called
context ".run", ->
beforeEach ->
@sandbox.stub(record, "generateProjectBuildId").resolves("build-id-123")
@sandbox.stub(record, "createInstance").resolves("instance-id-123")
@sandbox.stub(record, "uploadAssets").resolves()
@sandbox.stub(record, "uploadStdout").resolves()
@sandbox.stub(Project, "id").resolves("id-123")
@sandbox.stub(Project, "config").resolves({projectName: "projectName"})
@sandbox.stub(headless, "run").resolves({tests: 2, passes: 1})
@sandbox.spy(Project, "add")
it "ensures id", ->
record.run({projectPath: "/_test-output/path/to/project"})
.then ->
expect(Project.id).to.be.calledWith("/_test-output/path/to/project")
it "adds project with projectPath", ->
record.run({projectPath: "/_test-output/path/to/project"})
.then ->
expect(Project.add).to.be.calledWith("/_test-output/path/to/project")
it "passes id + projectPath + options.key to generateProjectBuildId", ->
record.run({projectPath: "/_test-output/path/to/project", key: "key-<KEY>"})
.then ->
expect(record.generateProjectBuildId).to.be.calledWith("id-123", "/_test-output/path/to/project", "projectName", "key-foo")
it "passes buildId + options.spec to createInstance", ->
record.run({spec: "foo/bar/spec"})
.then ->
expect(record.createInstance).to.be.calledWith("build-id-123", "foo/bar/spec")
it "does not call record.createInstance or record.uploadAssets when no buildId", ->
record.generateProjectBuildId.resolves(null)
record.run({})
.then (stats) ->
expect(record.createInstance).not.to.be.called
expect(record.uploadAssets).not.to.be.called
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.run + ensureAuthToken + allDone into options", ->
opts = {foo: "bar"}
record.run(opts)
.then ->
expect(headless.run).to.be.calledWith({projectId: "id-123", foo: "bar", ensureAuthToken: false, allDone: false})
it "calls uploadAssets with instanceId, stats, and stdout", ->
@sandbox.stub(stdout, "capture").returns({
toString: -> "foobarbaz"
})
record.run({})
.then ->
expect(record.uploadAssets).to.be.calledWith("instance-id-123", {tests: 2, passes: 1}, "foobarbaz")
it "does not call uploadAssets with no instanceId", ->
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(record.uploadAssets).not.to.be.called
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "does not call uploadStdout with no instanceId", ->
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(record.uploadStdout).not.to.be.called
it "does not call uploadStdout on uploadAssets failure", ->
record.uploadAssets.restore()
@sandbox.stub(api, "updateInstance").rejects(new Error)
record.run({})
.then (stats) ->
expect(record.uploadStdout).not.to.be.called
it "calls record.uploadStdout on uploadAssets success", ->
@sandbox.stub(stdout, "capture").returns({
toString: -> "foobarbaz"
})
record.run({})
.then (stats) ->
expect(record.uploadStdout).to.be.calledWith("instance-id-123", "foobarbaz")
it "captures stdout from headless.run and headless.allDone", ->
fn = ->
console.log("foo")
console.log("bar")
process.stdout.write("baz")
Promise.resolve({failures: 0})
headless.run.restore()
@sandbox.stub(headless, "run", fn)
record.run({})
.then (stats) ->
str = record.uploadStdout.getCall(0).args[1]
expect(str).to.include("foo\nbar\nbaz")
expect(str).to.include("All Done")
it "calls headless.allDone on uploadAssets success", ->
@sandbox.spy(terminal, "header")
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.allDone on uploadAssets failure", ->
@sandbox.spy(terminal, "header")
@sandbox.stub(api, "updateInstance").rejects(new Error)
record.uploadAssets.restore()
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.allDone on createInstance failure", ->
@sandbox.spy(terminal, "header")
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
| true | require("../../spec_helper")
os = require("os")
api = require("#{root}../lib/api")
stdout = require("#{root}../lib/stdout")
errors = require("#{root}../lib/errors")
logger = require("#{root}../lib/logger")
Project = require("#{root}../lib/project")
terminal = require("#{root}../lib/util/terminal")
record = require("#{root}../lib/modes/record")
headless = require("#{root}../lib/modes/headless")
git = require("#{root}../lib/util/git")
ciProvider = require("#{root}../lib/util/ci_provider")
snapshot = require("snap-shot-it")
describe "lib/modes/record", ->
beforeEach ->
@sandbox.stub(ciProvider, "name").returns("circle")
@sandbox.stub(ciProvider, "params").returns({foo: "bar"})
@sandbox.stub(ciProvider, "buildNum").returns("build-123")
context ".getBranch", ->
beforeEach ->
@repo = @sandbox.stub({
getBranch: ->
})
afterEach ->
delete process.env.CIRCLE_BRANCH
delete process.env.TRAVIS_BRANCH
delete process.env.CI_BRANCH
it "gets branch from process.env.CIRCLE_BRANCH", ->
process.env.CIRCLE_BRANCH = "bem/circle"
process.env.TRAVIS_BRANCH = "bem/travis"
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/circle")
it "gets branch from process.env.TRAVIS_BRANCH", ->
process.env.TRAVIS_BRANCH = "bem/travis"
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/travis")
it "gets branch from process.env.CI_BRANCH", ->
process.env.CI_BRANCH = "bem/ci"
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("bem/ci")
it "gets branch from git", ->
@repo.getBranch.resolves("regular-branch")
record.getBranch(@repo).then (ret) ->
expect(ret).to.eq("regular-branch")
context ".generateProjectBuildId", ->
beforeEach ->
@sandbox.stub(git, "_getBranch").resolves("master")
@sandbox.stub(git, "_getAuthor").resolves("bPI:NAME:<NAME>END_PI")
@sandbox.stub(git, "_getEmail").resolves("PI:EMAIL:<EMAIL>END_PI")
@sandbox.stub(git, "_getMessage").resolves("such hax")
@sandbox.stub(git, "_getSha").resolves("sha-123")
@sandbox.stub(git, "_getRemoteOrigin").resolves("https://github.com/foo/bar.git")
@sandbox.stub(api, "createRun")
it "calls api.createRun with args", ->
api.createRun.resolves()
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123").then ->
snapshot(api.createRun.firstCall.args)
it "passes groupId", ->
api.createRun.resolves()
group = true
groupId = "gr123"
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123", group, groupId).then ->
snapshot(api.createRun.firstCall.args)
it "figures out groupId from CI environment variables", ->
@sandbox.stub(ciProvider, "groupId").returns("ci-group-123")
api.createRun.resolves()
group = true
record.generateProjectBuildId("id-123", "/_test-output/path/to/project", "project", "key-123", group).then ->
snapshot(api.createRun.firstCall.args)
it "handles status code errors of 401", ->
err = new Error
err.statusCode = 401
api.createRun.rejects(err)
key = "PI:KEY:<KEY>END_PI-9d173f5158aa"
record.generateProjectBuildId("id-123", "path", "project", key)
.then ->
throw new Error("should have failed but did not")
.catch (err) ->
expect(err.type).to.eq("RECORD_KEY_NOT_VALID")
expect(err.message).to.include("Key")
expect(err.message).to.include("3206e...158aa")
expect(err.message).to.include("invalid")
it "handles status code errors of 404", ->
err = new Error
err.statusCode = 404
api.createRun.rejects(err)
record.generateProjectBuildId("id-123", "path", "project", "key-123")
.then ->
throw new Error("should have failed but did not")
.catch (err) ->
expect(err.type).to.eq("DASHBOARD_PROJECT_NOT_FOUND")
it "handles all other errors", ->
err = new Error("foo")
api.createRun.rejects(err)
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
## this should not throw
record.generateProjectBuildId(1,2,3,4)
.then (ret) ->
expect(ret).to.be.null
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("Warning: We encountered an error talking to our servers.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
context ".uploadStdout", ->
beforeEach ->
@sandbox.stub(api, "updateInstanceStdout")
it "calls api.updateInstanceStdout", ->
api.updateInstanceStdout.resolves()
record.uploadStdout("id-123", "foobarbaz\n")
expect(api.updateInstanceStdout).to.be.calledWith({
instanceId: "id-123"
stdout: "foobarbaz\n"
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.updateInstanceStdout.rejects(err)
record.uploadStdout("id-123", "asdf")
.then ->
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.updateInstanceStdout.rejects(err)
record.uploadStdout("id-123", "Asdfasd")
.then ->
expect(logger.createException).not.to.be.called
context ".uploadAssets", ->
beforeEach ->
@sandbox.stub(api, "updateInstance")
it "calls api.updateInstance", ->
api.updateInstance.resolves()
record.uploadAssets("id-123", {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
video: "path/to/video"
error: "err msg"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
failingTests: ["foo"]
config: {foo: "bar"}
}, "foobarbaz")
expect(api.updateInstance).to.be.calledWith({
instanceId: "id-123"
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
error: "err msg"
video: true
screenshots: [{name: "foo"}]
failingTests: ["foo"]
cypressConfig: {foo: "bar"}
ciProvider: "circle"
stdout: "foobarbaz"
})
it "calls record.upload on success", ->
resp = {
videoUploadUrl: "https://s3.upload.video"
screenshotUploadUrls: [
{ clientId: 1, uploadUrl: "https://s3.upload.screenshot/1"}
{ clientId: 2, uploadUrl: "https://s3.upload.screenshot/2"}
]
}
api.updateInstance.resolves(resp)
@sandbox.stub(record, "upload").resolves()
record.uploadAssets("id-123", {
tests: 1
passes: 2
failures: 3
pending: 4
duration: 5
video: "path/to/video"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
failingTests: ["foo"]
config: {foo: "bar"}
})
.then ->
expect(record.upload).to.be.calledWith({
video: "path/to/video"
screenshots: [{
name: "foo"
path: "path/to/screenshot"
}]
videoUrl: resp.videoUploadUrl
screenshotUrls: resp.screenshotUploadUrls
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.updateInstance.rejects(err)
record.uploadAssets("id-123", {})
.then ->
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.updateInstance.rejects(err)
record.uploadAssets("id-123", {})
.then ->
expect(logger.createException).not.to.be.called
context ".createInstance", ->
beforeEach ->
@sandbox.stub(api, "createInstance")
it "calls api.createInstance", ->
api.createInstance.resolves()
record.createInstance("id-123", "cypress/integration/app_spec.coffee")
expect(api.createInstance).to.be.calledWith({
buildId: "id-123"
spec: "cypress/integration/app_spec.coffee"
})
it "logs warning on error", ->
err = new Error("foo")
@sandbox.spy(errors, "warning")
@sandbox.spy(logger, "createException")
@sandbox.spy(console, "log")
api.createInstance.rejects(err)
record.createInstance("id-123", null)
.then (ret) ->
expect(ret).to.be.null
expect(errors.warning).to.be.calledWith("DASHBOARD_CANNOT_CREATE_RUN_OR_INSTANCE", err)
expect(console.log).to.be.calledWithMatch("This run will not be recorded.")
expect(console.log).to.be.calledWithMatch("Error: foo")
expect(logger.createException).to.be.calledWith(err)
it "does not createException when statusCode is 503", ->
err = new Error("foo")
err.statusCode = 503
@sandbox.spy(logger, "createException")
api.createInstance.rejects(err)
record.createInstance("id-123", null)
.then (ret) ->
expect(ret).to.be.null
expect(logger.createException).not.to.be.called
context ".run", ->
beforeEach ->
@sandbox.stub(record, "generateProjectBuildId").resolves("build-id-123")
@sandbox.stub(record, "createInstance").resolves("instance-id-123")
@sandbox.stub(record, "uploadAssets").resolves()
@sandbox.stub(record, "uploadStdout").resolves()
@sandbox.stub(Project, "id").resolves("id-123")
@sandbox.stub(Project, "config").resolves({projectName: "projectName"})
@sandbox.stub(headless, "run").resolves({tests: 2, passes: 1})
@sandbox.spy(Project, "add")
it "ensures id", ->
record.run({projectPath: "/_test-output/path/to/project"})
.then ->
expect(Project.id).to.be.calledWith("/_test-output/path/to/project")
it "adds project with projectPath", ->
record.run({projectPath: "/_test-output/path/to/project"})
.then ->
expect(Project.add).to.be.calledWith("/_test-output/path/to/project")
it "passes id + projectPath + options.key to generateProjectBuildId", ->
record.run({projectPath: "/_test-output/path/to/project", key: "key-PI:KEY:<KEY>END_PI"})
.then ->
expect(record.generateProjectBuildId).to.be.calledWith("id-123", "/_test-output/path/to/project", "projectName", "key-foo")
it "passes buildId + options.spec to createInstance", ->
record.run({spec: "foo/bar/spec"})
.then ->
expect(record.createInstance).to.be.calledWith("build-id-123", "foo/bar/spec")
it "does not call record.createInstance or record.uploadAssets when no buildId", ->
record.generateProjectBuildId.resolves(null)
record.run({})
.then (stats) ->
expect(record.createInstance).not.to.be.called
expect(record.uploadAssets).not.to.be.called
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.run + ensureAuthToken + allDone into options", ->
opts = {foo: "bar"}
record.run(opts)
.then ->
expect(headless.run).to.be.calledWith({projectId: "id-123", foo: "bar", ensureAuthToken: false, allDone: false})
it "calls uploadAssets with instanceId, stats, and stdout", ->
@sandbox.stub(stdout, "capture").returns({
toString: -> "foobarbaz"
})
record.run({})
.then ->
expect(record.uploadAssets).to.be.calledWith("instance-id-123", {tests: 2, passes: 1}, "foobarbaz")
it "does not call uploadAssets with no instanceId", ->
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(record.uploadAssets).not.to.be.called
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "does not call uploadStdout with no instanceId", ->
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(record.uploadStdout).not.to.be.called
it "does not call uploadStdout on uploadAssets failure", ->
record.uploadAssets.restore()
@sandbox.stub(api, "updateInstance").rejects(new Error)
record.run({})
.then (stats) ->
expect(record.uploadStdout).not.to.be.called
it "calls record.uploadStdout on uploadAssets success", ->
@sandbox.stub(stdout, "capture").returns({
toString: -> "foobarbaz"
})
record.run({})
.then (stats) ->
expect(record.uploadStdout).to.be.calledWith("instance-id-123", "foobarbaz")
it "captures stdout from headless.run and headless.allDone", ->
fn = ->
console.log("foo")
console.log("bar")
process.stdout.write("baz")
Promise.resolve({failures: 0})
headless.run.restore()
@sandbox.stub(headless, "run", fn)
record.run({})
.then (stats) ->
str = record.uploadStdout.getCall(0).args[1]
expect(str).to.include("foo\nbar\nbaz")
expect(str).to.include("All Done")
it "calls headless.allDone on uploadAssets success", ->
@sandbox.spy(terminal, "header")
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.allDone on uploadAssets failure", ->
@sandbox.spy(terminal, "header")
@sandbox.stub(api, "updateInstance").rejects(new Error)
record.uploadAssets.restore()
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
it "calls headless.allDone on createInstance failure", ->
@sandbox.spy(terminal, "header")
record.createInstance.resolves(null)
record.run({})
.then (stats) ->
expect(terminal.header).to.be.calledWith("All Done")
expect(stats).to.deep.eq({
tests: 2
passes: 1
})
|
[
{
"context": "= $('#user_current_password').val()\n password = $('#user_password').val()\n confirm = $('#user_password_confirmat",
"end": 400,
"score": 0.8633158802986145,
"start": 383,
"tag": "PASSWORD",
"value": "$('#user_password"
}
] | app/assets/javascripts/profile.coffee | Salzig/Portus | 7 |
jQuery ->
email = $('#user_email').val()
$('#user_email').keyup ->
val = $('#user_email').val()
if val == email || val == ''
$('#edit_user.profile .btn').attr('disabled', 'disabled')
else
$('#edit_user.profile .btn').removeAttr('disabled')
$('#edit_user.password .form-control').keyup ->
current = $('#user_current_password').val()
password = $('#user_password').val()
confirm = $('#user_password_confirmation').val()
if current != '' && password != '' && confirm != '' && password == confirm
$('#edit_user.password .btn').removeAttr('disabled')
else
$('#edit_user.password .btn').attr('disabled', 'disabled')
| 151343 |
jQuery ->
email = $('#user_email').val()
$('#user_email').keyup ->
val = $('#user_email').val()
if val == email || val == ''
$('#edit_user.profile .btn').attr('disabled', 'disabled')
else
$('#edit_user.profile .btn').removeAttr('disabled')
$('#edit_user.password .form-control').keyup ->
current = $('#user_current_password').val()
password = <PASSWORD>').val()
confirm = $('#user_password_confirmation').val()
if current != '' && password != '' && confirm != '' && password == confirm
$('#edit_user.password .btn').removeAttr('disabled')
else
$('#edit_user.password .btn').attr('disabled', 'disabled')
| true |
jQuery ->
email = $('#user_email').val()
$('#user_email').keyup ->
val = $('#user_email').val()
if val == email || val == ''
$('#edit_user.profile .btn').attr('disabled', 'disabled')
else
$('#edit_user.profile .btn').removeAttr('disabled')
$('#edit_user.password .form-control').keyup ->
current = $('#user_current_password').val()
password = PI:PASSWORD:<PASSWORD>END_PI').val()
confirm = $('#user_password_confirmation').val()
if current != '' && password != '' && confirm != '' && password == confirm
$('#edit_user.password .btn').removeAttr('disabled')
else
$('#edit_user.password .btn').attr('disabled', 'disabled')
|
[
{
"context": "h.credentials()).to.deep.equal(\n key: 'client-id', oauthStateParam: 'oauth-state')\n\n describe '",
"end": 536,
"score": 0.9418292045593262,
"start": 527,
"tag": "KEY",
"value": "client-id"
},
{
"context": "tials()).to.deep.equal(\n key: 'client-id', oauthCode: 'authorization-code')\n\n describ",
"end": 1899,
"score": 0.6179461479187012,
"start": 1897,
"tag": "KEY",
"value": "id"
},
{
"context": "tials()).to.deep.equal(\n key: 'client-id', token: 'bearer-token')\n\n describe 'with a ",
"end": 2339,
"score": 0.8098605871200562,
"start": 2337,
"tag": "KEY",
"value": "id"
},
{
"context": "ep.equal(\n key: 'client-id', token: 'bearer-token')\n\n describe 'with a MAC token', ->\n ",
"end": 2356,
"score": 0.4677914083003998,
"start": 2350,
"tag": "PASSWORD",
"value": "bearer"
},
{
"context": "al(\n key: 'client-id', token: 'bearer-token')\n\n describe 'with a MAC token', ->\n ",
"end": 2356,
"score": 0.3926168382167816,
"start": 2356,
"tag": "KEY",
"value": ""
},
{
"context": "l(\n key: 'client-id', token: 'bearer-token')\n\n describe 'with a MAC token', ->\n ",
"end": 2362,
"score": 0.39267051219940186,
"start": 2357,
"tag": "PASSWORD",
"value": "token"
},
{
"context": "(\n token_type: 'mac', access_token: 'mac-token',\n kid: 'mac-server-kid', mac_key: '",
"end": 2523,
"score": 0.6539530158042908,
"start": 2514,
"tag": "KEY",
"value": "mac-token"
},
{
"context": "',\n kid: 'mac-server-kid', mac_key: 'mac-token-key',\n mac_algorithm: 'hmac-sha-1')\n\n ",
"end": 2586,
"score": 0.9299329519271851,
"start": 2573,
"tag": "KEY",
"value": "mac-token-key"
},
{
"context": "tials()).to.deep.equal(\n key: 'client-id', token: 'mac-token', tokenKid: 'mac-server-kid',",
"end": 2868,
"score": 0.874944806098938,
"start": 2866,
"tag": "KEY",
"value": "id"
},
{
"context": "ep.equal(\n key: 'client-id', token: 'mac-token', tokenKid: 'mac-server-kid',\n token",
"end": 2888,
"score": 0.602595329284668,
"start": 2879,
"tag": "KEY",
"value": "mac-token"
},
{
"context": " key: 'client-id', token: 'mac-token', tokenKid: 'mac-server-kid',\n tokenKey: 'mac-token-key')\n\n ",
"end": 2916,
"score": 0.7091830968856812,
"start": 2902,
"tag": "KEY",
"value": "mac-server-kid"
},
{
"context": "enKid: 'mac-server-kid',\n tokenKey: 'mac-token-key')\n\n describe 'with an OAuth error response',",
"end": 2957,
"score": 0.9162411689758301,
"start": 2944,
"tag": "KEY",
"value": "mac-token-key"
},
{
"context": "ey: 'client-id', secret: 'client-secret', token: 'bearer-token')\n\n describe 'with a MAC token', ->\n ",
"end": 6610,
"score": 0.45029711723327637,
"start": 6598,
"tag": "PASSWORD",
"value": "bearer-token"
},
{
"context": "(\n token_type: 'mac', access_token: 'mac-token',\n kid: 'mac-server-kid', mac_key: '",
"end": 6771,
"score": 0.7423486113548279,
"start": 6762,
"tag": "KEY",
"value": "mac-token"
},
{
"context": "',\n kid: 'mac-server-kid', mac_key: 'mac-token-key',\n mac_algorithm: 'hmac-sha-1')\n\n ",
"end": 6834,
"score": 0.8485040664672852,
"start": 6821,
"tag": "KEY",
"value": "mac-token-key"
},
{
"context": "', secret: 'client-secret',\n token: 'mac-token', tokenKid: 'mac-server-kid',\n token",
"end": 7173,
"score": 0.7665555477142334,
"start": 7164,
"tag": "KEY",
"value": "mac-token"
},
{
"context": " token: 'mac-token', tokenKid: 'mac-server-kid',\n tokenKey: 'mac-token-key')\n\n ",
"end": 7201,
"score": 0.5060511231422424,
"start": 7198,
"tag": "KEY",
"value": "kid"
},
{
"context": "enKid: 'mac-server-kid',\n tokenKey: 'mac-token-key')\n\n describe 'with an OAuth error response',",
"end": 7242,
"score": 0.9545502662658691,
"start": 7229,
"tag": "KEY",
"value": "mac-token-key"
},
{
"context": " token_type: 'mac', access_token: 'mac-token',\n kid: 'mac-server-kid', mac_k",
"end": 10088,
"score": 0.5096874237060547,
"start": 10088,
"tag": "KEY",
"value": ""
},
{
"context": " kid: 'mac-server-kid', mac_key: 'mac-token-key',\n mac_algorithm: 'hmac-sha-1')\n",
"end": 10153,
"score": 0.5713857412338257,
"start": 10148,
"tag": "KEY",
"value": "token"
},
{
"context": "ntials()).to.deep.equal(\n token: 'mac-token', tokenKid: 'mac-server-kid',\n token",
"end": 10441,
"score": 0.7181708216667175,
"start": 10436,
"tag": "KEY",
"value": "token"
},
{
"context": "enKid: 'mac-server-kid',\n tokenKey: 'mac-token-key')\n\n describe 'without a code or token', ->\n ",
"end": 10510,
"score": 0.8127115368843079,
"start": 10497,
"tag": "KEY",
"value": "mac-token-key"
},
{
"context": " client_id: 'client-id', client_secret: 'client-secret',\n answer: 42, other: 43)\n\n describ",
"end": 13016,
"score": 0.9757332801818848,
"start": 13003,
"tag": "KEY",
"value": "client-secret"
},
{
"context": " @oauth = new Dropbox.Util.Oauth(\n key: 'client-id', oauthStateParam: 'oauth-state')\n\n describe '",
"end": 13363,
"score": 0.7602159380912781,
"start": 13354,
"tag": "KEY",
"value": "client-id"
},
{
"context": "h.credentials()).to.deep.equal(\n key: 'client-id', oauthStateParam: 'oauth-state')\n\n describ",
"end": 13558,
"score": 0.6605151295661926,
"start": 13552,
"tag": "KEY",
"value": "client"
},
{
"context": "ntials()).to.deep.equal(\n key: 'client-id', oauthStateParam: 'oauth-state')\n\n describe '",
"end": 13561,
"score": 0.8766522407531738,
"start": 13559,
"tag": "KEY",
"value": "id"
},
{
"context": "eep.equal(\n key: 'client-id', secret: 'client-secret',\n oauthStateParam: 'oauth-stat",
"end": 15972,
"score": 0.8166946768760681,
"start": 15966,
"tag": "KEY",
"value": "client"
},
{
"context": "al(\n key: 'client-id', secret: 'client-secret',\n oauthStateParam: 'oauth-state')\n\n ",
"end": 15979,
"score": 0.842240035533905,
"start": 15973,
"tag": "KEY",
"value": "secret"
},
{
"context": "eep.equal(\n key: 'client-id', secret: 'client-secret', token: 'access-token')\n\n describe '#s",
"end": 22772,
"score": 0.5783741474151611,
"start": 22766,
"tag": "KEY",
"value": "client"
},
{
"context": "al(\n key: 'client-id', secret: 'client-secret', token: 'access-token')\n\n describe '#step', -",
"end": 22779,
"score": 0.7041170001029968,
"start": 22773,
"tag": "KEY",
"value": "secret"
},
{
"context": "ey: 'client-id', secret: 'client-secret', token: 'access-token')\n\n describe '#step', ->\n it 'retur",
"end": 22796,
"score": 0.382742315530777,
"start": 22790,
"tag": "KEY",
"value": "access"
},
{
"context": " tokenKey: 'token-key', tokenKid: 'token-kid')\n @stub = sinon.stub Dropbox.Util.Oauth, 't",
"end": 24579,
"score": 0.8829419612884521,
"start": 24576,
"tag": "KEY",
"value": "kid"
},
{
"context": "deep.equal(\n key: 'client-id', token: 'access-token',\n tokenKey: 'token-key', tokenK",
"end": 24891,
"score": 0.4772757589817047,
"start": 24885,
"tag": "KEY",
"value": "access"
},
{
"context": "ken: 'access-token',\n tokenKey: 'token-key', tokenKid: 'token-kid')\n\n describe '#step', -",
"end": 24932,
"score": 0.7030412554740906,
"start": 24929,
"tag": "KEY",
"value": "key"
},
{
"context": " tokenKey: 'token-key', tokenKid: 'token-kid')\n\n describe '#step', ->\n it 'returns DON",
"end": 24955,
"score": 0.8480944037437439,
"start": 24952,
"tag": "KEY",
"value": "kid"
},
{
"context": "ecret', token: 'access-token',\n tokenKey: 'token-key', tokenKid: 'token-kid')\n @stub = sinon.",
"end": 26025,
"score": 0.6611108183860779,
"start": 26020,
"tag": "KEY",
"value": "token"
},
{
"context": ", token: 'access-token',\n tokenKey: 'token-key', tokenKid: 'token-kid')\n @stub = sinon.stub",
"end": 26029,
"score": 0.8487693071365356,
"start": 26026,
"tag": "KEY",
"value": "key"
},
{
"context": "token',\n tokenKey: 'token-key', tokenKid: 'token-kid')\n @stub = sinon.stub Dropbox.Util.Oauth",
"end": 26048,
"score": 0.6396230459213257,
"start": 26043,
"tag": "KEY",
"value": "token"
},
{
"context": ",\n tokenKey: 'token-key', tokenKid: 'token-kid')\n @stub = sinon.stub Dropbox.Util.Oauth, 't",
"end": 26052,
"score": 0.9655197262763977,
"start": 26049,
"tag": "KEY",
"value": "kid"
},
{
"context": "ey: 'client-id', secret: 'client-secret', token: 'access-token',\n tokenKey: 'token-key', tokenKid: 't",
"end": 26404,
"score": 0.7095754742622375,
"start": 26392,
"tag": "KEY",
"value": "access-token"
},
{
"context": "n',\n tokenKey: 'token-key', tokenKid: 'token-kid')\n\n describe '#step', ->\n it 'returns DON",
"end": 26462,
"score": 0.7635625004768372,
"start": 26453,
"tag": "KEY",
"value": "token-kid"
},
{
"context": "arams)).to.deep.equal(\n access_token: 'access-token', kid: 'token-kid',\n mac: 'tlkfj",
"end": 27152,
"score": 0.5246176719665527,
"start": 27146,
"tag": "KEY",
"value": "access"
},
{
"context": "kenKey: 'token-key',\n tokenKid: 'token-kid')\n\n describe '#step', ->\n it 'returns DON",
"end": 27842,
"score": 0.6932865381240845,
"start": 27839,
"tag": "KEY",
"value": "kid"
},
{
"context": "arams)).to.deep.equal(\n access_token: 'access-token', kid: 'token-kid',\n mac: 'tlkfj",
"end": 28532,
"score": 0.5418930053710938,
"start": 28526,
"tag": "PASSWORD",
"value": "access"
},
{
"context": ".to.deep.equal(\n access_token: 'access-token', kid: 'token-kid',\n mac: 'tlkfjonwKYi",
"end": 28538,
"score": 0.421161413192749,
"start": 28533,
"tag": "KEY",
"value": "token"
}
] | test/src/fast/util/oauth_test.coffee | expo/dropbox-js | 64 | describe 'Dropbox.Util.Oauth', ->
beforeEach ->
@method = 'GET'
@url = '/photos'
@params = answer: 42, other: 43
@timestamp = 1370129543574
buildSecretlessTransitionTests = ->
describe '#setAuthStateParam', ->
beforeEach ->
@oauth.setAuthStateParam 'oauth-state'
it 'makes #step return PARAM_SET', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_SET
it 'adds the param to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', oauthStateParam: 'oauth-state')
describe '#processRedirectParams', ->
it 'returns true when the query params contain a code', ->
expect(@oauth.processRedirectParams(code: 'authorization-code')).
to.equal true
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'returns true when the query params contain a error', ->
expect(@oauth.processRedirectParams(error: 'access_denied')).
to.equal true
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with an authorization code', ->
beforeEach ->
@oauth.processRedirectParams code: 'authorization-code'
it 'makes #step return AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
it 'adds the code to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', oauthCode: 'authorization-code')
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', token: 'bearer-token')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: 'mac-token',
kid: 'mac-server-kid', mac_key: 'mac-token-key',
mac_algorithm: 'hmac-sha-1')
it 'makes #step() return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', token: 'mac-token', tokenKid: 'mac-server-kid',
tokenKey: 'mac-token-key')
describe 'with an OAuth error response', ->
beforeEach ->
@oauth.processRedirectParams(
error: 'access_denied',
error_description: "The application didn't seem trustworthy")
it 'makes #step() return ERROR', ->
expect(@oauth.step()).to.equal Dropbox.Client.ERROR
it 'preserves the api key in the credentials', ->
expect(@oauth.credentials()).to.deep.equal key: 'client-id'
it 'makes #error() return the error', ->
error = @oauth.error()
expect(error).to.be.instanceOf Dropbox.AuthError
expect(error.code).to.equal Dropbox.AuthError.ACCESS_DENIED
expect(error.description).to.equal(
"The application didn't seem trustworthy")
it 'lets #reset() return to RESET', ->
@oauth.reset()
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the auth step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
buildSecretTransitionTests = ->
describe '#setAuthStateParam', ->
beforeEach ->
@oauth.setAuthStateParam 'oauth-state'
it 'makes #step return PARAM_SET', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_SET
it 'adds the param to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
oauthStateParam: 'oauth-state')
describe '#processRedirectParams', ->
it 'returns true when the query params contain a code', ->
expect(@oauth.processRedirectParams(code: 'authorization-code')).
to.equal true
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'returns true when the query params contain a error', ->
expect(@oauth.processRedirectParams(error: 'access_denied')).
to.equal true
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with an authorization code', ->
beforeEach ->
@oauth.processRedirectParams code: 'authorization-code'
it 'makes #step return AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
it 'adds the code to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
oauthCode: 'authorization-code')
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', token: 'bearer-token')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: 'mac-token',
kid: 'mac-server-kid', mac_key: 'mac-token-key',
mac_algorithm: 'hmac-sha-1')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
token: 'mac-token', tokenKid: 'mac-server-kid',
tokenKey: 'mac-token-key')
describe 'with an OAuth error response', ->
beforeEach ->
@oauth.processRedirectParams(
error: 'access_denied',
error_description: "The application didn't seem trustworthy")
it 'makes #step() return ERROR', ->
expect(@oauth.step()).to.equal Dropbox.Client.ERROR
it 'preserves the app key and secret in the credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret')
it 'lets #reset() return to RESET', ->
@oauth.reset()
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
buildKeylessTransitionTests = ->
describe '#setAuthStateParam', ->
it 'throws an exception', ->
expect(=> @oauth.setAuthStateParam('oauth-state')).to.throw(
Error, /no api key/i)
describe '#processRedirectParams', ->
it 'throws an exception when the query params contain a code', ->
expect(=> @oauth.processRedirectParams(code: 'authorization-code')).
to.throw(Error, /no api key/i)
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'throws an exeception when the query params contain a error', ->
expect(=> @oauth.processRedirectParams(error: 'access_denied')).
to.throw(Error, /no api key/i)
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(token: 'bearer-token')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: 'mac-token',
kid: 'mac-server-kid', mac_key: 'mac-token-key',
mac_algorithm: 'hmac-sha-1')
it 'makes #step() return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
token: 'mac-token', tokenKid: 'mac-server-kid',
tokenKey: 'mac-token-key')
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the auth step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
  # Fixtures covering the RESET / PARAM_LOADED / AUTHORIZED auth steps with
  # every key+secret combination; each reuses the shared transition examples.
  describe 'with an app key', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth key: 'client-id'
    describe '#credentials', ->
      it 'returns the app key', ->
        expect(@oauth.credentials()).to.deep.equal key: 'client-id'
    describe '#step', ->
      it 'returns RESET', ->
        expect(@oauth.step()).to.equal Dropbox.Client.RESET
    describe '#authHeader', ->
      # 'Y2xpZW50LWlkOg==' is base64('client-id:') -- empty password.
      it 'uses HTTP Basic authentication with the client id and no pw', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Basic Y2xpZW50LWlkOg==')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the client id', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          client_id: 'client-id', answer: 42, other: 43)
    describe '#checkAuthStateParam', ->
      it 'returns false for null', ->
        expect(@oauth.checkAuthStateParam(null)).to.equal false
    buildSecretlessTransitionTests()
  describe 'with an app key and secret', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth key: 'client-id', secret: 'client-secret'
    describe '#credentials', ->
      it 'returns the app key', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', secret: 'client-secret')
    describe '#step', ->
      it 'returns RESET', ->
        expect(@oauth.step()).to.equal Dropbox.Client.RESET
    describe '#authHeader', ->
      # 'Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=' is base64('client-id:client-secret').
      it 'uses HTTP Basic authentication with the client id and secret', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the client id', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          client_id: 'client-id', client_secret: 'client-secret',
          answer: 42, other: 43)
    describe '#checkAuthStateParam', ->
      it 'returns false for null', ->
        expect(@oauth.checkAuthStateParam(null)).to.equal false
    buildSecretTransitionTests()
  describe 'with an app key and state param', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth(
        key: 'client-id', oauthStateParam: 'oauth-state')
    describe '#credentials', ->
      it 'returns the app key and state param', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', oauthStateParam: 'oauth-state')
    describe '#step', ->
      it 'returns PARAM_LOADED', ->
        expect(@oauth.step()).to.equal Dropbox.Client.PARAM_LOADED
    describe '#checkAuthStateParam', ->
      it 'returns true for the correct param', ->
        expect(@oauth.checkAuthStateParam('oauth-state')).to.equal true
      it 'returns false for the wrong param', ->
        expect(@oauth.checkAuthStateParam('not-oauth-state')).to.equal false
      it 'returns false for null', ->
        expect(@oauth.checkAuthStateParam(null)).to.equal false
    describe '#authorizeUrlParams', ->
      beforeEach ->
        @url = 'http://redirect.to/here'
      describe 'with token responseType', ->
        it 'asks for an access token', ->
          expect(@oauth.authorizeUrlParams('token', @url)).to.deep.equal(
            client_id: 'client-id', state: 'oauth-state',
            response_type: 'token', redirect_uri: @url)
      describe 'with code responseType', ->
        it 'asks for an authorization code', ->
          expect(@oauth.authorizeUrlParams('code', @url)).to.deep.equal(
            client_id: 'client-id', state: 'oauth-state',
            response_type: 'code', redirect_uri: @url)
      describe 'with an un-implemented responseType', ->
        it 'throws an Error', ->
          expect(=> @oauth.authorizeUrlParams('other', @url)).to.
            throw(Error, /unimplemented .* response type/i)
    describe '#authHeader', ->
      it 'uses HTTP Basic authentication with the client id and no pw', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Basic Y2xpZW50LWlkOg==')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the client id', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          client_id: 'client-id', answer: 42, other: 43)
    buildSecretlessTransitionTests()
  describe 'with an app key + secret and state param', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth(
        key: 'client-id', secret: 'client-secret',
        oauthStateParam: 'oauth-state')
    describe '#credentials', ->
      it 'returns the app key + secret and state param', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', secret: 'client-secret',
          oauthStateParam: 'oauth-state')
    describe '#step', ->
      it 'returns PARAM_LOADED', ->
        expect(@oauth.step()).to.equal Dropbox.Client.PARAM_LOADED
    describe '#checkAuthStateParam', ->
      it 'returns true for the correct param', ->
        expect(@oauth.checkAuthStateParam('oauth-state')).to.equal true
      it 'returns false for the wrong param', ->
        expect(@oauth.checkAuthStateParam('not-oauth-state')).to.equal false
      it 'returns false for null', ->
        expect(@oauth.checkAuthStateParam(null)).to.equal false
    describe '#authorizeUrlParams', ->
      beforeEach ->
        @url = 'http://redirect.to/here'
      describe 'with token responseType', ->
        it 'asks for an access token', ->
          expect(@oauth.authorizeUrlParams('token', @url)).to.deep.equal(
            client_id: 'client-id', state: 'oauth-state',
            response_type: 'token', redirect_uri: @url)
      describe 'with code responseType', ->
        it 'asks for an authorization code', ->
          expect(@oauth.authorizeUrlParams('code', @url)).to.deep.equal(
            client_id: 'client-id', state: 'oauth-state',
            response_type: 'code', redirect_uri: @url)
      describe 'with an un-implemented responseType', ->
        it 'throws an Error', ->
          expect(=> @oauth.authorizeUrlParams('other', @url)).to.
            throw(Error, /unimplemented .* response type/i)
    describe '#authHeader', ->
      it 'uses HTTP Basic authentication with the id as the username', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
    describe '#addAuthParams', ->
      it 'adds the client id', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          client_id: 'client-id', client_secret: 'client-secret',
          answer: 42, other: 43)
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
    buildSecretTransitionTests()
  describe 'with an app key and authorization code', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth key: 'client-id', oauthCode: 'auth-code'
    describe '#credentials', ->
      it 'returns the app key and authorization code', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', oauthCode: 'auth-code')
    describe '#step', ->
      it 'returns AUTHORIZED', ->
        expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
    describe '#accessTokenParams', ->
      describe 'without a redirect URL', ->
        it 'matches the spec', ->
          expect(@oauth.accessTokenParams()).to.deep.equal(
            grant_type: 'authorization_code', code: 'auth-code')
      describe 'with a redirect URL', ->
        it 'matches the spec and includes the URL', ->
          url = 'http://redirect.to/here'
          expect(@oauth.accessTokenParams(url)).to.deep.equal(
            grant_type: 'authorization_code', code: 'auth-code',
            redirect_uri: url)
    describe '#authHeader', ->
      it 'uses HTTP Basic authentication with the client id and no pw', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Basic Y2xpZW50LWlkOg==')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the client id', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          client_id: 'client-id', answer: 42, other: 43)
    buildSecretlessTransitionTests()
describe 'with an app key + secret and authorization code', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret', oauthCode: 'auth-code')
describe '#credentials', ->
it 'returns the app key + secret and state param', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', oauthCode: 'auth-code')
describe '#step', ->
it 'returns AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
describe '#accessTokenParams', ->
describe 'without a redirect URL', ->
it 'matches the spec', ->
expect(@oauth.accessTokenParams()).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code')
describe 'with a redirect URL', ->
it 'matches the spec and includes the URL', ->
url = 'http://redirect.to/here'
expect(@oauth.accessTokenParams(url)).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code',
redirect_uri: url)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the id as the username', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
describe '#addAuthParams', ->
beforeEach ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', client_secret: 'client-secret',
answer: 42, other: 43)
buildSecretTransitionTests()
  # DONE-step fixtures: Bearer and MAC tokens, with and without an app key /
  # secret. MAC fixtures stub Oauth.timestamp so the signed header is
  # deterministic and can be asserted byte-for-byte.
  describe 'with an app key and Bearer token', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth key: 'client-id', token: 'access-token'
    describe '#credentials', ->
      it 'returns the app key and access token', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', token: 'access-token')
    describe '#step', ->
      it 'returns DONE', ->
        expect(@oauth.step()).to.equal Dropbox.Client.DONE
    describe '#authHeader', ->
      it 'uses HTTP Bearer auth', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Bearer access-token')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the access token', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          access_token: 'access-token', answer: 42, other: 43)
    buildSecretlessTransitionTests()
  describe 'with an app key + secret and Bearer token', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth(
        key: 'client-id', secret: 'client-secret', token: 'access-token')
    describe '#credentials', ->
      it 'returns the app key + secret and access token', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', secret: 'client-secret', token: 'access-token')
    describe '#step', ->
      it 'returns DONE', ->
        expect(@oauth.step()).to.equal Dropbox.Client.DONE
    describe '#authHeader', ->
      it 'uses HTTP Bearer auth', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Bearer access-token')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the access token', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          access_token: 'access-token', answer: 42, other: 43)
    buildSecretTransitionTests()
  describe 'with a Bearer token', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth token: 'access-token'
    describe '#credentials', ->
      it 'returns the access token', ->
        expect(@oauth.credentials()).to.deep.equal token: 'access-token'
    describe '#step', ->
      it 'returns DONE', ->
        expect(@oauth.step()).to.equal Dropbox.Client.DONE
    describe '#authHeader', ->
      it 'uses HTTP Bearer auth', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Bearer access-token')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the access token', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          access_token: 'access-token', answer: 42, other: 43)
    buildKeylessTransitionTests()
  describe 'with an app key and MAC token', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth(
        key: 'client-id', token: 'access-token',
        tokenKey: 'token-key', tokenKid: 'token-kid')
      # Pin the timestamp so the MAC signature below is reproducible.
      @stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
      @stub.returns @timestamp
    afterEach ->
      @stub.restore()
    describe '#credentials', ->
      it 'returns the app key and access token', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', token: 'access-token',
          tokenKey: 'token-key', tokenKid: 'token-kid')
    describe '#step', ->
      it 'returns DONE', ->
        expect(@oauth.step()).to.equal Dropbox.Client.DONE
    describe '#authHeader', ->
      it 'uses HTTP MAC auth', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
          'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the access token and signature', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          access_token: 'access-token', kid: 'token-kid',
          mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
          answer: 42, other: 43)
    buildSecretlessTransitionTests()
  describe 'with an app key + secret and MAC token', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth(
        key: 'client-id', secret: 'client-secret', token: 'access-token',
        tokenKey: 'token-key', tokenKid: 'token-kid')
      @stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
      @stub.returns @timestamp
    afterEach ->
      @stub.restore()
    describe '#credentials', ->
      it 'returns the app key + secret and access token', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', secret: 'client-secret', token: 'access-token',
          tokenKey: 'token-key', tokenKid: 'token-kid')
    describe '#step', ->
      it 'returns DONE', ->
        expect(@oauth.step()).to.equal Dropbox.Client.DONE
    describe '#authHeader', ->
      it 'uses HTTP MAC auth', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
          'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the access token and signature', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          access_token: 'access-token', kid: 'token-kid',
          mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
          answer: 42, other: 43)
    buildSecretTransitionTests()
  describe 'with a MAC token', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth(
        token: 'access-token', tokenKey: 'token-key', tokenKid: 'token-kid')
      @stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
      @stub.returns @timestamp
    afterEach ->
      @stub.restore()
    describe '#credentials', ->
      it 'returns the app key and access token', ->
        expect(@oauth.credentials()).to.deep.equal(
          token: 'access-token', tokenKey: 'token-key',
          tokenKid: 'token-kid')
    describe '#step', ->
      it 'returns DONE', ->
        expect(@oauth.step()).to.equal Dropbox.Client.DONE
    describe '#authHeader', ->
      it 'uses HTTP MAC auth', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
          'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the access token and signature', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          access_token: 'access-token', kid: 'token-kid',
          mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
          answer: 42, other: 43)
    buildKeylessTransitionTests()
describe '#queryParamsFromUrl', ->
it 'extracts simple query params', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple fragment params', ->
url = 'http://localhost:8911/oauth_redirect#param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple fragment query params', ->
url = 'http://localhost:8911/oauth_redirect#?param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple query and fragment params', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1#param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts percent-encoded query params', ->
url = 'http://localhost:8911/oauth_redirect?p%20=v%20'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
'p ': 'v ')
it 'extracts query and fragment params with /-prefixed query', ->
url = 'http://localhost:8911/oauth_redirect?/param1=value1#param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts query and fragment params with /-prefixed fragment', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1#/param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts /-prefixed fragment query param', ->
url = 'http://localhost:8911/oauth_redirect#?/param1=value1'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1')
describe '.timestamp', ->
it 'returns a number', ->
expect(Dropbox.Util.Oauth.timestamp()).to.be.a 'number'
it 'returns non-decreasing values', ->
ts = (Dropbox.Util.Oauth.timestamp() for i in [0..100])
for i in [1..100]
expect(ts[i - i]).to.be.lte(ts[i])
  # Tests for the CSRF state-param generator: short strings, and 101 draws
  # must all be distinct (sorted, then adjacent pairs compared).
  describe '.randomAuthStateParam', ->
    it 'returns a short string', ->
      expect(Dropbox.Util.Oauth.randomAuthStateParam()).to.be.a 'string'
      expect(Dropbox.Util.Oauth.randomAuthStateParam().length).to.be.below 64
    it 'returns different values', ->
      values = (Dropbox.Util.Oauth.randomAuthStateParam() for i in [0..100])
      values.sort()
      for i in [1..100]
        expect(values[i - 1]).not.to.equal(values[i])
| 106546 | describe 'Dropbox.Util.Oauth', ->
  # Shared fixtures for every test in this suite: a sample request
  # (method/url/params) and a fixed timestamp used by the MAC-signing tests.
  beforeEach ->
    @method = 'GET'
    @url = '/photos'
    @params = answer: 42, other: 43
    @timestamp = 1370129543574
buildSecretlessTransitionTests = ->
describe '#setAuthStateParam', ->
beforeEach ->
@oauth.setAuthStateParam 'oauth-state'
it 'makes #step return PARAM_SET', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_SET
it 'adds the param to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: '<KEY>', oauthStateParam: 'oauth-state')
describe '#processRedirectParams', ->
it 'returns true when the query params contain a code', ->
expect(@oauth.processRedirectParams(code: 'authorization-code')).
to.equal true
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'returns true when the query params contain a error', ->
expect(@oauth.processRedirectParams(error: 'access_denied')).
to.equal true
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with an authorization code', ->
beforeEach ->
@oauth.processRedirectParams code: 'authorization-code'
it 'makes #step return AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
it 'adds the code to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-<KEY>', oauthCode: 'authorization-code')
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-<KEY>', token: '<PASSWORD> <KEY>-<PASSWORD>')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: '<KEY>',
kid: 'mac-server-kid', mac_key: '<KEY>',
mac_algorithm: 'hmac-sha-1')
it 'makes #step() return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-<KEY>', token: '<KEY>', tokenKid: '<KEY>',
tokenKey: '<KEY>')
describe 'with an OAuth error response', ->
beforeEach ->
@oauth.processRedirectParams(
error: 'access_denied',
error_description: "The application didn't seem trustworthy")
it 'makes #step() return ERROR', ->
expect(@oauth.step()).to.equal Dropbox.Client.ERROR
it 'preserves the api key in the credentials', ->
expect(@oauth.credentials()).to.deep.equal key: 'client-id'
it 'makes #error() return the error', ->
error = @oauth.error()
expect(error).to.be.instanceOf Dropbox.AuthError
expect(error.code).to.equal Dropbox.AuthError.ACCESS_DENIED
expect(error.description).to.equal(
"The application didn't seem trustworthy")
it 'lets #reset() return to RESET', ->
@oauth.reset()
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the auth step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
buildSecretTransitionTests = ->
describe '#setAuthStateParam', ->
beforeEach ->
@oauth.setAuthStateParam 'oauth-state'
it 'makes #step return PARAM_SET', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_SET
it 'adds the param to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
oauthStateParam: 'oauth-state')
describe '#processRedirectParams', ->
it 'returns true when the query params contain a code', ->
expect(@oauth.processRedirectParams(code: 'authorization-code')).
to.equal true
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'returns true when the query params contain a error', ->
expect(@oauth.processRedirectParams(error: 'access_denied')).
to.equal true
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with an authorization code', ->
beforeEach ->
@oauth.processRedirectParams code: 'authorization-code'
it 'makes #step return AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
it 'adds the code to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
oauthCode: 'authorization-code')
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', token: '<PASSWORD>')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: '<KEY>',
kid: 'mac-server-kid', mac_key: '<KEY>',
mac_algorithm: 'hmac-sha-1')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
token: '<KEY>', tokenKid: 'mac-server-<KEY>',
tokenKey: '<KEY>')
describe 'with an OAuth error response', ->
beforeEach ->
@oauth.processRedirectParams(
error: 'access_denied',
error_description: "The application didn't seem trustworthy")
it 'makes #step() return ERROR', ->
expect(@oauth.step()).to.equal Dropbox.Client.ERROR
it 'preserves the app key and secret in the credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret')
it 'lets #reset() return to RESET', ->
@oauth.reset()
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
buildKeylessTransitionTests = ->
describe '#setAuthStateParam', ->
it 'throws an exception', ->
expect(=> @oauth.setAuthStateParam('oauth-state')).to.throw(
Error, /no api key/i)
describe '#processRedirectParams', ->
it 'throws an exception when the query params contain a code', ->
expect(=> @oauth.processRedirectParams(code: 'authorization-code')).
to.throw(Error, /no api key/i)
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'throws an exeception when the query params contain a error', ->
expect(=> @oauth.processRedirectParams(error: 'access_denied')).
to.throw(Error, /no api key/i)
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(token: 'bearer-token')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: 'mac<KEY>-token',
kid: 'mac-server-kid', mac_key: 'mac-<PASSWORD>-key',
mac_algorithm: 'hmac-sha-1')
it 'makes #step() return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
token: 'mac-<PASSWORD>', tokenKid: 'mac-server-kid',
tokenKey: '<KEY>')
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the auth step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
  # RESET-step fixture: app key only, no secret.
  describe 'with an app key', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth key: 'client-id'
    describe '#credentials', ->
      it 'returns the app key', ->
        expect(@oauth.credentials()).to.deep.equal key: 'client-id'
    describe '#step', ->
      it 'returns RESET', ->
        expect(@oauth.step()).to.equal Dropbox.Client.RESET
    describe '#authHeader', ->
      # 'Y2xpZW50LWlkOg==' is base64('client-id:') -- empty password.
      it 'uses HTTP Basic authentication with the client id and no pw', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Basic Y2xpZW50LWlkOg==')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the client id', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          client_id: 'client-id', answer: 42, other: 43)
    describe '#checkAuthStateParam', ->
      it 'returns false for null', ->
        expect(@oauth.checkAuthStateParam(null)).to.equal false
    buildSecretlessTransitionTests()
describe 'with an app key and secret', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth key: 'client-id', secret: 'client-secret'
describe '#credentials', ->
it 'returns the app key', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret')
describe '#step', ->
it 'returns RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the client id and secret', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', client_secret: '<KEY>',
answer: 42, other: 43)
describe '#checkAuthStateParam', ->
it 'returns false for null', ->
expect(@oauth.checkAuthStateParam(null)).to.equal false
buildSecretTransitionTests()
describe 'with an app key and state param', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: '<KEY>', oauthStateParam: 'oauth-state')
describe '#credentials', ->
it 'returns the app key and state param', ->
expect(@oauth.credentials()).to.deep.equal(
key: '<KEY>-<KEY>', oauthStateParam: 'oauth-state')
describe '#step', ->
it 'returns PARAM_LOADED', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_LOADED
describe '#checkAuthStateParam', ->
it 'returns true for the correct param', ->
expect(@oauth.checkAuthStateParam('oauth-state')).to.equal true
it 'returns false for the wrong param', ->
expect(@oauth.checkAuthStateParam('not-oauth-state')).to.equal false
it 'returns false for null', ->
expect(@oauth.checkAuthStateParam(null)).to.equal false
describe '#authorizeUrlParams', ->
beforeEach ->
@url = 'http://redirect.to/here'
describe 'with token responseType', ->
it 'asks for an access token', ->
expect(@oauth.authorizeUrlParams('token', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'token', redirect_uri: @url)
describe 'with code responseType', ->
it 'asks for an authorization code', ->
expect(@oauth.authorizeUrlParams('code', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'code', redirect_uri: @url)
describe 'with an un-implemented responseType', ->
it 'throws an Error', ->
expect(=> @oauth.authorizeUrlParams('other', @url)).to.
throw(Error, /unimplemented .* response type/i)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the client id and no pw', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOg==')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', answer: 42, other: 43)
buildSecretlessTransitionTests()
describe 'with an app key + secret and state param', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret',
oauthStateParam: 'oauth-state')
describe '#credentials', ->
it 'returns the app key + secret and state param', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: '<KEY>-<KEY>',
oauthStateParam: 'oauth-state')
describe '#step', ->
it 'returns PARAM_LOADED', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_LOADED
describe '#checkAuthStateParam', ->
it 'returns true for the correct param', ->
expect(@oauth.checkAuthStateParam('oauth-state')).to.equal true
it 'returns false for the wrong param', ->
expect(@oauth.checkAuthStateParam('not-oauth-state')).to.equal false
it 'returns false for null', ->
expect(@oauth.checkAuthStateParam(null)).to.equal false
describe '#authorizeUrlParams', ->
beforeEach ->
@url = 'http://redirect.to/here'
describe 'with token responseType', ->
it 'asks for an access token', ->
expect(@oauth.authorizeUrlParams('token', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'token', redirect_uri: @url)
describe 'with code responseType', ->
it 'asks for an authorization code', ->
expect(@oauth.authorizeUrlParams('code', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'code', redirect_uri: @url)
describe 'with an un-implemented responseType', ->
it 'throws an Error', ->
expect(=> @oauth.authorizeUrlParams('other', @url)).to.
throw(Error, /unimplemented .* response type/i)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the id as the username', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', client_secret: 'client-secret',
answer: 42, other: 43)
buildSecretTransitionTests()
  # AUTHORIZED-step fixture: app key + authorization code awaiting exchange.
  describe 'with an app key and authorization code', ->
    beforeEach ->
      @oauth = new Dropbox.Util.Oauth key: 'client-id', oauthCode: 'auth-code'
    describe '#credentials', ->
      it 'returns the app key and authorization code', ->
        expect(@oauth.credentials()).to.deep.equal(
          key: 'client-id', oauthCode: 'auth-code')
    describe '#step', ->
      it 'returns AUTHORIZED', ->
        expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
    describe '#accessTokenParams', ->
      describe 'without a redirect URL', ->
        it 'matches the spec', ->
          expect(@oauth.accessTokenParams()).to.deep.equal(
            grant_type: 'authorization_code', code: 'auth-code')
      describe 'with a redirect URL', ->
        it 'matches the spec and includes the URL', ->
          url = 'http://redirect.to/here'
          expect(@oauth.accessTokenParams(url)).to.deep.equal(
            grant_type: 'authorization_code', code: 'auth-code',
            redirect_uri: url)
    describe '#authHeader', ->
      it 'uses HTTP Basic authentication with the client id and no pw', ->
        expect(@oauth.authHeader(@method, @url, @params)).to.equal(
          'Basic Y2xpZW50LWlkOg==')
    describe '#addAuthParams', ->
      it 'returns the given object', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
      it 'adds the client id', ->
        expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
          client_id: 'client-id', answer: 42, other: 43)
    buildSecretlessTransitionTests()
describe 'with an app key + secret and authorization code', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret', oauthCode: 'auth-code')
describe '#credentials', ->
it 'returns the app key + secret and state param', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', oauthCode: 'auth-code')
describe '#step', ->
it 'returns AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
describe '#accessTokenParams', ->
describe 'without a redirect URL', ->
it 'matches the spec', ->
expect(@oauth.accessTokenParams()).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code')
describe 'with a redirect URL', ->
it 'matches the spec and includes the URL', ->
url = 'http://redirect.to/here'
expect(@oauth.accessTokenParams(url)).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code',
redirect_uri: url)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the id as the username', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
describe '#addAuthParams', ->
beforeEach ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', client_secret: 'client-secret',
answer: 42, other: 43)
buildSecretTransitionTests()
describe 'with an app key and Bearer token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth key: 'client-id', token: 'access-token'
describe '#credentials', ->
it 'returns the app key and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', token: 'access-token')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP Bearer auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Bearer access-token')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', answer: 42, other: 43)
buildSecretlessTransitionTests()
describe 'with an app key + secret and Bearer token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret', token: 'access-token')
describe '#credentials', ->
it 'returns the app key + secret and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: '<KEY>-<KEY>', token: '<KEY>-token')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP Bearer auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Bearer access-token')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', answer: 42, other: 43)
buildSecretTransitionTests()
describe 'with a Bearer token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth token: 'access-token'
describe '#credentials', ->
it 'returns the access token', ->
expect(@oauth.credentials()).to.deep.equal token: 'access-token'
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP Bearer auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Bearer access-token')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', answer: 42, other: 43)
buildKeylessTransitionTests()
describe 'with an app key and MAC token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', token: 'access-token',
tokenKey: 'token-key', tokenKid: 'token-<KEY>')
@stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
@stub.returns @timestamp
afterEach ->
@stub.restore()
describe '#credentials', ->
it 'returns the app key and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', token: '<KEY>-token',
tokenKey: 'token-<KEY>', tokenKid: 'token-<KEY>')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP MAC auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token and signature', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', kid: 'token-kid',
mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
answer: 42, other: 43)
buildSecretlessTransitionTests()
describe 'with an app key + secret and MAC token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret', token: 'access-token',
tokenKey: '<PASSWORD>-<KEY>', tokenKid: '<PASSWORD>-<KEY>')
@stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
@stub.returns @timestamp
afterEach ->
@stub.restore()
describe '#credentials', ->
it 'returns the app key + secret and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', token: '<KEY>',
tokenKey: 'token-key', tokenKid: '<KEY>')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP MAC auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token and signature', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: '<KEY>-token', kid: 'token-kid',
mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
answer: 42, other: 43)
buildSecretTransitionTests()
describe 'with a MAC token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
token: 'access-token', tokenKey: 'token-key', tokenKid: 'token-kid')
@stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
@stub.returns @timestamp
afterEach ->
@stub.restore()
describe '#credentials', ->
it 'returns the app key and access token', ->
expect(@oauth.credentials()).to.deep.equal(
token: 'access-token', tokenKey: 'token-key',
tokenKid: 'token-<KEY>')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP MAC auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token and signature', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: '<KEY>-<PASSWORD>', kid: 'token-kid',
mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
answer: 42, other: 43)
buildKeylessTransitionTests()
describe '#queryParamsFromUrl', ->
it 'extracts simple query params', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple fragment params', ->
url = 'http://localhost:8911/oauth_redirect#param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple fragment query params', ->
url = 'http://localhost:8911/oauth_redirect#?param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple query and fragment params', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1#param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts percent-encoded query params', ->
url = 'http://localhost:8911/oauth_redirect?p%20=v%20'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
'p ': 'v ')
it 'extracts query and fragment params with /-prefixed query', ->
url = 'http://localhost:8911/oauth_redirect?/param1=value1#param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts query and fragment params with /-prefixed fragment', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1#/param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts /-prefixed fragment query param', ->
url = 'http://localhost:8911/oauth_redirect#?/param1=value1'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1')
describe '.timestamp', ->
it 'returns a number', ->
expect(Dropbox.Util.Oauth.timestamp()).to.be.a 'number'
it 'returns non-decreasing values', ->
ts = (Dropbox.Util.Oauth.timestamp() for i in [0..100])
for i in [1..100]
expect(ts[i - i]).to.be.lte(ts[i])
describe '.randomAuthStateParam', ->
it 'returns a short string', ->
expect(Dropbox.Util.Oauth.randomAuthStateParam()).to.be.a 'string'
expect(Dropbox.Util.Oauth.randomAuthStateParam().length).to.be.below 64
it 'returns different values', ->
values = (Dropbox.Util.Oauth.randomAuthStateParam() for i in [0..100])
values.sort()
for i in [1..100]
expect(values[i - 1]).not.to.equal(values[i])
| true | describe 'Dropbox.Util.Oauth', ->
beforeEach ->
@method = 'GET'
@url = '/photos'
@params = answer: 42, other: 43
@timestamp = 1370129543574
buildSecretlessTransitionTests = ->
describe '#setAuthStateParam', ->
beforeEach ->
@oauth.setAuthStateParam 'oauth-state'
it 'makes #step return PARAM_SET', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_SET
it 'adds the param to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'PI:KEY:<KEY>END_PI', oauthStateParam: 'oauth-state')
describe '#processRedirectParams', ->
it 'returns true when the query params contain a code', ->
expect(@oauth.processRedirectParams(code: 'authorization-code')).
to.equal true
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'returns true when the query params contain a error', ->
expect(@oauth.processRedirectParams(error: 'access_denied')).
to.equal true
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with an authorization code', ->
beforeEach ->
@oauth.processRedirectParams code: 'authorization-code'
it 'makes #step return AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
it 'adds the code to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-PI:KEY:<KEY>END_PI', oauthCode: 'authorization-code')
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-PI:KEY:<KEY>END_PI', token: 'PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI-PI:PASSWORD:<PASSWORD>END_PI')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: 'PI:KEY:<KEY>END_PI',
kid: 'mac-server-kid', mac_key: 'PI:KEY:<KEY>END_PI',
mac_algorithm: 'hmac-sha-1')
it 'makes #step() return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-PI:KEY:<KEY>END_PI', token: 'PI:KEY:<KEY>END_PI', tokenKid: 'PI:KEY:<KEY>END_PI',
tokenKey: 'PI:KEY:<KEY>END_PI')
describe 'with an OAuth error response', ->
beforeEach ->
@oauth.processRedirectParams(
error: 'access_denied',
error_description: "The application didn't seem trustworthy")
it 'makes #step() return ERROR', ->
expect(@oauth.step()).to.equal Dropbox.Client.ERROR
it 'preserves the api key in the credentials', ->
expect(@oauth.credentials()).to.deep.equal key: 'client-id'
it 'makes #error() return the error', ->
error = @oauth.error()
expect(error).to.be.instanceOf Dropbox.AuthError
expect(error.code).to.equal Dropbox.AuthError.ACCESS_DENIED
expect(error.description).to.equal(
"The application didn't seem trustworthy")
it 'lets #reset() return to RESET', ->
@oauth.reset()
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the auth step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
buildSecretTransitionTests = ->
describe '#setAuthStateParam', ->
beforeEach ->
@oauth.setAuthStateParam 'oauth-state'
it 'makes #step return PARAM_SET', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_SET
it 'adds the param to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
oauthStateParam: 'oauth-state')
describe '#processRedirectParams', ->
it 'returns true when the query params contain a code', ->
expect(@oauth.processRedirectParams(code: 'authorization-code')).
to.equal true
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'returns true when the query params contain a error', ->
expect(@oauth.processRedirectParams(error: 'access_denied')).
to.equal true
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with an authorization code', ->
beforeEach ->
@oauth.processRedirectParams code: 'authorization-code'
it 'makes #step return AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
it 'adds the code to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
oauthCode: 'authorization-code')
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', token: 'PI:PASSWORD:<PASSWORD>END_PI')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: 'PI:KEY:<KEY>END_PI',
kid: 'mac-server-kid', mac_key: 'PI:KEY:<KEY>END_PI',
mac_algorithm: 'hmac-sha-1')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret',
token: 'PI:KEY:<KEY>END_PI', tokenKid: 'mac-server-PI:KEY:<KEY>END_PI',
tokenKey: 'PI:KEY:<KEY>END_PI')
describe 'with an OAuth error response', ->
beforeEach ->
@oauth.processRedirectParams(
error: 'access_denied',
error_description: "The application didn't seem trustworthy")
it 'makes #step() return ERROR', ->
expect(@oauth.step()).to.equal Dropbox.Client.ERROR
it 'preserves the app key and secret in the credentials', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret')
it 'lets #reset() return to RESET', ->
@oauth.reset()
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
buildKeylessTransitionTests = ->
describe '#setAuthStateParam', ->
it 'throws an exception', ->
expect(=> @oauth.setAuthStateParam('oauth-state')).to.throw(
Error, /no api key/i)
describe '#processRedirectParams', ->
it 'throws an exception when the query params contain a code', ->
expect(=> @oauth.processRedirectParams(code: 'authorization-code')).
to.throw(Error, /no api key/i)
it 'returns true when the query params contain a token', ->
expect(@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'access-token')).
to.equal true
it 'throws an exeception when the query params contain a error', ->
expect(=> @oauth.processRedirectParams(error: 'access_denied')).
to.throw(Error, /no api key/i)
it 'throws an exception on unimplemented token types', ->
expect(=> @oauth.processRedirectParams(token_type: 'unimplemented')).
to.throw(Error, /unimplemented token/i)
it "returns false when the query params don't contain a code/token", ->
expect(@oauth.processRedirectParams(random_param: 'random')).
to.equal false
describe 'with a Bearer token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'Bearer', access_token: 'bearer-token')
it 'makes #step return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(token: 'bearer-token')
describe 'with a MAC token', ->
beforeEach ->
@oauth.processRedirectParams(
token_type: 'mac', access_token: 'macPI:KEY:<KEY>END_PI-token',
kid: 'mac-server-kid', mac_key: 'mac-PI:KEY:<PASSWORD>END_PI-key',
mac_algorithm: 'hmac-sha-1')
it 'makes #step() return DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
it 'adds the token to credentials', ->
expect(@oauth.credentials()).to.deep.equal(
token: 'mac-PI:KEY:<PASSWORD>END_PI', tokenKid: 'mac-server-kid',
tokenKey: 'PI:KEY:<KEY>END_PI')
describe 'without a code or token', ->
beforeEach ->
@oldStep = @oauth.step()
@oauth.processRedirectParams random_param: 'random'
it 'does not change the auth step', ->
expect(@oauth.step()).to.equal @oldStep
describe '#reset', ->
beforeEach ->
@oauth.reset()
it 'makes #step() return RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe 'with an app key', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth key: 'client-id'
describe '#credentials', ->
it 'returns the app key', ->
expect(@oauth.credentials()).to.deep.equal key: 'client-id'
describe '#step', ->
it 'returns RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the client id and no pw', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOg==')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', answer: 42, other: 43)
describe '#checkAuthStateParam', ->
it 'returns false for null', ->
expect(@oauth.checkAuthStateParam(null)).to.equal false
buildSecretlessTransitionTests()
describe 'with an app key and secret', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth key: 'client-id', secret: 'client-secret'
describe '#credentials', ->
it 'returns the app key', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret')
describe '#step', ->
it 'returns RESET', ->
expect(@oauth.step()).to.equal Dropbox.Client.RESET
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the client id and secret', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', client_secret: 'PI:KEY:<KEY>END_PI',
answer: 42, other: 43)
describe '#checkAuthStateParam', ->
it 'returns false for null', ->
expect(@oauth.checkAuthStateParam(null)).to.equal false
buildSecretTransitionTests()
describe 'with an app key and state param', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'PI:KEY:<KEY>END_PI', oauthStateParam: 'oauth-state')
describe '#credentials', ->
it 'returns the app key and state param', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'PI:KEY:<KEY>END_PI-PI:KEY:<KEY>END_PI', oauthStateParam: 'oauth-state')
describe '#step', ->
it 'returns PARAM_LOADED', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_LOADED
describe '#checkAuthStateParam', ->
it 'returns true for the correct param', ->
expect(@oauth.checkAuthStateParam('oauth-state')).to.equal true
it 'returns false for the wrong param', ->
expect(@oauth.checkAuthStateParam('not-oauth-state')).to.equal false
it 'returns false for null', ->
expect(@oauth.checkAuthStateParam(null)).to.equal false
describe '#authorizeUrlParams', ->
beforeEach ->
@url = 'http://redirect.to/here'
describe 'with token responseType', ->
it 'asks for an access token', ->
expect(@oauth.authorizeUrlParams('token', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'token', redirect_uri: @url)
describe 'with code responseType', ->
it 'asks for an authorization code', ->
expect(@oauth.authorizeUrlParams('code', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'code', redirect_uri: @url)
describe 'with an un-implemented responseType', ->
it 'throws an Error', ->
expect(=> @oauth.authorizeUrlParams('other', @url)).to.
throw(Error, /unimplemented .* response type/i)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the client id and no pw', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOg==')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', answer: 42, other: 43)
buildSecretlessTransitionTests()
describe 'with an app key + secret and state param', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret',
oauthStateParam: 'oauth-state')
describe '#credentials', ->
it 'returns the app key + secret and state param', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'PI:KEY:<KEY>END_PI-PI:KEY:<KEY>END_PI',
oauthStateParam: 'oauth-state')
describe '#step', ->
it 'returns PARAM_LOADED', ->
expect(@oauth.step()).to.equal Dropbox.Client.PARAM_LOADED
describe '#checkAuthStateParam', ->
it 'returns true for the correct param', ->
expect(@oauth.checkAuthStateParam('oauth-state')).to.equal true
it 'returns false for the wrong param', ->
expect(@oauth.checkAuthStateParam('not-oauth-state')).to.equal false
it 'returns false for null', ->
expect(@oauth.checkAuthStateParam(null)).to.equal false
describe '#authorizeUrlParams', ->
beforeEach ->
@url = 'http://redirect.to/here'
describe 'with token responseType', ->
it 'asks for an access token', ->
expect(@oauth.authorizeUrlParams('token', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'token', redirect_uri: @url)
describe 'with code responseType', ->
it 'asks for an authorization code', ->
expect(@oauth.authorizeUrlParams('code', @url)).to.deep.equal(
client_id: 'client-id', state: 'oauth-state',
response_type: 'code', redirect_uri: @url)
describe 'with an un-implemented responseType', ->
it 'throws an Error', ->
expect(=> @oauth.authorizeUrlParams('other', @url)).to.
throw(Error, /unimplemented .* response type/i)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the id as the username', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', client_secret: 'client-secret',
answer: 42, other: 43)
buildSecretTransitionTests()
describe 'with an app key and authorization code', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth key: 'client-id', oauthCode: 'auth-code'
describe '#credentials', ->
it 'returns the app key and authorization code', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', oauthCode: 'auth-code')
describe '#step', ->
it 'returns AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
describe '#accessTokenParams', ->
describe 'without a redirect URL', ->
it 'matches the spec', ->
expect(@oauth.accessTokenParams()).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code')
describe 'with a redirect URL', ->
it 'matches the spec and includes the URL', ->
url = 'http://redirect.to/here'
expect(@oauth.accessTokenParams(url)).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code',
redirect_uri: url)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the client id and no pw', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOg==')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', answer: 42, other: 43)
buildSecretlessTransitionTests()
describe 'with an app key + secret and authorization code', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret', oauthCode: 'auth-code')
describe '#credentials', ->
it 'returns the app key + secret and state param', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', oauthCode: 'auth-code')
describe '#step', ->
it 'returns AUTHORIZED', ->
expect(@oauth.step()).to.equal Dropbox.Client.AUTHORIZED
describe '#accessTokenParams', ->
describe 'without a redirect URL', ->
it 'matches the spec', ->
expect(@oauth.accessTokenParams()).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code')
describe 'with a redirect URL', ->
it 'matches the spec and includes the URL', ->
url = 'http://redirect.to/here'
expect(@oauth.accessTokenParams(url)).to.deep.equal(
grant_type: 'authorization_code', code: 'auth-code',
redirect_uri: url)
describe '#authHeader', ->
it 'uses HTTP Basic authentication with the id as the username', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Basic Y2xpZW50LWlkOmNsaWVudC1zZWNyZXQ=')
describe '#addAuthParams', ->
beforeEach ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the client id', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
client_id: 'client-id', client_secret: 'client-secret',
answer: 42, other: 43)
buildSecretTransitionTests()
describe 'with an app key and Bearer token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth key: 'client-id', token: 'access-token'
describe '#credentials', ->
it 'returns the app key and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', token: 'access-token')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP Bearer auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Bearer access-token')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', answer: 42, other: 43)
buildSecretlessTransitionTests()
describe 'with an app key + secret and Bearer token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret', token: 'access-token')
describe '#credentials', ->
it 'returns the app key + secret and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'PI:KEY:<KEY>END_PI-PI:KEY:<KEY>END_PI', token: 'PI:KEY:<KEY>END_PI-token')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP Bearer auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Bearer access-token')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', answer: 42, other: 43)
buildSecretTransitionTests()
describe 'with a Bearer token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth token: 'access-token'
describe '#credentials', ->
it 'returns the access token', ->
expect(@oauth.credentials()).to.deep.equal token: 'access-token'
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP Bearer auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'Bearer access-token')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', answer: 42, other: 43)
buildKeylessTransitionTests()
describe 'with an app key and MAC token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', token: 'access-token',
tokenKey: 'token-key', tokenKid: 'token-PI:KEY:<KEY>END_PI')
@stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
@stub.returns @timestamp
afterEach ->
@stub.restore()
describe '#credentials', ->
it 'returns the app key and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', token: 'PI:KEY:<KEY>END_PI-token',
tokenKey: 'token-PI:KEY:<KEY>END_PI', tokenKid: 'token-PI:KEY:<KEY>END_PI')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP MAC auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token and signature', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'access-token', kid: 'token-kid',
mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
answer: 42, other: 43)
buildSecretlessTransitionTests()
describe 'with an app key + secret and MAC token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
key: 'client-id', secret: 'client-secret', token: 'access-token',
tokenKey: 'PI:KEY:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI', tokenKid: 'PI:KEY:<PASSWORD>END_PI-PI:KEY:<KEY>END_PI')
@stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
@stub.returns @timestamp
afterEach ->
@stub.restore()
describe '#credentials', ->
it 'returns the app key + secret and access token', ->
expect(@oauth.credentials()).to.deep.equal(
key: 'client-id', secret: 'client-secret', token: 'PI:KEY:<KEY>END_PI',
tokenKey: 'token-key', tokenKid: 'PI:KEY:<KEY>END_PI')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP MAC auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token and signature', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'PI:KEY:<KEY>END_PI-token', kid: 'token-kid',
mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
answer: 42, other: 43)
buildSecretTransitionTests()
describe 'with a MAC token', ->
beforeEach ->
@oauth = new Dropbox.Util.Oauth(
token: 'access-token', tokenKey: 'token-key', tokenKid: 'token-kid')
@stub = sinon.stub Dropbox.Util.Oauth, 'timestamp'
@stub.returns @timestamp
afterEach ->
@stub.restore()
describe '#credentials', ->
it 'returns the app key and access token', ->
expect(@oauth.credentials()).to.deep.equal(
token: 'access-token', tokenKey: 'token-key',
tokenKid: 'token-PI:KEY:<KEY>END_PI')
describe '#step', ->
it 'returns DONE', ->
expect(@oauth.step()).to.equal Dropbox.Client.DONE
describe '#authHeader', ->
it 'uses HTTP MAC auth', ->
expect(@oauth.authHeader(@method, @url, @params)).to.equal(
'MAC kid=token-kid ts=1370129543574 access_token=access-token ' +
'mac=tlkfjonwKYiWU0Yf5EYwyDQfpJs=')
describe '#addAuthParams', ->
it 'returns the given object', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.equal @params
it 'adds the access token and signature', ->
expect(@oauth.addAuthParams(@method, @url, @params)).to.deep.equal(
access_token: 'PI:PASSWORD:<KEY>END_PI-PI:KEY:<PASSWORD>END_PI', kid: 'token-kid',
mac: 'tlkfjonwKYiWU0Yf5EYwyDQfpJs=', ts: 1370129543574,
answer: 42, other: 43)
buildKeylessTransitionTests()
describe '#queryParamsFromUrl', ->
it 'extracts simple query params', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple fragment params', ->
url = 'http://localhost:8911/oauth_redirect#param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple fragment query params', ->
url = 'http://localhost:8911/oauth_redirect#?param1=value1¶m2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts simple query and fragment params', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1#param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts percent-encoded query params', ->
url = 'http://localhost:8911/oauth_redirect?p%20=v%20'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
'p ': 'v ')
it 'extracts query and fragment params with /-prefixed query', ->
url = 'http://localhost:8911/oauth_redirect?/param1=value1#param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts query and fragment params with /-prefixed fragment', ->
url = 'http://localhost:8911/oauth_redirect?param1=value1#/param2=value2'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1', param2: 'value2')
it 'extracts /-prefixed fragment query param', ->
url = 'http://localhost:8911/oauth_redirect#?/param1=value1'
expect(Dropbox.Util.Oauth.queryParamsFromUrl(url)).to.deep.equal(
param1: 'value1')
describe '.timestamp', ->
it 'returns a number', ->
expect(Dropbox.Util.Oauth.timestamp()).to.be.a 'number'
it 'returns non-decreasing values', ->
ts = (Dropbox.Util.Oauth.timestamp() for i in [0..100])
for i in [1..100]
expect(ts[i - i]).to.be.lte(ts[i])
describe '.randomAuthStateParam', ->
it 'returns a short string', ->
expect(Dropbox.Util.Oauth.randomAuthStateParam()).to.be.a 'string'
expect(Dropbox.Util.Oauth.randomAuthStateParam().length).to.be.below 64
it 'returns different values', ->
values = (Dropbox.Util.Oauth.randomAuthStateParam() for i in [0..100])
values.sort()
for i in [1..100]
expect(values[i - 1]).not.to.equal(values[i])
|
[
{
"context": "mmit_token = process.env.ANAGRAMMIT_API_TOKEN || 'dev'\nanagrammit_host = process.env.ANAGRAMMIT_HOST ||",
"end": 288,
"score": 0.9731486439704895,
"start": 285,
"tag": "PASSWORD",
"value": "dev"
},
{
"context": "NAGRAMMIT_PORT || 3100\n\nmodule.exports =\n name: \"Anagrammit\"\n listen: (message, room, logger) ->\n body = ",
"end": 442,
"score": 0.852510929107666,
"start": 432,
"tag": "USERNAME",
"value": "Anagrammit"
}
] | src/plugins/anagrammit.coffee | abachman/pat-the-campfire-bot | 2 | # pull results from anagrammit web service
util = require('util')
curl = require('../vendor/simple_http').curl
qs = require('querystring')
# if you're using anagrammit, make sure you add the same api token to both heroku envs
anagrammit_token = process.env.ANAGRAMMIT_API_TOKEN || 'dev'
anagrammit_host = process.env.ANAGRAMMIT_HOST || 'localhost'
anagrammit_port = process.env.ANAGRAMMIT_PORT || 3100
module.exports =
name: "Anagrammit"
listen: (message, room, logger) ->
body = message.body
if /pat/i.test(body) && /anagram/i.test(body)
phrase = body.match(/"([^\"]*)"/)
unless phrase && phrase[1].length
room.speak "You'll have to give me more than that. Make sure you include a phrase in double quotes. e.g., \"helloworld\"", logger
return
phrase = phrase[1]
console.log "getting anagrams of \"#{ phrase }\""
console.log "from #{anagrammit_host}:#{anagrammit_port}/token=#{anagrammit_token}"
options =
host: anagrammit_host
port: anagrammit_port
path: "/generate?phrase=#{ qs.escape(phrase) }&token=#{ qs.escape(anagrammit_token) }"
curl options, (data) ->
console.log "results are ready! #{ data }"
results = JSON.parse(data)
if /success/i.test(results.status)
room.speak "#{ results.results.length } results:"
room.paste results.results.join(' \n'), logger
else
room.speak "there was a problem :( \"#{ results.message }\"", logger
| 120548 | # pull results from anagrammit web service
util = require('util')
curl = require('../vendor/simple_http').curl
qs = require('querystring')
# if you're using anagrammit, make sure you add the same api token to both heroku envs
anagrammit_token = process.env.ANAGRAMMIT_API_TOKEN || '<PASSWORD>'
anagrammit_host = process.env.ANAGRAMMIT_HOST || 'localhost'
anagrammit_port = process.env.ANAGRAMMIT_PORT || 3100
module.exports =
name: "Anagrammit"
listen: (message, room, logger) ->
body = message.body
if /pat/i.test(body) && /anagram/i.test(body)
phrase = body.match(/"([^\"]*)"/)
unless phrase && phrase[1].length
room.speak "You'll have to give me more than that. Make sure you include a phrase in double quotes. e.g., \"helloworld\"", logger
return
phrase = phrase[1]
console.log "getting anagrams of \"#{ phrase }\""
console.log "from #{anagrammit_host}:#{anagrammit_port}/token=#{anagrammit_token}"
options =
host: anagrammit_host
port: anagrammit_port
path: "/generate?phrase=#{ qs.escape(phrase) }&token=#{ qs.escape(anagrammit_token) }"
curl options, (data) ->
console.log "results are ready! #{ data }"
results = JSON.parse(data)
if /success/i.test(results.status)
room.speak "#{ results.results.length } results:"
room.paste results.results.join(' \n'), logger
else
room.speak "there was a problem :( \"#{ results.message }\"", logger
| true | # pull results from anagrammit web service
util = require('util')
curl = require('../vendor/simple_http').curl
qs = require('querystring')
# if you're using anagrammit, make sure you add the same api token to both heroku envs
anagrammit_token = process.env.ANAGRAMMIT_API_TOKEN || 'PI:PASSWORD:<PASSWORD>END_PI'
anagrammit_host = process.env.ANAGRAMMIT_HOST || 'localhost'
anagrammit_port = process.env.ANAGRAMMIT_PORT || 3100
module.exports =
name: "Anagrammit"
listen: (message, room, logger) ->
body = message.body
if /pat/i.test(body) && /anagram/i.test(body)
phrase = body.match(/"([^\"]*)"/)
unless phrase && phrase[1].length
room.speak "You'll have to give me more than that. Make sure you include a phrase in double quotes. e.g., \"helloworld\"", logger
return
phrase = phrase[1]
console.log "getting anagrams of \"#{ phrase }\""
console.log "from #{anagrammit_host}:#{anagrammit_port}/token=#{anagrammit_token}"
options =
host: anagrammit_host
port: anagrammit_port
path: "/generate?phrase=#{ qs.escape(phrase) }&token=#{ qs.escape(anagrammit_token) }"
curl options, (data) ->
console.log "results are ready! #{ data }"
results = JSON.parse(data)
if /success/i.test(results.status)
room.speak "#{ results.results.length } results:"
room.paste results.results.join(' \n'), logger
else
room.speak "there was a problem :( \"#{ results.message }\"", logger
|
[
{
"context": "][location.y] =\n move: move\n name: name\n direction: direction\n location: location\n",
"end": 2184,
"score": 0.9646176695823669,
"start": 2180,
"tag": "NAME",
"value": "name"
}
] | src/client/game.coffee | sortelli/jitterbug | 0 | window.jitterbug_game = (canvas_id) ->
canvas = new fabric.Canvas 'jitterbug_game_canvas',
backgroundColor: "#CCCCCC"
renderOnAddRemove: false
fabric.Object.prototype.transparentCorners = false
width = 60 # Number of bugs
height = 40 # Number of bugs
bug_size = 10 # pixels
max_turns = 1000
color_scale = [
{rgb: '#A61313', name: 'red' },
{rgb: '#EF38FF', name: 'pink' },
{rgb: '#4E45FF', name: 'blue' },
{rgb: '#0D851E', name: 'green' },
{rgb: '#BEC168', name: 'yellow'},
{rgb: '#C1BAB0', name: 'grey' },
{rgb: '#000000', name: 'black' }
]
canvas.setHeight height * bug_size
canvas.setWidth width * bug_size
bugs = starting_bugs(bug_size, width, height, max_turns, color_scale)
render_game canvas, bugs
next_turn bugs, canvas
next_turn = (bugs, canvas) ->
next_iteration bugs
render_game canvas, bugs
if bugs.turns % 2 == 0
progress_chart '#jitterbug_progress_chart_svg', bugs
if bugs.turns < bugs.max_turns
bugs.turns += 1
setTimeout((-> next_turn bugs, canvas), 30)
starting_bugs = (bug_size, width, height, max_turns, color_scale) ->
grid = []
for x in [0..(width - 1)]
grid[x] = []
for y in [0..(height - 1)]
grid[x][y] = null
bugs =
grid: grid
colors: {}
next_serial: 0
stats: {}
names: []
color_scale: color_scale
next_color: color_scale.map((color) -> color.name).reverse()
turns: 0
count: 0
bug_size: bug_size
width: width
height: height
max_turns: max_turns
for i in [0...10]
add_bug bugs, 'fly_trap', create_fly_trap
for i in [0...10]
add_bug bugs, 'moth1', create_moth
for i in [0...10]
add_bug bugs, 'moth2', create_moth
bugs
add_bug = (bugs, name, move) ->
location = random_location bugs
direction = random_num 3
color = bugs.colors[name]
unless color?
color = bugs.next_color.pop()
throw new Error("This game does not support this many bug types") unless color?
bugs.colors[name] = color
bugs.grid[location.x][location.y] =
move: move
name: name
direction: direction
location: location
color: color
serial: (bugs.next_serial += 1)
unless bugs.stats[name]?
bugs.stats[name] = [{iteration: 0, count: 0, name: name}]
bugs.names.push name
bugs.stats[name][0].count += 1
bugs.count += 1
random_location = (bugs) ->
x = random_num(bugs.width - 1)
y = random_num(bugs.height - 1)
if (bugs.grid[x][y]?)
random_location bugs
else
x: x, y: y
next_iteration = (bugs) ->
bug_list = []
for column in bugs.grid
for bug in column
if bug?
bug_list.push bug
shuffle_array bug_list
for bug in bug_list
move_bug bugs, bug
bugs.names.map (name) ->
count = bug_list.reduce(
((sum, bug) -> if bug.name == name then sum + 1 else sum), 0)
bugs.stats[name].push
iteration: bugs.turns
count: count
name: name
info_at_location = (grid, bug, x, y) ->
info = if x < 0 or y < 0 or x == 59 or y == 39
'WALL'
else if !grid[x][y]?
'EMPTY'
else if grid[x][y].name == bug.name
'SAME'
else
'OTHER'
info: info, x: x, y: y
direction_name = (direction) ->
switch direction
when 0 then 'NORTH'
when 1 then 'EAST'
when 2 then 'SOUTH'
else 'WEST'
move_bug = (bugs, bug) ->
x = bug.location.x
y = bug.location.y
d = bug.direction
surrounding_info = [
info_at_location bugs.grid, bug, x, y - 1 # North
info_at_location bugs.grid, bug, x + 1, y # East
info_at_location bugs.grid, bug, x, y + 1 # South
info_at_location bugs.grid, bug, x - 1, y # West
]
while d-- > 0
surrounding_info.push(surrounding_info.shift())
info =
front: surrounding_info[0].info
left: surrounding_info[1].info
right: surrounding_info[2].info
back: surrounding_info[3].info
direction: bug.direciton
front = surrounding_info[0]
next = bug.move info
switch next
when 'EAT' then eat_bug bugs, bug, front.info, front.x, front.y
when 'WALK_FORWARD' then walk_bug bugs, bug, front.info, front.x, front.y
when 'TURN_LEFT' then turn_bug bug, -1
when 'TURN_RIGHT' then turn_bug bug, +1
eat_bug = (bugs, bug, info, x, y) ->
return unless info == 'OTHER'
bugs.grid[x][y].name = bug.name
bugs.grid[x][y].move = bug.move
bugs.grid[x][y].color = bug.color
walk_bug = (bugs, bug, info, x, y) ->
return unless info == 'EMPTY'
bugs.grid[bug.location.x][bug.location.y] = null
bugs.grid[x][y] = bug
bug.location.x = x
bug.location.y = y
turn_bug = (bug, offset) ->
bug.direction = (((bug.direction + offset) % 4) + 4) % 4
add_bug_to_canvas = (canvas, bug, bug_size) ->
x = bug.location.x * bug_size
y = bug.location.y * bug_size
pos = switch bug.direction
when 0 then left: x, top: y, angle: 0
when 1 then left: x + bug_size, top: y, angle: 90
when 2 then left: x + bug_size, top: y + bug_size, angle: 180
when 3 then left: x, top: y + bug_size, angle: 270
if bug.prev_name == bug.name
if bug.canvas_img
bug.canvas_img.set pos
else
canvas.remove(bug.canvas_img) if bug.canvas_img
bug.prev_name = bug.name
url = '/imgs/bugs/bug_' + bug.color + '.png'
fabric.Image.fromURL url, (img) ->
bug.canvas_img = img
canvas.add img.set
width: bug_size
height: bug_size
left: pos.left
top: pos.top
angle: pos.angle
selectable: false
render_game = (canvas, bugs) ->
for column in bugs.grid
for bug in column
add_bug_to_canvas(canvas, bug, bugs.bug_size) if bug?
canvas.renderAll()
random_num = (max, min = 0) ->
Math.floor(Math.random() * (max - min) + min)
create_fly_trap =
(info) ->
if info.front == 'OTHER' then 'EAT' else 'TURN_LEFT'
create_moth =
(info) ->
r = random_num 100
if info.front == 'OTHER'
'EAT'
else if r > 4 and info.front == 'EMPTY'
'WALK_FORWARD'
else if r > 2
'TURN_LEFT'
else
'TURN_RIGHT'
shuffle_array = (array) ->
i = array.length
return [] if i is 0
while --i
j = Math.floor(Math.random() * (i+1))
[array[i], array[j]] = [array[j], array[i]]
| 217632 | window.jitterbug_game = (canvas_id) ->
canvas = new fabric.Canvas 'jitterbug_game_canvas',
backgroundColor: "#CCCCCC"
renderOnAddRemove: false
fabric.Object.prototype.transparentCorners = false
width = 60 # Number of bugs
height = 40 # Number of bugs
bug_size = 10 # pixels
max_turns = 1000
color_scale = [
{rgb: '#A61313', name: 'red' },
{rgb: '#EF38FF', name: 'pink' },
{rgb: '#4E45FF', name: 'blue' },
{rgb: '#0D851E', name: 'green' },
{rgb: '#BEC168', name: 'yellow'},
{rgb: '#C1BAB0', name: 'grey' },
{rgb: '#000000', name: 'black' }
]
canvas.setHeight height * bug_size
canvas.setWidth width * bug_size
bugs = starting_bugs(bug_size, width, height, max_turns, color_scale)
render_game canvas, bugs
next_turn bugs, canvas
next_turn = (bugs, canvas) ->
next_iteration bugs
render_game canvas, bugs
if bugs.turns % 2 == 0
progress_chart '#jitterbug_progress_chart_svg', bugs
if bugs.turns < bugs.max_turns
bugs.turns += 1
setTimeout((-> next_turn bugs, canvas), 30)
starting_bugs = (bug_size, width, height, max_turns, color_scale) ->
grid = []
for x in [0..(width - 1)]
grid[x] = []
for y in [0..(height - 1)]
grid[x][y] = null
bugs =
grid: grid
colors: {}
next_serial: 0
stats: {}
names: []
color_scale: color_scale
next_color: color_scale.map((color) -> color.name).reverse()
turns: 0
count: 0
bug_size: bug_size
width: width
height: height
max_turns: max_turns
for i in [0...10]
add_bug bugs, 'fly_trap', create_fly_trap
for i in [0...10]
add_bug bugs, 'moth1', create_moth
for i in [0...10]
add_bug bugs, 'moth2', create_moth
bugs
add_bug = (bugs, name, move) ->
location = random_location bugs
direction = random_num 3
color = bugs.colors[name]
unless color?
color = bugs.next_color.pop()
throw new Error("This game does not support this many bug types") unless color?
bugs.colors[name] = color
bugs.grid[location.x][location.y] =
move: move
name: <NAME>
direction: direction
location: location
color: color
serial: (bugs.next_serial += 1)
unless bugs.stats[name]?
bugs.stats[name] = [{iteration: 0, count: 0, name: name}]
bugs.names.push name
bugs.stats[name][0].count += 1
bugs.count += 1
random_location = (bugs) ->
x = random_num(bugs.width - 1)
y = random_num(bugs.height - 1)
if (bugs.grid[x][y]?)
random_location bugs
else
x: x, y: y
next_iteration = (bugs) ->
bug_list = []
for column in bugs.grid
for bug in column
if bug?
bug_list.push bug
shuffle_array bug_list
for bug in bug_list
move_bug bugs, bug
bugs.names.map (name) ->
count = bug_list.reduce(
((sum, bug) -> if bug.name == name then sum + 1 else sum), 0)
bugs.stats[name].push
iteration: bugs.turns
count: count
name: name
info_at_location = (grid, bug, x, y) ->
info = if x < 0 or y < 0 or x == 59 or y == 39
'WALL'
else if !grid[x][y]?
'EMPTY'
else if grid[x][y].name == bug.name
'SAME'
else
'OTHER'
info: info, x: x, y: y
direction_name = (direction) ->
switch direction
when 0 then 'NORTH'
when 1 then 'EAST'
when 2 then 'SOUTH'
else 'WEST'
move_bug = (bugs, bug) ->
x = bug.location.x
y = bug.location.y
d = bug.direction
surrounding_info = [
info_at_location bugs.grid, bug, x, y - 1 # North
info_at_location bugs.grid, bug, x + 1, y # East
info_at_location bugs.grid, bug, x, y + 1 # South
info_at_location bugs.grid, bug, x - 1, y # West
]
while d-- > 0
surrounding_info.push(surrounding_info.shift())
info =
front: surrounding_info[0].info
left: surrounding_info[1].info
right: surrounding_info[2].info
back: surrounding_info[3].info
direction: bug.direciton
front = surrounding_info[0]
next = bug.move info
switch next
when 'EAT' then eat_bug bugs, bug, front.info, front.x, front.y
when 'WALK_FORWARD' then walk_bug bugs, bug, front.info, front.x, front.y
when 'TURN_LEFT' then turn_bug bug, -1
when 'TURN_RIGHT' then turn_bug bug, +1
eat_bug = (bugs, bug, info, x, y) ->
return unless info == 'OTHER'
bugs.grid[x][y].name = bug.name
bugs.grid[x][y].move = bug.move
bugs.grid[x][y].color = bug.color
walk_bug = (bugs, bug, info, x, y) ->
return unless info == 'EMPTY'
bugs.grid[bug.location.x][bug.location.y] = null
bugs.grid[x][y] = bug
bug.location.x = x
bug.location.y = y
turn_bug = (bug, offset) ->
bug.direction = (((bug.direction + offset) % 4) + 4) % 4
add_bug_to_canvas = (canvas, bug, bug_size) ->
x = bug.location.x * bug_size
y = bug.location.y * bug_size
pos = switch bug.direction
when 0 then left: x, top: y, angle: 0
when 1 then left: x + bug_size, top: y, angle: 90
when 2 then left: x + bug_size, top: y + bug_size, angle: 180
when 3 then left: x, top: y + bug_size, angle: 270
if bug.prev_name == bug.name
if bug.canvas_img
bug.canvas_img.set pos
else
canvas.remove(bug.canvas_img) if bug.canvas_img
bug.prev_name = bug.name
url = '/imgs/bugs/bug_' + bug.color + '.png'
fabric.Image.fromURL url, (img) ->
bug.canvas_img = img
canvas.add img.set
width: bug_size
height: bug_size
left: pos.left
top: pos.top
angle: pos.angle
selectable: false
render_game = (canvas, bugs) ->
for column in bugs.grid
for bug in column
add_bug_to_canvas(canvas, bug, bugs.bug_size) if bug?
canvas.renderAll()
random_num = (max, min = 0) ->
Math.floor(Math.random() * (max - min) + min)
create_fly_trap =
(info) ->
if info.front == 'OTHER' then 'EAT' else 'TURN_LEFT'
create_moth =
(info) ->
r = random_num 100
if info.front == 'OTHER'
'EAT'
else if r > 4 and info.front == 'EMPTY'
'WALK_FORWARD'
else if r > 2
'TURN_LEFT'
else
'TURN_RIGHT'
shuffle_array = (array) ->
i = array.length
return [] if i is 0
while --i
j = Math.floor(Math.random() * (i+1))
[array[i], array[j]] = [array[j], array[i]]
| true | window.jitterbug_game = (canvas_id) ->
canvas = new fabric.Canvas 'jitterbug_game_canvas',
backgroundColor: "#CCCCCC"
renderOnAddRemove: false
fabric.Object.prototype.transparentCorners = false
width = 60 # Number of bugs
height = 40 # Number of bugs
bug_size = 10 # pixels
max_turns = 1000
color_scale = [
{rgb: '#A61313', name: 'red' },
{rgb: '#EF38FF', name: 'pink' },
{rgb: '#4E45FF', name: 'blue' },
{rgb: '#0D851E', name: 'green' },
{rgb: '#BEC168', name: 'yellow'},
{rgb: '#C1BAB0', name: 'grey' },
{rgb: '#000000', name: 'black' }
]
canvas.setHeight height * bug_size
canvas.setWidth width * bug_size
bugs = starting_bugs(bug_size, width, height, max_turns, color_scale)
render_game canvas, bugs
next_turn bugs, canvas
next_turn = (bugs, canvas) ->
next_iteration bugs
render_game canvas, bugs
if bugs.turns % 2 == 0
progress_chart '#jitterbug_progress_chart_svg', bugs
if bugs.turns < bugs.max_turns
bugs.turns += 1
setTimeout((-> next_turn bugs, canvas), 30)
starting_bugs = (bug_size, width, height, max_turns, color_scale) ->
grid = []
for x in [0..(width - 1)]
grid[x] = []
for y in [0..(height - 1)]
grid[x][y] = null
bugs =
grid: grid
colors: {}
next_serial: 0
stats: {}
names: []
color_scale: color_scale
next_color: color_scale.map((color) -> color.name).reverse()
turns: 0
count: 0
bug_size: bug_size
width: width
height: height
max_turns: max_turns
for i in [0...10]
add_bug bugs, 'fly_trap', create_fly_trap
for i in [0...10]
add_bug bugs, 'moth1', create_moth
for i in [0...10]
add_bug bugs, 'moth2', create_moth
bugs
add_bug = (bugs, name, move) ->
location = random_location bugs
direction = random_num 3
color = bugs.colors[name]
unless color?
color = bugs.next_color.pop()
throw new Error("This game does not support this many bug types") unless color?
bugs.colors[name] = color
bugs.grid[location.x][location.y] =
move: move
name: PI:NAME:<NAME>END_PI
direction: direction
location: location
color: color
serial: (bugs.next_serial += 1)
unless bugs.stats[name]?
bugs.stats[name] = [{iteration: 0, count: 0, name: name}]
bugs.names.push name
bugs.stats[name][0].count += 1
bugs.count += 1
random_location = (bugs) ->
x = random_num(bugs.width - 1)
y = random_num(bugs.height - 1)
if (bugs.grid[x][y]?)
random_location bugs
else
x: x, y: y
next_iteration = (bugs) ->
bug_list = []
for column in bugs.grid
for bug in column
if bug?
bug_list.push bug
shuffle_array bug_list
for bug in bug_list
move_bug bugs, bug
bugs.names.map (name) ->
count = bug_list.reduce(
((sum, bug) -> if bug.name == name then sum + 1 else sum), 0)
bugs.stats[name].push
iteration: bugs.turns
count: count
name: name
info_at_location = (grid, bug, x, y) ->
info = if x < 0 or y < 0 or x == 59 or y == 39
'WALL'
else if !grid[x][y]?
'EMPTY'
else if grid[x][y].name == bug.name
'SAME'
else
'OTHER'
info: info, x: x, y: y
direction_name = (direction) ->
switch direction
when 0 then 'NORTH'
when 1 then 'EAST'
when 2 then 'SOUTH'
else 'WEST'
move_bug = (bugs, bug) ->
x = bug.location.x
y = bug.location.y
d = bug.direction
surrounding_info = [
info_at_location bugs.grid, bug, x, y - 1 # North
info_at_location bugs.grid, bug, x + 1, y # East
info_at_location bugs.grid, bug, x, y + 1 # South
info_at_location bugs.grid, bug, x - 1, y # West
]
while d-- > 0
surrounding_info.push(surrounding_info.shift())
info =
front: surrounding_info[0].info
left: surrounding_info[1].info
right: surrounding_info[2].info
back: surrounding_info[3].info
direction: bug.direciton
front = surrounding_info[0]
next = bug.move info
switch next
when 'EAT' then eat_bug bugs, bug, front.info, front.x, front.y
when 'WALK_FORWARD' then walk_bug bugs, bug, front.info, front.x, front.y
when 'TURN_LEFT' then turn_bug bug, -1
when 'TURN_RIGHT' then turn_bug bug, +1
eat_bug = (bugs, bug, info, x, y) ->
return unless info == 'OTHER'
bugs.grid[x][y].name = bug.name
bugs.grid[x][y].move = bug.move
bugs.grid[x][y].color = bug.color
walk_bug = (bugs, bug, info, x, y) ->
return unless info == 'EMPTY'
bugs.grid[bug.location.x][bug.location.y] = null
bugs.grid[x][y] = bug
bug.location.x = x
bug.location.y = y
turn_bug = (bug, offset) ->
bug.direction = (((bug.direction + offset) % 4) + 4) % 4
add_bug_to_canvas = (canvas, bug, bug_size) ->
x = bug.location.x * bug_size
y = bug.location.y * bug_size
pos = switch bug.direction
when 0 then left: x, top: y, angle: 0
when 1 then left: x + bug_size, top: y, angle: 90
when 2 then left: x + bug_size, top: y + bug_size, angle: 180
when 3 then left: x, top: y + bug_size, angle: 270
if bug.prev_name == bug.name
if bug.canvas_img
bug.canvas_img.set pos
else
canvas.remove(bug.canvas_img) if bug.canvas_img
bug.prev_name = bug.name
url = '/imgs/bugs/bug_' + bug.color + '.png'
fabric.Image.fromURL url, (img) ->
bug.canvas_img = img
canvas.add img.set
width: bug_size
height: bug_size
left: pos.left
top: pos.top
angle: pos.angle
selectable: false
render_game = (canvas, bugs) ->
for column in bugs.grid
for bug in column
add_bug_to_canvas(canvas, bug, bugs.bug_size) if bug?
canvas.renderAll()
random_num = (max, min = 0) ->
Math.floor(Math.random() * (max - min) + min)
create_fly_trap =
(info) ->
if info.front == 'OTHER' then 'EAT' else 'TURN_LEFT'
create_moth =
(info) ->
r = random_num 100
if info.front == 'OTHER'
'EAT'
else if r > 4 and info.front == 'EMPTY'
'WALK_FORWARD'
else if r > 2
'TURN_LEFT'
else
'TURN_RIGHT'
shuffle_array = (array) ->
i = array.length
return [] if i is 0
while --i
j = Math.floor(Math.random() * (i+1))
[array[i], array[j]] = [array[j], array[i]]
|
[
{
"context": "s\",\"Prov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"",
"end": 505,
"score": 0.7670413851737976,
"start": 502,
"tag": "NAME",
"value": "Dan"
},
{
"context": "ov\",\"Eccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"",
"end": 511,
"score": 0.6559768915176392,
"start": 508,
"tag": "NAME",
"value": "Hos"
},
{
"context": "ccl\",\"Song\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"",
"end": 518,
"score": 0.8542309999465942,
"start": 514,
"tag": "NAME",
"value": "Joel"
},
{
"context": "ong\",\"Isa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\"",
"end": 522,
"score": 0.5625885725021362,
"start": 521,
"tag": "NAME",
"value": "A"
},
{
"context": "sa\",\"Jer\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",",
"end": 530,
"score": 0.5614379644393921,
"start": 528,
"tag": "NAME",
"value": "Ob"
},
{
"context": "r\",\"Lam\",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Mat",
"end": 540,
"score": 0.8598995208740234,
"start": 535,
"tag": "NAME",
"value": "Jonah"
},
{
"context": ",\"Ezek\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Ma",
"end": 546,
"score": 0.8236979246139526,
"start": 543,
"tag": "NAME",
"value": "Mic"
},
{
"context": "\",\"Dan\",\"Hos\",\"Joel\",\"Amos\",\"Obad\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",",
"end": 550,
"score": 0.6407975554466248,
"start": 549,
"tag": "NAME",
"value": "N"
},
{
"context": "d\",\"Jonah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",",
"end": 584,
"score": 0.621737003326416,
"start": 581,
"tag": "NAME",
"value": "Mal"
},
{
"context": "nah\",\"Mic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",",
"end": 591,
"score": 0.9278978705406189,
"start": 587,
"tag": "NAME",
"value": "Matt"
},
{
"context": "ic\",\"Nah\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",\"Gal\",\"",
"end": 598,
"score": 0.9509187936782837,
"start": 594,
"tag": "NAME",
"value": "Mark"
},
{
"context": "h\",\"Hab\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",\"Gal\",\"Eph\",\"P",
"end": 605,
"score": 0.872262716293335,
"start": 601,
"tag": "NAME",
"value": "Luke"
},
{
"context": "\",\"Zeph\",\"Hag\",\"Zech\",\"Mal\",\"Matt\",\"Mark\",\"Luke\",\"John\",\"Acts\",\"Rom\",\"1Cor\",\"2Cor\",\"Gal\",\"Eph\",\"Phil\",\"C",
"end": 612,
"score": 0.713059663772583,
"start": 608,
"tag": "NAME",
"value": "John"
},
{
"context": "al(\"Deut.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Josh (zu)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv",
"end": 9209,
"score": 0.7705211639404297,
"start": 9208,
"tag": "NAME",
"value": "J"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Josh (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"UJoshuwa 1:1\")",
"end": 9468,
"score": 0.8948482275009155,
"start": 9467,
"tag": "NAME",
"value": "J"
},
{
"context": "al(\"Ezek.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Dan (zu)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 24884,
"score": 0.6982195377349854,
"start": 24881,
"tag": "NAME",
"value": "Dan"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Dan (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"UDaniyeli 1:1\").o",
"end": 25142,
"score": 0.8694042563438416,
"start": 25139,
"tag": "NAME",
"value": "Dan"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Obad (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"U-Obadiya 1:1\").o",
"end": 27910,
"score": 0.8278636932373047,
"start": 27906,
"tag": "NAME",
"value": "Obad"
},
{
"context": "al(\"Obad.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Jonah (zu)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_",
"end": 28351,
"score": 0.7595509886741638,
"start": 28348,
"tag": "NAME",
"value": "Jon"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Jonah (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"Jonah 1:1\").osi",
"end": 28611,
"score": 0.6981237530708313,
"start": 28608,
"tag": "NAME",
"value": "Jon"
},
{
"context": "l(\"Mal.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Matt (zu)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 33880,
"score": 0.6844573616981506,
"start": 33877,
"tag": "NAME",
"value": "att"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Matt (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"NgokukaMathewu 1:",
"end": 34139,
"score": 0.9814304709434509,
"start": 34135,
"tag": "NAME",
"value": "Matt"
},
{
"context": "al(\"Mark.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book Luke (zu)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 35524,
"score": 0.9294874668121338,
"start": 35520,
"tag": "NAME",
"value": "Luke"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Luke (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"NgokukaLuka 1:1\")",
"end": 35783,
"score": 0.9444615244865417,
"start": 35779,
"tag": "NAME",
"value": "Luke"
},
{
"context": "p.include_apocrypha true\n\tit \"should handle book: 1John (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"1 kaJohane 1:",
"end": 36480,
"score": 0.838202714920044,
"start": 36479,
"tag": "NAME",
"value": "1"
},
{
"context": "(\"1John.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book 2John (zu)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bc",
"end": 36935,
"score": 0.7914099097251892,
"start": 36934,
"tag": "NAME",
"value": "2"
},
{
"context": "l(\"3John.1.1\")\n\t\t`\n\t\ttrue\ndescribe \"Localized book John (zu)\", ->\n\tp = {}\n\tbeforeEach ->\n\t\tp = new bcv_pa",
"end": 38368,
"score": 0.9137840867042542,
"start": 38364,
"tag": "NAME",
"value": "John"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: John (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"NgokukaJohane 1:1",
"end": 38627,
"score": 0.983789324760437,
"start": 38623,
"tag": "NAME",
"value": "John"
},
{
"context": "\tp.include_apocrypha true\n\tit \"should handle book: Phil (zu)\", ->\n\t\t`\n\t\texpect(p.parse(\"KwabaseFilipi 1:1",
"end": 43908,
"score": 0.9888197779655457,
"start": 43904,
"tag": "NAME",
"value": "Phil"
}
] | lib/bible-tools/lib/Bible-Passage-Reference-Parser/src/zu/spec.coffee | saiba-mais/bible-lessons | 0 | bcv_parser = require("../../js/zu_bcv_parser.js").bcv_parser
describe "Parsing", ->
p = {}
beforeEach ->
p = new bcv_parser
p.options.osis_compaction_strategy = "b"
p.options.sequence_combination_strategy = "combine"
it "should round-trip OSIS references", ->
p.set_options osis_compaction_strategy: "bc"
books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
it "should round-trip OSIS Apocrypha references", ->
p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
p.include_apocrypha true
books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
for book in books
bc = book + ".1"
bcv = bc + ".1"
bcv_range = bcv + "-" + bc + ".2"
expect(p.parse(bc).osis()).toEqual bc
expect(p.parse(bcv).osis()).toEqual bcv
expect(p.parse(bcv_range).osis()).toEqual bcv_range
p.set_options ps151_strategy: "bc"
expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
p.include_apocrypha false
for book in books
bc = book + ".1"
expect(p.parse(bc).osis()).toEqual ""
it "should handle a preceding character", ->
expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
expect(p.parse("1Ps 1").osis()).toEqual ""
expect(p.parse("11Sam 1").osis()).toEqual ""
describe "Localized book Gen (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gen (zu)", ->
`
expect(p.parse("UGenesise 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Genesise 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
p.include_apocrypha(false)
expect(p.parse("UGENESISE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GENESISE 1:1").osis()).toEqual("Gen.1.1")
expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
`
true
describe "Localized book Exod (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Exod (zu)", ->
`
expect(p.parse("U-Eksodusi 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Eksodusi 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
p.include_apocrypha(false)
expect(p.parse("U-EKSODUSI 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EKSODUSI 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
`
true
describe "Localized book Bel (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bel (zu)", ->
`
expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
`
true
describe "Localized book Lev (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lev (zu)", ->
`
expect(p.parse("ULevitikusi 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levitikusi 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Levi 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
p.include_apocrypha(false)
expect(p.parse("ULEVITIKUSI 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVITIKUSI 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEVI 1:1").osis()).toEqual("Lev.1.1")
expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
`
true
describe "Localized book Num (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Num (zu)", ->
`
expect(p.parse("UNumeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
p.include_apocrypha(false)
expect(p.parse("UNUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
`
true
describe "Localized book Sir (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sir (zu)", ->
`
expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
`
true
describe "Localized book Wis (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Wis (zu)", ->
`
expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
`
true
describe "Localized book Lam (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Lam (zu)", ->
`
expect(p.parse("IsiLilo 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
p.include_apocrypha(false)
expect(p.parse("ISILILO 1:1").osis()).toEqual("Lam.1.1")
expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
`
true
describe "Localized book EpJer (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: EpJer (zu)", ->
`
expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
`
true
describe "Localized book Rev (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rev (zu)", ->
`
expect(p.parse("IsAmbulo 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Isamb 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Samb 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
p.include_apocrypha(false)
expect(p.parse("ISAMBULO 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("ISAMB 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("SAMB 1:1").osis()).toEqual("Rev.1.1")
expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
`
true
describe "Localized book PrMan (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrMan (zu)", ->
`
expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
`
true
describe "Localized book Deut (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (zu)", ->
`
expect(p.parse("UDuteronomi 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Duteronomi 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Duter 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("UDUTERONOMI 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DUTERONOMI 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DUTER 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
describe "Localized book Josh (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Josh (zu)", ->
`
expect(p.parse("UJoshuwa 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Joshuwa 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
p.include_apocrypha(false)
expect(p.parse("UJOSHUWA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSHUWA 1:1").osis()).toEqual("Josh.1.1")
expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
`
true
describe "Localized book Judg (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Judg (zu)", ->
`
expect(p.parse("AbAhluleli 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
p.include_apocrypha(false)
expect(p.parse("ABAHLULELI 1:1").osis()).toEqual("Judg.1.1")
expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
`
true
describe "Localized book Ruth (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ruth (zu)", ->
`
expect(p.parse("URuthe 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruthe 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
p.include_apocrypha(false)
expect(p.parse("URUTHE 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTHE 1:1").osis()).toEqual("Ruth.1.1")
expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
`
true
describe "Localized book 1Esd (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Esd (zu)", ->
`
expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
`
true
describe "Localized book 2Esd (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Esd (zu)", ->
`
expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
`
true
describe "Localized book Isa (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Isa (zu)", ->
`
expect(p.parse("U-Isaya 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isaya 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
p.include_apocrypha(false)
expect(p.parse("U-ISAYA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISAYA 1:1").osis()).toEqual("Isa.1.1")
expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
`
true
describe "Localized book 2Sam (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Sam (zu)", ->
`
expect(p.parse("2 uSamuweli 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Samuweli 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("2 USAMUWELI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAMUWELI 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
`
true
describe "Localized book 1Sam (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Sam (zu)", ->
`
expect(p.parse("1 uSamuweli 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Samuweli 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
p.include_apocrypha(false)
expect(p.parse("1 USAMUWELI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAMUWELI 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
`
true
describe "Localized book 2Kgs (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Kgs (zu)", ->
`
expect(p.parse("2 AmaKhosi 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("2 AMAKHOSI 1:1").osis()).toEqual("2Kgs.1.1")
expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
`
true
describe "Localized book 1Kgs (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Kgs (zu)", ->
`
expect(p.parse("1 AmaKhosi 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
p.include_apocrypha(false)
expect(p.parse("1 AMAKHOSI 1:1").osis()).toEqual("1Kgs.1.1")
expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
`
true
describe "Localized book 2Chr (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Chr (zu)", ->
`
expect(p.parse("2 IziKronike 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 iziKronike 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("2 IZIKRONIKE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2 IZIKRONIKE 1:1").osis()).toEqual("2Chr.1.1")
expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
`
true
describe "Localized book 1Chr (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Chr (zu)", ->
`
expect(p.parse("1 IziKronike 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 iziKronike 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
p.include_apocrypha(false)
expect(p.parse("1 IZIKRONIKE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1 IZIKRONIKE 1:1").osis()).toEqual("1Chr.1.1")
expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
`
true
describe "Localized book Ezra (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezra (zu)", ->
`
expect(p.parse("U-Ezra 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
p.include_apocrypha(false)
expect(p.parse("U-EZRA 1:1").osis()).toEqual("Ezra.1.1")
expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
`
true
describe "Localized book Neh (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Neh (zu)", ->
`
expect(p.parse("UNehemiya 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Nehemiya 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
p.include_apocrypha(false)
expect(p.parse("UNEHEMIYA 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEHEMIYA 1:1").osis()).toEqual("Neh.1.1")
expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
`
true
describe "Localized book GkEsth (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (zu)", ->
`
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (zu)", ->
`
expect(p.parse("U-Esteri 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esteri 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("U-ESTERI 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTERI 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (zu)", ->
`
expect(p.parse("UJobe 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Jobe 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("UJOBE 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOBE 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (zu)", ->
`
expect(p.parse("AmaHubo 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("IHubo 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("AMAHUBO 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("IHUBO 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (zu)", ->
`
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (zu)", ->
`
expect(p.parse("IzAga 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("IZAGA 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (zu)", ->
`
expect(p.parse("UmShumayeli 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Mshumayeli 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("UMSHUMAYELI 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("MSHUMAYELI 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (zu)", ->
`
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (zu)", ->
`
expect(p.parse("IsiHlabelelo SeziHlabelelo 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("IsiHlabelelo seziHlabelelo 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("ISIHLABELELO SEZIHLABELELO 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("ISIHLABELELO SEZIHLABELELO 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book Jer (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jer (zu)", ->
`
expect(p.parse("UJeremiya 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jeremiya 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("UJEREMIYA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JEREMIYA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
describe "Localized book Ezek (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek (zu)", ->
`
expect(p.parse("UHezekeli 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hezekeli 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hez 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("UHEZEKELI 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HEZEKELI 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HEZ 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book Dan (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Dan (zu)", ->
`
expect(p.parse("UDaniyeli 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Daniyeli 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("UDANIYELI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DANIYELI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book Hos (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hos (zu)", ->
`
expect(p.parse("UHoseya 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hoseya 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("UHOSEYA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOSEYA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book Joel (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (zu)", ->
`
expect(p.parse("UJoweli 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joweli 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("UJOWELI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOWELI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (zu)", ->
`
expect(p.parse("U-Amose 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Amose 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("U-AMOSE 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AMOSE 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Obad (zu)", ->
`
expect(p.parse("U-Obadiya 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obadiya 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("U-OBADIYA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBADIYA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
`
true
describe "Localized book Jonah (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jonah (zu)", ->
`
expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("UJona 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
p.include_apocrypha(false)
expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("UJONA 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
`
true
# Generated book-name recognition specs (zu): minor prophets Mic through Mal.
# Each describe asserts the mixed-case Zulu forms with the Apocrypha enabled,
# then the all-caps forms after p.include_apocrypha(false). The backtick
# sections are raw JavaScript passed through by CoffeeScript; the trailing
# `true` appears to be emitted so the `it` callback returns a trivial value
# rather than the embedded-JS block's result.
describe "Localized book Mic (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (zu)", ->
		`
		expect(p.parse("UMika 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mika 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UMIKA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIKA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		`
		true
describe "Localized book Nah (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (zu)", ->
		`
		expect(p.parse("UNahume 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nahume 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNAHUME 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAHUME 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
		`
		true
describe "Localized book Hab (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (zu)", ->
		`
		expect(p.parse("UHabakuki 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Habakuki 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UHABAKUKI 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HABAKUKI 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
		`
		true
describe "Localized book Zeph (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (zu)", ->
		`
		expect(p.parse("UZefaniya 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zefaniya 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UZEFANIYA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEFANIYA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
		`
		true
describe "Localized book Hag (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (zu)", ->
		`
		expect(p.parse("UHagayi 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hagayi 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UHAGAYI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAGAYI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
		`
		true
describe "Localized book Zech (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (zu)", ->
		`
		expect(p.parse("UZakariya 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zakariya 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UZAKARIYA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZAKARIYA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
		`
		true
describe "Localized book Mal (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (zu)", ->
		`
		expect(p.parse("UMalaki 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Malaki 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UMALAKI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MALAKI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
		`
		true
# Generated book-name recognition specs (zu): Gospels, Johannine epistles,
# Acts, and Romans. Same shape as the other generated describes: mixed-case
# forms first, then all-caps forms after p.include_apocrypha(false).
describe "Localized book Matt (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (zu)", ->
		`
		expect(p.parse("NgokukaMathewu 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mathewu 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Math 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKAMATHEWU 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATHEWU 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATH 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
describe "Localized book Mark (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (zu)", ->
		`
		expect(p.parse("NgokukaMarku 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Marko 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Marku 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKAMARKU 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKO 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKU 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
		`
		true
describe "Localized book Luke (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (zu)", ->
		`
		expect(p.parse("NgokukaLuka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKALUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		`
		true
describe "Localized book 1John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (zu)", ->
		`
		expect(p.parse("1 kaJohane 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Johane 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KAJOHANE 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JOHANE 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
		`
		true
describe "Localized book 2John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (zu)", ->
		`
		expect(p.parse("2 kaJohane 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Johane 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KAJOHANE 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JOHANE 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
		`
		true
describe "Localized book 3John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (zu)", ->
		`
		expect(p.parse("3 kaJohane 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Johane 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("3 KAJOHANE 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JOHANE 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
		`
		true
describe "Localized book John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (zu)", ->
		`
		expect(p.parse("NgokukaJohane 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Johane 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKAJOHANE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHANE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		`
		true
# NOTE(review): "IZenzo" and "IzEnzo" fold to the same upper-case string, so
# the all-caps section below asserts "IZENZO" twice — a harmless artifact of
# the spec generator upper-casing each variant independently.
describe "Localized book Acts (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (zu)", ->
		`
		expect(p.parse("IZenzo 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("IzEnzo 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha(false)
		expect(p.parse("IZENZO 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("IZENZO 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		`
		true
describe "Localized book Rom (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (zu)", ->
		`
		expect(p.parse("KwabaseRoma 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("AmaRoma 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Roma 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASEROMA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("AMAROMA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		true
# Generated book-name recognition specs (zu): Corinthians through
# Thessalonians. Numbered epistles are generated in 2-before-1 order so the
# longer prefix ("2 ...") is exercised before the shorter one.
describe "Localized book 2Cor (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (zu)", ->
		`
		expect(p.parse("2 kwabaseKorinte 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korinte 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KWABASEKORINTE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
		`
		true
describe "Localized book 1Cor (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (zu)", ->
		`
		expect(p.parse("1 kwabaseKorinte 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinte 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KWABASEKORINTE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
		`
		true
describe "Localized book Gal (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gal (zu)", ->
		`
		expect(p.parse("KwabaseGalathiya 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Galathiya 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASEGALATHIYA 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GALATHIYA 1:1").osis()).toEqual("Gal.1.1")
		expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
		`
		true
describe "Localized book Eph (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eph (zu)", ->
		`
		expect(p.parse("Kwabase-Efesu 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Efesu 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASE-EFESU 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EFESU 1:1").osis()).toEqual("Eph.1.1")
		expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
		`
		true
describe "Localized book Phil (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (zu)", ->
		`
		expect(p.parse("KwabaseFilipi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filipi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASEFILIPI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
describe "Localized book Col (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (zu)", ->
		`
		expect(p.parse("KwabaseKolose 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kolose 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASEKOLOSE 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOLOSE 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		`
		true
describe "Localized book 2Thess (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (zu)", ->
		`
		expect(p.parse("2 kwabaseThesalonika 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Thesalonika 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KWABASETHESALONIKA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 THESALONIKA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
describe "Localized book 1Thess (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (zu)", ->
		`
		expect(p.parse("1 kwabaseThesalonika 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Thesalonika 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KWABASETHESALONIKA 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 THESALONIKA 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
		`
		true
# Generated book-name recognition specs (zu): Timothy through Jude. Shorter
# books (Titus, Phlm, Heb, Jude) have fewer localized variants, so their
# embedded-JS sections assert correspondingly fewer forms.
describe "Localized book 2Tim (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (zu)", ->
		`
		expect(p.parse("2 kuThimothewu 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Thimothewu 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KUTHIMOTHEWU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 THIMOTHEWU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		true
describe "Localized book 1Tim (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (zu)", ->
		`
		expect(p.parse("1 kuThimothewu 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Thimothewu 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KUTHIMOTHEWU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 THIMOTHEWU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		true
describe "Localized book Titus (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (zu)", ->
		`
		expect(p.parse("KuThithu 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KUTHITHU 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		`
		true
describe "Localized book Phlm (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (zu)", ->
		`
		expect(p.parse("KuFilemoni 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KUFILEMONI 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		`
		true
describe "Localized book Heb (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (zu)", ->
		`
		expect(p.parse("KumaHeberu 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KUMAHEBERU 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		true
describe "Localized book Jas (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (zu)", ->
		`
		expect(p.parse("EkaJakobe 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jakobe 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EKAJAKOBE 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAKOBE 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		true
describe "Localized book 2Pet (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (zu)", ->
		`
		expect(p.parse("2 kaPetru 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petru 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KAPETRU 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETRU 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		true
describe "Localized book 1Pet (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (zu)", ->
		`
		expect(p.parse("1 kaPetru 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petru 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KAPETRU 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETRU 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		true
describe "Localized book Jude (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (zu)", ->
		`
		expect(p.parse("EkaJuda 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EKAJUDA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		`
		true
# Generated Apocrypha stubs (zu): no localized Zulu names are defined for
# these books, so each describe only checks the OSIS abbreviation itself and
# omits the p.include_apocrypha(false) upper-case section.
describe "Localized book Tob (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (zu)", ->
		`
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		true
describe "Localized book Jdt (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (zu)", ->
		`
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		true
describe "Localized book Bar (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (zu)", ->
		`
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		true
describe "Localized book Sus (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (zu)", ->
		`
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		true
describe "Localized book 2Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (zu)", ->
		`
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		true
describe "Localized book 3Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (zu)", ->
		`
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		true
describe "Localized book 4Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (zu)", ->
		`
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		true
describe "Localized book 1Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (zu)", ->
		`
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		`
		true
# Cross-cutting specs for the zu locale: localized range ("kuye"), chapter
# ("isahluko"), verse ("ivesi"), and conjunction ("futhi") keywords, plus
# psalm titles, "ff" continuation, translation tags (zul59), and em-dash /
# curly-quote boundary handling.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["zu"]
	it "should handle ranges (zu)", ->
		expect(p.parse("Titus 1:1 kuye 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1kuye2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 KUYE 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (zu)", ->
		expect(p.parse("Titus 1:1, isahluko 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 ISAHLUKO 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (zu)", ->
		expect(p.parse("Exod 1:1 ivesi 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm IVESI 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (zu)", ->
		expect(p.parse("Exod 1:1 futhi 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 FUTHI 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (zu)", ->
		expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (zu)", ->
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (zu)", ->
		expect(p.parse("Lev 1 (zul59)").osis_and_translations()).toEqual [["Lev.1", "zul59"]]
		expect(p.parse("lev 1 zul59").osis_and_translations()).toEqual [["Lev.1", "zul59"]]
	it "should handle boundaries (zu)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
| 14142 | bcv_parser = require("../../js/zu_bcv_parser.js").bcv_parser
# Round-trip and boundary parsing specs for the generated zu (Zulu) parser:
# every canonical OSIS ID must parse back to itself at book.chapter,
# book.chapter.verse, and range granularity.
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		# Full protestant canon in OSIS book-ID order. Restored: several
		# entries had been corrupted into "<NAME>"-style placeholders
		# (Dan, Hos, Joel, Amos, Obad, Jonah, Mic, Nah, Mal, Matt, Mark,
		# Luke, John), which are not OSIS IDs and could never round-trip.
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With ps151_strategy "bc", Ps151 references are reported as Ps.151.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# Apocrypha books must stop parsing once the Apocrypha is disabled.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Generated book-name recognition specs (zu): Gen through PrMan. Books with
# no Zulu names (Bel, Sir, Wis, EpJer, PrMan) are OSIS-abbreviation-only
# stubs; the rest check localized forms in both cases.
describe "Localized book Gen (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (zu)", ->
		`
		expect(p.parse("UGenesise 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Genesise 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UGENESISE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GENESISE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
describe "Localized book Exod (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (zu)", ->
		`
		expect(p.parse("U-Eksodusi 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Eksodusi 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-EKSODUSI 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EKSODUSI 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
		`
		true
describe "Localized book Bel (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (zu)", ->
		`
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
describe "Localized book Lev (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (zu)", ->
		`
		expect(p.parse("ULevitikusi 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Levitikusi 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Levi 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ULEVITIKUSI 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVITIKUSI 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVI 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
describe "Localized book Num (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (zu)", ->
		`
		expect(p.parse("UNumeri 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNUMERI 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
describe "Localized book Sir (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (zu)", ->
		`
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
describe "Localized book Wis (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (zu)", ->
		`
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
describe "Localized book Lam (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (zu)", ->
		`
		expect(p.parse("IsiLilo 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ISILILO 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		`
		true
describe "Localized book EpJer (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (zu)", ->
		`
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
describe "Localized book Rev (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (zu)", ->
		`
		expect(p.parse("IsAmbulo 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Isamb 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Samb 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ISAMBULO 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ISAMB 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("SAMB 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (zu)", ->
		`
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
describe "Localized book Deut (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Deut (zu)", ->
`
expect(p.parse("UDuteronomi 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Duteronomi 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Duter 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
p.include_apocrypha(false)
expect(p.parse("UDUTERONOMI 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DUTERONOMI 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DUTER 1:1").osis()).toEqual("Deut.1.1")
expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
`
true
# Spec for the Zulu (zu) spellings of Joshua resolving to OSIS "Josh".
# FIX: the describe/it titles were corrupted to "<NAME>osh" by a text-scrubbing
# artifact; restored to "Josh" to match the OSIS id asserted below.
describe "Localized book Josh (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (zu)", ->
		`
		expect(p.parse("UJoshuwa 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Joshuwa 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UJOSHUWA 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSHUWA 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		`
		true
# Generated specs (Judg through 1Sam): same fixture pattern — fresh parser,
# Apocrypha on, mixed-case then upper-case Zulu spellings asserted against
# the OSIS id. Embedded-JS assertions are kept byte-identical.
describe "Localized book Judg (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (zu)", ->
		`
		expect(p.parse("AbAhluleli 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ABAHLULELI 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		`
		true
describe "Localized book Ruth (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (zu)", ->
		`
		expect(p.parse("URuthe 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruthe 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("URUTHE 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTHE 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (zu)", ->
		`
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
describe "Localized book 2Esd (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (zu)", ->
		`
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
describe "Localized book Isa (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (zu)", ->
		`
		expect(p.parse("U-Isaya 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isaya 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-ISAYA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISAYA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		`
		true
describe "Localized book 2Sam (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (zu)", ->
		`
		expect(p.parse("2 uSamuweli 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuweli 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 USAMUWELI 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUWELI 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
describe "Localized book 1Sam (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (zu)", ->
		`
		expect(p.parse("1 uSamuweli 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuweli 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 USAMUWELI 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUWELI 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
# Generated specs (2Kgs through Esth): same fixture pattern as the rest of
# this file. NOTE(review): the upper-case passes for 2Chr/1Chr repeat the
# same assertion twice because the two mixed-case spellings ("IziKronike" /
# "iziKronike") collapse to one upper-case form — generator artifact, harmless.
describe "Localized book 2Kgs (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (zu)", ->
		`
		expect(p.parse("2 AmaKhosi 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 AMAKHOSI 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true
describe "Localized book 1Kgs (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (zu)", ->
		`
		expect(p.parse("1 AmaKhosi 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 AMAKHOSI 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
		`
		true
describe "Localized book 2Chr (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (zu)", ->
		`
		expect(p.parse("2 IziKronike 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 iziKronike 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 IZIKRONIKE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 IZIKRONIKE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
		`
		true
describe "Localized book 1Chr (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (zu)", ->
		`
		expect(p.parse("1 IziKronike 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 iziKronike 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 IZIKRONIKE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 IZIKRONIKE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
		`
		true
describe "Localized book Ezra (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (zu)", ->
		`
		expect(p.parse("U-Ezra 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-EZRA 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
describe "Localized book Neh (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (zu)", ->
		`
		expect(p.parse("UNehemiya 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Nehemiya 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNEHEMIYA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEHEMIYA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
		`
		true
describe "Localized book GkEsth (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: GkEsth (zu)", ->
		`
		expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
		`
		true
describe "Localized book Esth (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Esth (zu)", ->
		`
		expect(p.parse("U-Esteri 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Esteri 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-ESTERI 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTERI 1:1").osis()).toEqual("Esth.1.1")
		expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
		`
		true
# Generated specs (Job through Ezek): same fixture pattern — fresh parser per
# test, Apocrypha toggled, Zulu spellings asserted against the OSIS id.
# Embedded-JS assertions are kept byte-identical (machine-generated file).
describe "Localized book Job (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Job (zu)", ->
		`
		expect(p.parse("UJobe 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("Jobe 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UJOBE 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("JOBE 1:1").osis()).toEqual("Job.1.1")
		expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
		`
		true
describe "Localized book Ps (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ps (zu)", ->
		`
		expect(p.parse("AmaHubo 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("IHubo 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
		p.include_apocrypha(false)
		expect(p.parse("AMAHUBO 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("IHUBO 1:1").osis()).toEqual("Ps.1.1")
		expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
		`
		true
describe "Localized book PrAzar (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrAzar (zu)", ->
		`
		expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
		`
		true
describe "Localized book Prov (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Prov (zu)", ->
		`
		expect(p.parse("IzAga 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
		p.include_apocrypha(false)
		expect(p.parse("IZAGA 1:1").osis()).toEqual("Prov.1.1")
		expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
		`
		true
describe "Localized book Eccl (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Eccl (zu)", ->
		`
		expect(p.parse("UmShumayeli 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Mshumayeli 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UMSHUMAYELI 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("MSHUMAYELI 1:1").osis()).toEqual("Eccl.1.1")
		expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
		`
		true
describe "Localized book SgThree (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: SgThree (zu)", ->
		`
		expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
		`
		true
describe "Localized book Song (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Song (zu)", ->
		`
		expect(p.parse("IsiHlabelelo SeziHlabelelo 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("IsiHlabelelo seziHlabelelo 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ISIHLABELELO SEZIHLABELELO 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("ISIHLABELELO SEZIHLABELELO 1:1").osis()).toEqual("Song.1.1")
		expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
		`
		true
describe "Localized book Jer (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jer (zu)", ->
		`
		expect(p.parse("UJeremiya 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jeremiya 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UJEREMIYA 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JEREMIYA 1:1").osis()).toEqual("Jer.1.1")
		expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
		`
		true
describe "Localized book Ezek (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezek (zu)", ->
		`
		expect(p.parse("UHezekeli 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Hezekeli 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("Hez 1:1").osis()).toEqual("Ezek.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UHEZEKELI 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("HEZEKELI 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
		expect(p.parse("HEZ 1:1").osis()).toEqual("Ezek.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of Daniel resolving to OSIS "Dan".
# FIX: the describe/it titles were corrupted to "<NAME>" by a text-scrubbing
# artifact; restored to "Dan" to match the OSIS id asserted below.
describe "Localized book Dan (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Dan (zu)", ->
		`
		expect(p.parse("UDaniyeli 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Daniyeli 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UDANIYELI 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DANIYELI 1:1").osis()).toEqual("Dan.1.1")
		expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
		`
		true
# Generated specs (Hos, Joel, Amos): same fixture pattern as the rest of the
# file; embedded-JS assertions kept byte-identical.
describe "Localized book Hos (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hos (zu)", ->
		`
		expect(p.parse("UHoseya 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("Hoseya 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UHOSEYA 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOSEYA 1:1").osis()).toEqual("Hos.1.1")
		expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
		`
		true
describe "Localized book Joel (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Joel (zu)", ->
		`
		expect(p.parse("UJoweli 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("Joweli 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UJOWELI 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("JOWELI 1:1").osis()).toEqual("Joel.1.1")
		expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
		`
		true
describe "Localized book Amos (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Amos (zu)", ->
		`
		expect(p.parse("U-Amose 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("Amose 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-AMOSE 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("AMOSE 1:1").osis()).toEqual("Amos.1.1")
		expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of Obadiah resolving to OSIS "Obad".
# FIX: the it-title was corrupted to "<NAME>" by a text-scrubbing artifact;
# restored to "Obad" to match the describe title and asserted OSIS id.
describe "Localized book Obad (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Obad (zu)", ->
		`
		expect(p.parse("U-Obadiya 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Obadiya 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-OBADIYA 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBADIYA 1:1").osis()).toEqual("Obad.1.1")
		expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of Jonah resolving to OSIS "Jonah".
# FIX: the describe/it titles were corrupted to "<NAME>ah" by a text-scrubbing
# artifact; restored to "Jonah" to match the OSIS id asserted below.
describe "Localized book Jonah (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jonah (zu)", ->
		`
		expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("UJona 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("UJONA 1:1").osis()).toEqual("Jonah.1.1")
		expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
		`
		true
# Generated specs (Mic through Mal, the remaining minor prophets): same
# fixture pattern as the rest of the file; embedded-JS assertions kept
# byte-identical.
describe "Localized book Mic (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mic (zu)", ->
		`
		expect(p.parse("UMika 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mika 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UMIKA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIKA 1:1").osis()).toEqual("Mic.1.1")
		expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
		`
		true
describe "Localized book Nah (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Nah (zu)", ->
		`
		expect(p.parse("UNahume 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nahume 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNAHUME 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAHUME 1:1").osis()).toEqual("Nah.1.1")
		expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
		`
		true
describe "Localized book Hab (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hab (zu)", ->
		`
		expect(p.parse("UHabakuki 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Habakuki 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UHABAKUKI 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HABAKUKI 1:1").osis()).toEqual("Hab.1.1")
		expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
		`
		true
describe "Localized book Zeph (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zeph (zu)", ->
		`
		expect(p.parse("UZefaniya 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zefaniya 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UZEFANIYA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEFANIYA 1:1").osis()).toEqual("Zeph.1.1")
		expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
		`
		true
describe "Localized book Hag (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Hag (zu)", ->
		`
		expect(p.parse("UHagayi 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hagayi 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UHAGAYI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAGAYI 1:1").osis()).toEqual("Hag.1.1")
		expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
		`
		true
describe "Localized book Zech (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Zech (zu)", ->
		`
		expect(p.parse("UZakariya 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zakariya 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UZAKARIYA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZAKARIYA 1:1").osis()).toEqual("Zech.1.1")
		expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
		`
		true
describe "Localized book Mal (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mal (zu)", ->
		`
		expect(p.parse("UMalaki 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Malaki 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UMALAKI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MALAKI 1:1").osis()).toEqual("Mal.1.1")
		expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of Matthew resolving to OSIS "Matt".
# FIX: the describe/it titles were corrupted to "M<NAME>" / "<NAME>" by a
# text-scrubbing artifact; restored to "Matt" to match the asserted OSIS id.
describe "Localized book Matt (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Matt (zu)", ->
		`
		expect(p.parse("NgokukaMathewu 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Mathewu 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Math 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKAMATHEWU 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATHEWU 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATH 1:1").osis()).toEqual("Matt.1.1")
		expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
		`
		true
# Generated spec (Mark): same fixture pattern as the rest of the file;
# embedded-JS assertions kept byte-identical.
describe "Localized book Mark (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Mark (zu)", ->
		`
		expect(p.parse("NgokukaMarku 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Marko 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Marku 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKAMARKU 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKO 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARKU 1:1").osis()).toEqual("Mark.1.1")
		expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of Luke resolving to OSIS "Luke".
# FIX: the describe/it titles were corrupted to "<NAME>" by a text-scrubbing
# artifact; restored to "Luke" to match the OSIS id asserted below.
describe "Localized book Luke (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Luke (zu)", ->
		`
		expect(p.parse("NgokukaLuka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luka 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKALUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKA 1:1").osis()).toEqual("Luke.1.1")
		expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of 1 John resolving to OSIS "1John".
# FIX: the it-title was corrupted to "<NAME>John" by a text-scrubbing
# artifact; restored to "1John" to match the describe title and OSIS id.
describe "Localized book 1John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1John (zu)", ->
		`
		expect(p.parse("1 kaJohane 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 Johane 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KAJOHANE 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1 JOHANE 1:1").osis()).toEqual("1John.1.1")
		expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of 2 John resolving to OSIS "2John".
# FIX: the describe title was corrupted to "<NAME>John" by a text-scrubbing
# artifact; restored to "2John" to match the it-title and asserted OSIS id.
describe "Localized book 2John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2John (zu)", ->
		`
		expect(p.parse("2 kaJohane 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 Johane 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KAJOHANE 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2 JOHANE 1:1").osis()).toEqual("2John.1.1")
		expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
		`
		true
# Generated spec (3John): same fixture pattern as the rest of the file;
# embedded-JS assertions kept byte-identical.
describe "Localized book 3John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3John (zu)", ->
		`
		expect(p.parse("3 kaJohane 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 Johane 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("3 KAJOHANE 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3 JOHANE 1:1").osis()).toEqual("3John.1.1")
		expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
		`
		true
# Spec for the Zulu (zu) spellings of John resolving to OSIS "John".
# FIX: the describe/it titles were corrupted to "<NAME>" by a text-scrubbing
# artifact; restored to "John" to match the OSIS id asserted below.
describe "Localized book John (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: John (zu)", ->
		`
		expect(p.parse("NgokukaJohane 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("Johane 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
		p.include_apocrypha(false)
		expect(p.parse("NGOKUKAJOHANE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHANE 1:1").osis()).toEqual("John.1.1")
		expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
		`
		true
# Generated specs (Acts, Rom, 2Cor, 1Cor): same fixture pattern as the rest
# of the file. NOTE(review): Acts repeats its first upper-case assertion
# because "IZenzo" and "IzEnzo" collapse to the same upper-case form —
# generator artifact, harmless.
describe "Localized book Acts (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Acts (zu)", ->
		`
		expect(p.parse("IZenzo 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("IzEnzo 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
		p.include_apocrypha(false)
		expect(p.parse("IZENZO 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("IZENZO 1:1").osis()).toEqual("Acts.1.1")
		expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
		`
		true
describe "Localized book Rom (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rom (zu)", ->
		`
		expect(p.parse("KwabaseRoma 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("AmaRoma 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Roma 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASEROMA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("AMAROMA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROMA 1:1").osis()).toEqual("Rom.1.1")
		expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
		`
		true
describe "Localized book 2Cor (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Cor (zu)", ->
		`
		expect(p.parse("2 kwabaseKorinte 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Korinte 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KWABASEKORINTE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KORINTE 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
		expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
		`
		true
describe "Localized book 1Cor (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Cor (zu)", ->
		`
		expect(p.parse("1 kwabaseKorinte 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Korinte 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KWABASEKORINTE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KORINTE 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
		expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
		`
		true
describe "Localized book Gal (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (zu)", ->
`
expect(p.parse("KwabaseGalathiya 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galathiya 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("KWABASEGALATHIYA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATHIYA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (zu)", ->
`
expect(p.parse("Kwabase-Efesu 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efesu 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("KWABASE-EFESU 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFESU 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
`
true
# Fixed anonymization artifact: the it-title contained the placeholder
# "<NAME>"; the describe title and all expectations identify the book as Phil.
describe "Localized book Phil (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phil (zu)", ->
		`
		expect(p.parse("KwabaseFilipi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Filipi 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASEFILIPI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("FILIPI 1:1").osis()).toEqual("Phil.1.1")
		expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
		`
		true
# Auto-generated Jasmine specs for the remaining NT epistles and the
# Apocrypha. Deuterocanonical books (Tob, Jdt, Bar, Sus, 1-4 Macc) only have
# the OSIS abbreviation itself as a recognized Zulu form, hence the single
# expectation; they are parsed only while include_apocrypha(true) is active.
describe "Localized book Col (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Col (zu)", ->
		`
		expect(p.parse("KwabaseKolose 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Kolose 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KWABASEKOLOSE 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("KOLOSE 1:1").osis()).toEqual("Col.1.1")
		expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
		`
		true
describe "Localized book 2Thess (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Thess (zu)", ->
		`
		expect(p.parse("2 kwabaseThesalonika 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 Thesalonika 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KWABASETHESALONIKA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2 THESALONIKA 1:1").osis()).toEqual("2Thess.1.1")
		expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
		`
		true
describe "Localized book 1Thess (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Thess (zu)", ->
		`
		expect(p.parse("1 kwabaseThesalonika 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 Thesalonika 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KWABASETHESALONIKA 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1 THESALONIKA 1:1").osis()).toEqual("1Thess.1.1")
		expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
		`
		true
describe "Localized book 2Tim (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Tim (zu)", ->
		`
		expect(p.parse("2 kuThimothewu 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 Thimothewu 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KUTHIMOTHEWU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2 THIMOTHEWU 1:1").osis()).toEqual("2Tim.1.1")
		expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
		`
		true
describe "Localized book 1Tim (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Tim (zu)", ->
		`
		expect(p.parse("1 kuThimothewu 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 Thimothewu 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KUTHIMOTHEWU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1 THIMOTHEWU 1:1").osis()).toEqual("1Tim.1.1")
		expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
		`
		true
describe "Localized book Titus (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Titus (zu)", ->
		`
		expect(p.parse("KuThithu 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KUTHITHU 1:1").osis()).toEqual("Titus.1.1")
		expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
		`
		true
describe "Localized book Phlm (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Phlm (zu)", ->
		`
		expect(p.parse("KuFilemoni 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KUFILEMONI 1:1").osis()).toEqual("Phlm.1.1")
		expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
		`
		true
describe "Localized book Heb (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Heb (zu)", ->
		`
		expect(p.parse("KumaHeberu 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
		p.include_apocrypha(false)
		expect(p.parse("KUMAHEBERU 1:1").osis()).toEqual("Heb.1.1")
		expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
		`
		true
describe "Localized book Jas (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jas (zu)", ->
		`
		expect(p.parse("EkaJakobe 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jakobe 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EKAJAKOBE 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAKOBE 1:1").osis()).toEqual("Jas.1.1")
		expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
		`
		true
describe "Localized book 2Pet (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Pet (zu)", ->
		`
		expect(p.parse("2 kaPetru 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 Petru 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 KAPETRU 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2 PETRU 1:1").osis()).toEqual("2Pet.1.1")
		expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
		`
		true
describe "Localized book 1Pet (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Pet (zu)", ->
		`
		expect(p.parse("1 kaPetru 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 Petru 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 KAPETRU 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1 PETRU 1:1").osis()).toEqual("1Pet.1.1")
		expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
		`
		true
describe "Localized book Jude (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jude (zu)", ->
		`
		expect(p.parse("EkaJuda 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
		p.include_apocrypha(false)
		expect(p.parse("EKAJUDA 1:1").osis()).toEqual("Jude.1.1")
		expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
		`
		true
describe "Localized book Tob (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Tob (zu)", ->
		`
		expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
		`
		true
describe "Localized book Jdt (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Jdt (zu)", ->
		`
		expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
		`
		true
describe "Localized book Bar (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bar (zu)", ->
		`
		expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
		`
		true
describe "Localized book Sus (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sus (zu)", ->
		`
		expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
		`
		true
describe "Localized book 2Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Macc (zu)", ->
		`
		expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
		`
		true
describe "Localized book 3Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 3Macc (zu)", ->
		`
		expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
		`
		true
describe "Localized book 4Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 4Macc (zu)", ->
		`
		expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
		`
		true
describe "Localized book 1Macc (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Macc (zu)", ->
		`
		expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
		`
		true
# Locale-level behavior beyond book names: Zulu range ("kuye"), chapter
# ("isahluko"), verse ("ivesi"), and conjunction ("futhi") keywords, plus
# psalm titles, "ff" (following verses), translation tags, and quote/dash
# boundaries around references.
describe "Miscellaneous tests", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should return the expected language", ->
		expect(p.languages).toEqual ["zu"]
	it "should handle ranges (zu)", ->
		expect(p.parse("Titus 1:1 kuye 2").osis()).toEqual "Titus.1.1-Titus.1.2"
		expect(p.parse("Matt 1kuye2").osis()).toEqual "Matt.1-Matt.2"
		expect(p.parse("Phlm 2 KUYE 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
	it "should handle chapters (zu)", ->
		expect(p.parse("Titus 1:1, isahluko 2").osis()).toEqual "Titus.1.1,Titus.2"
		expect(p.parse("Matt 3:4 ISAHLUKO 6").osis()).toEqual "Matt.3.4,Matt.6"
	it "should handle verses (zu)", ->
		expect(p.parse("Exod 1:1 ivesi 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm IVESI 6").osis()).toEqual "Phlm.1.6"
	it "should handle 'and' (zu)", ->
		expect(p.parse("Exod 1:1 futhi 3").osis()).toEqual "Exod.1.1,Exod.1.3"
		expect(p.parse("Phlm 2 FUTHI 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
	it "should handle titles (zu)", ->
		expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
		expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
	it "should handle 'ff' (zu)", ->
		expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
		expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
	it "should handle translations (zu)", ->
		expect(p.parse("Lev 1 (zul59)").osis_and_translations()).toEqual [["Lev.1", "zul59"]]
		expect(p.parse("lev 1 zul59").osis_and_translations()).toEqual [["Lev.1", "zul59"]]
	it "should handle boundaries (zu)", ->
		p.set_options {book_alone_strategy: "full"}
		expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
		expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
# Load the Zulu build of the parser under test. The original line was
# corrupted by dataset-join residue ("| true |") fused onto the require
# statement; restored the plain assignment.
bcv_parser = require("../../js/zu_bcv_parser.js").bcv_parser
# Round-trip sanity checks: any canonical OSIS reference should parse back to
# itself; Apocrypha books only resolve while include_apocrypha(true) is set.
describe "Parsing", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.options.osis_compaction_strategy = "b"
		p.options.sequence_combination_strategy = "combine"
	it "should round-trip OSIS references", ->
		p.set_options osis_compaction_strategy: "bc"
		# Fixed: the book list had been mangled by anonymization artifacts
		# ("PI:NAME:...END_PI"); restored the canonical 66-book OSIS sequence,
		# consistent with the surviving fragments ("...mos" -> Amos,
		# "...ad" -> Obad, "...ah" -> Nah) and standard OSIS order.
		books = ["Gen","Exod","Lev","Num","Deut","Josh","Judg","Ruth","1Sam","2Sam","1Kgs","2Kgs","1Chr","2Chr","Ezra","Neh","Esth","Job","Ps","Prov","Eccl","Song","Isa","Jer","Lam","Ezek","Dan","Hos","Joel","Amos","Obad","Jonah","Mic","Nah","Hab","Zeph","Hag","Zech","Mal","Matt","Mark","Luke","John","Acts","Rom","1Cor","2Cor","Gal","Eph","Phil","Col","1Thess","2Thess","1Tim","2Tim","Titus","Phlm","Heb","Jas","1Pet","2Pet","1John","2John","3John","Jude","Rev"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
	it "should round-trip OSIS Apocrypha references", ->
		p.set_options osis_compaction_strategy: "bc", ps151_strategy: "b"
		p.include_apocrypha true
		books = ["Tob","Jdt","GkEsth","Wis","Sir","Bar","PrAzar","Sus","Bel","SgThree","EpJer","1Macc","2Macc","3Macc","4Macc","1Esd","2Esd","PrMan","Ps151"]
		for book in books
			bc = book + ".1"
			bcv = bc + ".1"
			bcv_range = bcv + "-" + bc + ".2"
			expect(p.parse(bc).osis()).toEqual bc
			expect(p.parse(bcv).osis()).toEqual bcv
			expect(p.parse(bcv_range).osis()).toEqual bcv_range
		# With ps151_strategy "bc", Ps151 references are folded into Ps 151.
		p.set_options ps151_strategy: "bc"
		expect(p.parse("Ps151.1").osis()).toEqual "Ps.151"
		expect(p.parse("Ps151.1.1").osis()).toEqual "Ps.151.1"
		expect(p.parse("Ps151.1-Ps151.2").osis()).toEqual "Ps.151.1-Ps.151.2"
		# Once the Apocrypha is excluded, the same books stop parsing entirely.
		p.include_apocrypha false
		for book in books
			bc = book + ".1"
			expect(p.parse(bc).osis()).toEqual ""
	it "should handle a preceding character", ->
		expect(p.parse(" Gen 1").osis()).toEqual "Gen.1"
		expect(p.parse("Matt5John3").osis()).toEqual "Matt.5,John.3"
		expect(p.parse("1Ps 1").osis()).toEqual ""
		expect(p.parse("11Sam 1").osis()).toEqual ""
# Auto-generated Jasmine specs for Pentateuch books plus interleaved
# Apocrypha (Bel, Sir, Wis, EpJer, PrMan) and Lam/Rev; same pattern as above:
# Zulu names in mixed and upper case must resolve to the OSIS book id.
describe "Localized book Gen (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Gen (zu)", ->
		`
		expect(p.parse("UGenesise 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Genesise 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("Gen 1:1").osis()).toEqual("Gen.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UGENESISE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GENESISE 1:1").osis()).toEqual("Gen.1.1")
		expect(p.parse("GEN 1:1").osis()).toEqual("Gen.1.1")
		`
		true
describe "Localized book Exod (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Exod (zu)", ->
		`
		expect(p.parse("U-Eksodusi 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Eksodusi 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Exod 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("Eks 1:1").osis()).toEqual("Exod.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-EKSODUSI 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EKSODUSI 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EXOD 1:1").osis()).toEqual("Exod.1.1")
		expect(p.parse("EKS 1:1").osis()).toEqual("Exod.1.1")
		`
		true
describe "Localized book Bel (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Bel (zu)", ->
		`
		expect(p.parse("Bel 1:1").osis()).toEqual("Bel.1.1")
		`
		true
describe "Localized book Lev (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lev (zu)", ->
		`
		expect(p.parse("ULevitikusi 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Levitikusi 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Levi 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("Lev 1:1").osis()).toEqual("Lev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ULEVITIKUSI 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVITIKUSI 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEVI 1:1").osis()).toEqual("Lev.1.1")
		expect(p.parse("LEV 1:1").osis()).toEqual("Lev.1.1")
		`
		true
describe "Localized book Num (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Num (zu)", ->
		`
		expect(p.parse("UNumeri 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Numeri 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("Num 1:1").osis()).toEqual("Num.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNUMERI 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUMERI 1:1").osis()).toEqual("Num.1.1")
		expect(p.parse("NUM 1:1").osis()).toEqual("Num.1.1")
		`
		true
describe "Localized book Sir (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Sir (zu)", ->
		`
		expect(p.parse("Sir 1:1").osis()).toEqual("Sir.1.1")
		`
		true
describe "Localized book Wis (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Wis (zu)", ->
		`
		expect(p.parse("Wis 1:1").osis()).toEqual("Wis.1.1")
		`
		true
describe "Localized book Lam (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Lam (zu)", ->
		`
		expect(p.parse("IsiLilo 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("Lam 1:1").osis()).toEqual("Lam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ISILILO 1:1").osis()).toEqual("Lam.1.1")
		expect(p.parse("LAM 1:1").osis()).toEqual("Lam.1.1")
		`
		true
describe "Localized book EpJer (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: EpJer (zu)", ->
		`
		expect(p.parse("EpJer 1:1").osis()).toEqual("EpJer.1.1")
		`
		true
describe "Localized book Rev (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Rev (zu)", ->
		`
		expect(p.parse("IsAmbulo 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Isamb 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Samb 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("Rev 1:1").osis()).toEqual("Rev.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ISAMBULO 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("ISAMB 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("SAMB 1:1").osis()).toEqual("Rev.1.1")
		expect(p.parse("REV 1:1").osis()).toEqual("Rev.1.1")
		`
		true
describe "Localized book PrMan (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: PrMan (zu)", ->
		`
		expect(p.parse("PrMan 1:1").osis()).toEqual("PrMan.1.1")
		`
		true
describe "Localized book Deut (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Deut (zu)", ->
		`
		expect(p.parse("UDuteronomi 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Duteronomi 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Duter 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("Deut 1:1").osis()).toEqual("Deut.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UDUTERONOMI 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DUTERONOMI 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DUTER 1:1").osis()).toEqual("Deut.1.1")
		expect(p.parse("DEUT 1:1").osis()).toEqual("Deut.1.1")
		`
		true
# Fixed anonymization artifact: both titles contained "PI:NAME:<NAME>END_PIosh";
# the surviving "osh" suffix and the "Josh.1.1" expectations identify the book
# as Josh.
describe "Localized book Josh (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Josh (zu)", ->
		`
		expect(p.parse("UJoshuwa 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Joshuwa 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("Josh 1:1").osis()).toEqual("Josh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UJOSHUWA 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSHUWA 1:1").osis()).toEqual("Josh.1.1")
		expect(p.parse("JOSH 1:1").osis()).toEqual("Josh.1.1")
		`
		true
# Auto-generated Jasmine specs for the historical books (Judg through Neh)
# plus 1Esd/2Esd and Isa; same mixed-case/upper-case OSIS resolution pattern.
describe "Localized book Judg (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Judg (zu)", ->
		`
		expect(p.parse("AbAhluleli 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("Judg 1:1").osis()).toEqual("Judg.1.1")
		p.include_apocrypha(false)
		expect(p.parse("ABAHLULELI 1:1").osis()).toEqual("Judg.1.1")
		expect(p.parse("JUDG 1:1").osis()).toEqual("Judg.1.1")
		`
		true
describe "Localized book Ruth (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ruth (zu)", ->
		`
		expect(p.parse("URuthe 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruthe 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("Ruth 1:1").osis()).toEqual("Ruth.1.1")
		p.include_apocrypha(false)
		expect(p.parse("URUTHE 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTHE 1:1").osis()).toEqual("Ruth.1.1")
		expect(p.parse("RUTH 1:1").osis()).toEqual("Ruth.1.1")
		`
		true
describe "Localized book 1Esd (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Esd (zu)", ->
		`
		expect(p.parse("1Esd 1:1").osis()).toEqual("1Esd.1.1")
		`
		true
describe "Localized book 2Esd (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Esd (zu)", ->
		`
		expect(p.parse("2Esd 1:1").osis()).toEqual("2Esd.1.1")
		`
		true
describe "Localized book Isa (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Isa (zu)", ->
		`
		expect(p.parse("U-Isaya 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isaya 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("Isa 1:1").osis()).toEqual("Isa.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-ISAYA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISAYA 1:1").osis()).toEqual("Isa.1.1")
		expect(p.parse("ISA 1:1").osis()).toEqual("Isa.1.1")
		`
		true
describe "Localized book 2Sam (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Sam (zu)", ->
		`
		expect(p.parse("2 uSamuweli 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Samuweli 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 Sam 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2Sam 1:1").osis()).toEqual("2Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 USAMUWELI 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAMUWELI 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2 SAM 1:1").osis()).toEqual("2Sam.1.1")
		expect(p.parse("2SAM 1:1").osis()).toEqual("2Sam.1.1")
		`
		true
describe "Localized book 1Sam (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Sam (zu)", ->
		`
		expect(p.parse("1 uSamuweli 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Samuweli 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 Sam 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1Sam 1:1").osis()).toEqual("1Sam.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 USAMUWELI 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAMUWELI 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1 SAM 1:1").osis()).toEqual("1Sam.1.1")
		expect(p.parse("1SAM 1:1").osis()).toEqual("1Sam.1.1")
		`
		true
describe "Localized book 2Kgs (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Kgs (zu)", ->
		`
		expect(p.parse("2 AmaKhosi 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2Kgs 1:1").osis()).toEqual("2Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 AMAKHOSI 1:1").osis()).toEqual("2Kgs.1.1")
		expect(p.parse("2KGS 1:1").osis()).toEqual("2Kgs.1.1")
		`
		true
describe "Localized book 1Kgs (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Kgs (zu)", ->
		`
		expect(p.parse("1 AmaKhosi 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1Kgs 1:1").osis()).toEqual("1Kgs.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 AMAKHOSI 1:1").osis()).toEqual("1Kgs.1.1")
		expect(p.parse("1KGS 1:1").osis()).toEqual("1Kgs.1.1")
		`
		true
describe "Localized book 2Chr (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 2Chr (zu)", ->
		`
		expect(p.parse("2 IziKronike 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 iziKronike 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2Chr 1:1").osis()).toEqual("2Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("2 IZIKRONIKE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2 IZIKRONIKE 1:1").osis()).toEqual("2Chr.1.1")
		expect(p.parse("2CHR 1:1").osis()).toEqual("2Chr.1.1")
		`
		true
describe "Localized book 1Chr (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: 1Chr (zu)", ->
		`
		expect(p.parse("1 IziKronike 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 iziKronike 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1Chr 1:1").osis()).toEqual("1Chr.1.1")
		p.include_apocrypha(false)
		expect(p.parse("1 IZIKRONIKE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1 IZIKRONIKE 1:1").osis()).toEqual("1Chr.1.1")
		expect(p.parse("1CHR 1:1").osis()).toEqual("1Chr.1.1")
		`
		true
describe "Localized book Ezra (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Ezra (zu)", ->
		`
		expect(p.parse("U-Ezra 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("Ezra 1:1").osis()).toEqual("Ezra.1.1")
		p.include_apocrypha(false)
		expect(p.parse("U-EZRA 1:1").osis()).toEqual("Ezra.1.1")
		expect(p.parse("EZRA 1:1").osis()).toEqual("Ezra.1.1")
		`
		true
describe "Localized book Neh (zu)", ->
	p = {}
	beforeEach ->
		p = new bcv_parser
		p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
		p.include_apocrypha true
	it "should handle book: Neh (zu)", ->
		`
		expect(p.parse("UNehemiya 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Nehemiya 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("Neh 1:1").osis()).toEqual("Neh.1.1")
		p.include_apocrypha(false)
		expect(p.parse("UNEHEMIYA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEHEMIYA 1:1").osis()).toEqual("Neh.1.1")
		expect(p.parse("NEH 1:1").osis()).toEqual("Neh.1.1")
		`
		true
describe "Localized book GkEsth (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: GkEsth (zu)", ->
`
expect(p.parse("GkEsth 1:1").osis()).toEqual("GkEsth.1.1")
`
true
describe "Localized book Esth (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Esth (zu)", ->
`
expect(p.parse("U-Esteri 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esteri 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("Esth 1:1").osis()).toEqual("Esth.1.1")
p.include_apocrypha(false)
expect(p.parse("U-ESTERI 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTERI 1:1").osis()).toEqual("Esth.1.1")
expect(p.parse("ESTH 1:1").osis()).toEqual("Esth.1.1")
`
true
describe "Localized book Job (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Job (zu)", ->
`
expect(p.parse("UJobe 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Jobe 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("Job 1:1").osis()).toEqual("Job.1.1")
p.include_apocrypha(false)
expect(p.parse("UJOBE 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOBE 1:1").osis()).toEqual("Job.1.1")
expect(p.parse("JOB 1:1").osis()).toEqual("Job.1.1")
`
true
describe "Localized book Ps (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ps (zu)", ->
`
expect(p.parse("AmaHubo 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("IHubo 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("Ps 1:1").osis()).toEqual("Ps.1.1")
p.include_apocrypha(false)
expect(p.parse("AMAHUBO 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("IHUBO 1:1").osis()).toEqual("Ps.1.1")
expect(p.parse("PS 1:1").osis()).toEqual("Ps.1.1")
`
true
describe "Localized book PrAzar (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PrAzar (zu)", ->
`
expect(p.parse("PrAzar 1:1").osis()).toEqual("PrAzar.1.1")
`
true
describe "Localized book Prov (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Prov (zu)", ->
`
expect(p.parse("IzAga 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("Prov 1:1").osis()).toEqual("Prov.1.1")
p.include_apocrypha(false)
expect(p.parse("IZAGA 1:1").osis()).toEqual("Prov.1.1")
expect(p.parse("PROV 1:1").osis()).toEqual("Prov.1.1")
`
true
describe "Localized book Eccl (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eccl (zu)", ->
`
expect(p.parse("UmShumayeli 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Mshumayeli 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("Eccl 1:1").osis()).toEqual("Eccl.1.1")
p.include_apocrypha(false)
expect(p.parse("UMSHUMAYELI 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("MSHUMAYELI 1:1").osis()).toEqual("Eccl.1.1")
expect(p.parse("ECCL 1:1").osis()).toEqual("Eccl.1.1")
`
true
describe "Localized book SgThree (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: SgThree (zu)", ->
`
expect(p.parse("SgThree 1:1").osis()).toEqual("SgThree.1.1")
`
true
describe "Localized book Song (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Song (zu)", ->
`
expect(p.parse("IsiHlabelelo SeziHlabelelo 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("IsiHlabelelo seziHlabelelo 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("Song 1:1").osis()).toEqual("Song.1.1")
p.include_apocrypha(false)
expect(p.parse("ISIHLABELELO SEZIHLABELELO 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("ISIHLABELELO SEZIHLABELELO 1:1").osis()).toEqual("Song.1.1")
expect(p.parse("SONG 1:1").osis()).toEqual("Song.1.1")
`
true
describe "Localized book Jer (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jer (zu)", ->
`
expect(p.parse("UJeremiya 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jeremiya 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("Jer 1:1").osis()).toEqual("Jer.1.1")
p.include_apocrypha(false)
expect(p.parse("UJEREMIYA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JEREMIYA 1:1").osis()).toEqual("Jer.1.1")
expect(p.parse("JER 1:1").osis()).toEqual("Jer.1.1")
`
true
describe "Localized book Ezek (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Ezek (zu)", ->
`
expect(p.parse("UHezekeli 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hezekeli 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Ezek 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("Hez 1:1").osis()).toEqual("Ezek.1.1")
p.include_apocrypha(false)
expect(p.parse("UHEZEKELI 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HEZEKELI 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("EZEK 1:1").osis()).toEqual("Ezek.1.1")
expect(p.parse("HEZ 1:1").osis()).toEqual("Ezek.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (zu)", ->
`
expect(p.parse("UDaniyeli 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Daniyeli 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("Dan 1:1").osis()).toEqual("Dan.1.1")
p.include_apocrypha(false)
expect(p.parse("UDANIYELI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DANIYELI 1:1").osis()).toEqual("Dan.1.1")
expect(p.parse("DAN 1:1").osis()).toEqual("Dan.1.1")
`
true
describe "Localized book Hos (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hos (zu)", ->
`
expect(p.parse("UHoseya 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hoseya 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("Hos 1:1").osis()).toEqual("Hos.1.1")
p.include_apocrypha(false)
expect(p.parse("UHOSEYA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOSEYA 1:1").osis()).toEqual("Hos.1.1")
expect(p.parse("HOS 1:1").osis()).toEqual("Hos.1.1")
`
true
describe "Localized book Joel (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Joel (zu)", ->
`
expect(p.parse("UJoweli 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joweli 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("Joel 1:1").osis()).toEqual("Joel.1.1")
p.include_apocrypha(false)
expect(p.parse("UJOWELI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOWELI 1:1").osis()).toEqual("Joel.1.1")
expect(p.parse("JOEL 1:1").osis()).toEqual("Joel.1.1")
`
true
describe "Localized book Amos (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Amos (zu)", ->
`
expect(p.parse("U-Amose 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Amose 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("Amos 1:1").osis()).toEqual("Amos.1.1")
p.include_apocrypha(false)
expect(p.parse("U-AMOSE 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AMOSE 1:1").osis()).toEqual("Amos.1.1")
expect(p.parse("AMOS 1:1").osis()).toEqual("Amos.1.1")
`
true
describe "Localized book Obad (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (zu)", ->
`
expect(p.parse("U-Obadiya 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obadiya 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("Obad 1:1").osis()).toEqual("Obad.1.1")
p.include_apocrypha(false)
expect(p.parse("U-OBADIYA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBADIYA 1:1").osis()).toEqual("Obad.1.1")
expect(p.parse("OBAD 1:1").osis()).toEqual("Obad.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIah (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIah (zu)", ->
`
expect(p.parse("Jonah 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("UJona 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("Jona 1:1").osis()).toEqual("Jonah.1.1")
p.include_apocrypha(false)
expect(p.parse("JONAH 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("UJONA 1:1").osis()).toEqual("Jonah.1.1")
expect(p.parse("JONA 1:1").osis()).toEqual("Jonah.1.1")
`
true
describe "Localized book Mic (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mic (zu)", ->
`
expect(p.parse("UMika 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mika 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("Mic 1:1").osis()).toEqual("Mic.1.1")
p.include_apocrypha(false)
expect(p.parse("UMIKA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MIKA 1:1").osis()).toEqual("Mic.1.1")
expect(p.parse("MIC 1:1").osis()).toEqual("Mic.1.1")
`
true
describe "Localized book Nah (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Nah (zu)", ->
`
expect(p.parse("UNahume 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("Nahume 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("Nah 1:1").osis()).toEqual("Nah.1.1")
p.include_apocrypha(false)
expect(p.parse("UNAHUME 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("NAHUME 1:1").osis()).toEqual("Nah.1.1")
expect(p.parse("NAH 1:1").osis()).toEqual("Nah.1.1")
`
true
describe "Localized book Hab (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hab (zu)", ->
`
expect(p.parse("UHabakuki 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("Habakuki 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("Hab 1:1").osis()).toEqual("Hab.1.1")
p.include_apocrypha(false)
expect(p.parse("UHABAKUKI 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("HABAKUKI 1:1").osis()).toEqual("Hab.1.1")
expect(p.parse("HAB 1:1").osis()).toEqual("Hab.1.1")
`
true
describe "Localized book Zeph (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zeph (zu)", ->
`
expect(p.parse("UZefaniya 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zefaniya 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("Zeph 1:1").osis()).toEqual("Zeph.1.1")
p.include_apocrypha(false)
expect(p.parse("UZEFANIYA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEFANIYA 1:1").osis()).toEqual("Zeph.1.1")
expect(p.parse("ZEPH 1:1").osis()).toEqual("Zeph.1.1")
`
true
describe "Localized book Hag (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Hag (zu)", ->
`
expect(p.parse("UHagayi 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hagayi 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("Hag 1:1").osis()).toEqual("Hag.1.1")
p.include_apocrypha(false)
expect(p.parse("UHAGAYI 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAGAYI 1:1").osis()).toEqual("Hag.1.1")
expect(p.parse("HAG 1:1").osis()).toEqual("Hag.1.1")
`
true
describe "Localized book Zech (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Zech (zu)", ->
`
expect(p.parse("UZakariya 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zakariya 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("Zech 1:1").osis()).toEqual("Zech.1.1")
p.include_apocrypha(false)
expect(p.parse("UZAKARIYA 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZAKARIYA 1:1").osis()).toEqual("Zech.1.1")
expect(p.parse("ZECH 1:1").osis()).toEqual("Zech.1.1")
`
true
describe "Localized book Mal (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mal (zu)", ->
`
expect(p.parse("UMalaki 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Malaki 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("Mal 1:1").osis()).toEqual("Mal.1.1")
p.include_apocrypha(false)
expect(p.parse("UMALAKI 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MALAKI 1:1").osis()).toEqual("Mal.1.1")
expect(p.parse("MAL 1:1").osis()).toEqual("Mal.1.1")
`
true
describe "Localized book MPI:NAME:<NAME>END_PI (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (zu)", ->
`
expect(p.parse("NgokukaMathewu 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Mathewu 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Math 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("Matt 1:1").osis()).toEqual("Matt.1.1")
p.include_apocrypha(false)
expect(p.parse("NGOKUKAMATHEWU 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATHEWU 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATH 1:1").osis()).toEqual("Matt.1.1")
expect(p.parse("MATT 1:1").osis()).toEqual("Matt.1.1")
`
true
describe "Localized book Mark (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Mark (zu)", ->
`
expect(p.parse("NgokukaMarku 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Marko 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Marku 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("Mark 1:1").osis()).toEqual("Mark.1.1")
p.include_apocrypha(false)
expect(p.parse("NGOKUKAMARKU 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARKO 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARKU 1:1").osis()).toEqual("Mark.1.1")
expect(p.parse("MARK 1:1").osis()).toEqual("Mark.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (zu)", ->
`
expect(p.parse("NgokukaLuka 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luka 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("Luke 1:1").osis()).toEqual("Luke.1.1")
p.include_apocrypha(false)
expect(p.parse("NGOKUKALUKA 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKA 1:1").osis()).toEqual("Luke.1.1")
expect(p.parse("LUKE 1:1").osis()).toEqual("Luke.1.1")
`
true
describe "Localized book 1John (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PIJohn (zu)", ->
`
expect(p.parse("1 kaJohane 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 Johane 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1John 1:1").osis()).toEqual("1John.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KAJOHANE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1 JOHANE 1:1").osis()).toEqual("1John.1.1")
expect(p.parse("1JOHN 1:1").osis()).toEqual("1John.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PIJohn (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2John (zu)", ->
`
expect(p.parse("2 kaJohane 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 Johane 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2John 1:1").osis()).toEqual("2John.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KAJOHANE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2 JOHANE 1:1").osis()).toEqual("2John.1.1")
expect(p.parse("2JOHN 1:1").osis()).toEqual("2John.1.1")
`
true
describe "Localized book 3John (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3John (zu)", ->
`
expect(p.parse("3 kaJohane 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 Johane 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3John 1:1").osis()).toEqual("3John.1.1")
p.include_apocrypha(false)
expect(p.parse("3 KAJOHANE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3 JOHANE 1:1").osis()).toEqual("3John.1.1")
expect(p.parse("3JOHN 1:1").osis()).toEqual("3John.1.1")
`
true
describe "Localized book PI:NAME:<NAME>END_PI (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (zu)", ->
`
expect(p.parse("NgokukaJohane 1:1").osis()).toEqual("John.1.1")
expect(p.parse("Johane 1:1").osis()).toEqual("John.1.1")
expect(p.parse("John 1:1").osis()).toEqual("John.1.1")
p.include_apocrypha(false)
expect(p.parse("NGOKUKAJOHANE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHANE 1:1").osis()).toEqual("John.1.1")
expect(p.parse("JOHN 1:1").osis()).toEqual("John.1.1")
`
true
describe "Localized book Acts (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Acts (zu)", ->
`
expect(p.parse("IZenzo 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("IzEnzo 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("Acts 1:1").osis()).toEqual("Acts.1.1")
p.include_apocrypha(false)
expect(p.parse("IZENZO 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("IZENZO 1:1").osis()).toEqual("Acts.1.1")
expect(p.parse("ACTS 1:1").osis()).toEqual("Acts.1.1")
`
true
describe "Localized book Rom (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Rom (zu)", ->
`
expect(p.parse("KwabaseRoma 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("AmaRoma 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Roma 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("Rom 1:1").osis()).toEqual("Rom.1.1")
p.include_apocrypha(false)
expect(p.parse("KWABASEROMA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("AMAROMA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROMA 1:1").osis()).toEqual("Rom.1.1")
expect(p.parse("ROM 1:1").osis()).toEqual("Rom.1.1")
`
true
describe "Localized book 2Cor (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Cor (zu)", ->
`
expect(p.parse("2 kwabaseKorinte 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Korinte 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 Kor 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2Cor 1:1").osis()).toEqual("2Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KWABASEKORINTE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KORINTE 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2 KOR 1:1").osis()).toEqual("2Cor.1.1")
expect(p.parse("2COR 1:1").osis()).toEqual("2Cor.1.1")
`
true
describe "Localized book 1Cor (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Cor (zu)", ->
`
expect(p.parse("1 kwabaseKorinte 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Korinte 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 Kor 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1Cor 1:1").osis()).toEqual("1Cor.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KWABASEKORINTE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KORINTE 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1 KOR 1:1").osis()).toEqual("1Cor.1.1")
expect(p.parse("1COR 1:1").osis()).toEqual("1Cor.1.1")
`
true
describe "Localized book Gal (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Gal (zu)", ->
`
expect(p.parse("KwabaseGalathiya 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Galathiya 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("Gal 1:1").osis()).toEqual("Gal.1.1")
p.include_apocrypha(false)
expect(p.parse("KWABASEGALATHIYA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GALATHIYA 1:1").osis()).toEqual("Gal.1.1")
expect(p.parse("GAL 1:1").osis()).toEqual("Gal.1.1")
`
true
describe "Localized book Eph (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Eph (zu)", ->
`
expect(p.parse("Kwabase-Efesu 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Efesu 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("Eph 1:1").osis()).toEqual("Eph.1.1")
p.include_apocrypha(false)
expect(p.parse("KWABASE-EFESU 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EFESU 1:1").osis()).toEqual("Eph.1.1")
expect(p.parse("EPH 1:1").osis()).toEqual("Eph.1.1")
`
true
describe "Localized book Phil (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: PI:NAME:<NAME>END_PI (zu)", ->
`
expect(p.parse("KwabaseFilipi 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Filipi 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("Phil 1:1").osis()).toEqual("Phil.1.1")
p.include_apocrypha(false)
expect(p.parse("KWABASEFILIPI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("FILIPI 1:1").osis()).toEqual("Phil.1.1")
expect(p.parse("PHIL 1:1").osis()).toEqual("Phil.1.1")
`
true
describe "Localized book Col (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Col (zu)", ->
`
expect(p.parse("KwabaseKolose 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Kolose 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("Col 1:1").osis()).toEqual("Col.1.1")
p.include_apocrypha(false)
expect(p.parse("KWABASEKOLOSE 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("KOLOSE 1:1").osis()).toEqual("Col.1.1")
expect(p.parse("COL 1:1").osis()).toEqual("Col.1.1")
`
true
describe "Localized book 2Thess (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Thess (zu)", ->
`
expect(p.parse("2 kwabaseThesalonika 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 Thesalonika 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2Thess 1:1").osis()).toEqual("2Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KWABASETHESALONIKA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2 THESALONIKA 1:1").osis()).toEqual("2Thess.1.1")
expect(p.parse("2THESS 1:1").osis()).toEqual("2Thess.1.1")
`
true
describe "Localized book 1Thess (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Thess (zu)", ->
`
expect(p.parse("1 kwabaseThesalonika 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 Thesalonika 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1Thess 1:1").osis()).toEqual("1Thess.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KWABASETHESALONIKA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1 THESALONIKA 1:1").osis()).toEqual("1Thess.1.1")
expect(p.parse("1THESS 1:1").osis()).toEqual("1Thess.1.1")
`
true
describe "Localized book 2Tim (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Tim (zu)", ->
`
expect(p.parse("2 kuThimothewu 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 Thimothewu 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2Tim 1:1").osis()).toEqual("2Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KUTHIMOTHEWU 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2 THIMOTHEWU 1:1").osis()).toEqual("2Tim.1.1")
expect(p.parse("2TIM 1:1").osis()).toEqual("2Tim.1.1")
`
true
describe "Localized book 1Tim (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Tim (zu)", ->
`
expect(p.parse("1 kuThimothewu 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 Thimothewu 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1Tim 1:1").osis()).toEqual("1Tim.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KUTHIMOTHEWU 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1 THIMOTHEWU 1:1").osis()).toEqual("1Tim.1.1")
expect(p.parse("1TIM 1:1").osis()).toEqual("1Tim.1.1")
`
true
describe "Localized book Titus (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Titus (zu)", ->
`
expect(p.parse("KuThithu 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("Titus 1:1").osis()).toEqual("Titus.1.1")
p.include_apocrypha(false)
expect(p.parse("KUTHITHU 1:1").osis()).toEqual("Titus.1.1")
expect(p.parse("TITUS 1:1").osis()).toEqual("Titus.1.1")
`
true
describe "Localized book Phlm (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Phlm (zu)", ->
`
expect(p.parse("KuFilemoni 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("Phlm 1:1").osis()).toEqual("Phlm.1.1")
p.include_apocrypha(false)
expect(p.parse("KUFILEMONI 1:1").osis()).toEqual("Phlm.1.1")
expect(p.parse("PHLM 1:1").osis()).toEqual("Phlm.1.1")
`
true
describe "Localized book Heb (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Heb (zu)", ->
`
expect(p.parse("KumaHeberu 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("Heb 1:1").osis()).toEqual("Heb.1.1")
p.include_apocrypha(false)
expect(p.parse("KUMAHEBERU 1:1").osis()).toEqual("Heb.1.1")
expect(p.parse("HEB 1:1").osis()).toEqual("Heb.1.1")
`
true
describe "Localized book Jas (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jas (zu)", ->
`
expect(p.parse("EkaJakobe 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jakobe 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("Jas 1:1").osis()).toEqual("Jas.1.1")
p.include_apocrypha(false)
expect(p.parse("EKAJAKOBE 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAKOBE 1:1").osis()).toEqual("Jas.1.1")
expect(p.parse("JAS 1:1").osis()).toEqual("Jas.1.1")
`
true
describe "Localized book 2Pet (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Pet (zu)", ->
`
expect(p.parse("2 kaPetru 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 Petru 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2Pet 1:1").osis()).toEqual("2Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("2 KAPETRU 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2 PETRU 1:1").osis()).toEqual("2Pet.1.1")
expect(p.parse("2PET 1:1").osis()).toEqual("2Pet.1.1")
`
true
describe "Localized book 1Pet (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Pet (zu)", ->
`
expect(p.parse("1 kaPetru 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 Petru 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1Pet 1:1").osis()).toEqual("1Pet.1.1")
p.include_apocrypha(false)
expect(p.parse("1 KAPETRU 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1 PETRU 1:1").osis()).toEqual("1Pet.1.1")
expect(p.parse("1PET 1:1").osis()).toEqual("1Pet.1.1")
`
true
describe "Localized book Jude (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jude (zu)", ->
`
expect(p.parse("EkaJuda 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("Jude 1:1").osis()).toEqual("Jude.1.1")
p.include_apocrypha(false)
expect(p.parse("EKAJUDA 1:1").osis()).toEqual("Jude.1.1")
expect(p.parse("JUDE 1:1").osis()).toEqual("Jude.1.1")
`
true
describe "Localized book Tob (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Tob (zu)", ->
`
expect(p.parse("Tob 1:1").osis()).toEqual("Tob.1.1")
`
true
describe "Localized book Jdt (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Jdt (zu)", ->
`
expect(p.parse("Jdt 1:1").osis()).toEqual("Jdt.1.1")
`
true
describe "Localized book Bar (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Bar (zu)", ->
`
expect(p.parse("Bar 1:1").osis()).toEqual("Bar.1.1")
`
true
describe "Localized book Sus (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: Sus (zu)", ->
`
expect(p.parse("Sus 1:1").osis()).toEqual("Sus.1.1")
`
true
describe "Localized book 2Macc (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 2Macc (zu)", ->
`
expect(p.parse("2Macc 1:1").osis()).toEqual("2Macc.1.1")
`
true
describe "Localized book 3Macc (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 3Macc (zu)", ->
`
expect(p.parse("3Macc 1:1").osis()).toEqual("3Macc.1.1")
`
true
describe "Localized book 4Macc (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 4Macc (zu)", ->
`
expect(p.parse("4Macc 1:1").osis()).toEqual("4Macc.1.1")
`
true
describe "Localized book 1Macc (zu)", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore",book_sequence_strategy: "ignore",osis_compaction_strategy: "bc",captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should handle book: 1Macc (zu)", ->
`
expect(p.parse("1Macc 1:1").osis()).toEqual("1Macc.1.1")
`
true
describe "Miscellaneous tests", ->
p = {}
beforeEach ->
p = new bcv_parser
p.set_options book_alone_strategy: "ignore", book_sequence_strategy: "ignore", osis_compaction_strategy: "bc", captive_end_digits_strategy: "delete"
p.include_apocrypha true
it "should return the expected language", ->
expect(p.languages).toEqual ["zu"]
it "should handle ranges (zu)", ->
expect(p.parse("Titus 1:1 kuye 2").osis()).toEqual "Titus.1.1-Titus.1.2"
expect(p.parse("Matt 1kuye2").osis()).toEqual "Matt.1-Matt.2"
expect(p.parse("Phlm 2 KUYE 3").osis()).toEqual "Phlm.1.2-Phlm.1.3"
it "should handle chapters (zu)", ->
expect(p.parse("Titus 1:1, isahluko 2").osis()).toEqual "Titus.1.1,Titus.2"
expect(p.parse("Matt 3:4 ISAHLUKO 6").osis()).toEqual "Matt.3.4,Matt.6"
it "should handle verses (zu)", ->
expect(p.parse("Exod 1:1 ivesi 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm IVESI 6").osis()).toEqual "Phlm.1.6"
it "should handle 'and' (zu)", ->
expect(p.parse("Exod 1:1 futhi 3").osis()).toEqual "Exod.1.1,Exod.1.3"
expect(p.parse("Phlm 2 FUTHI 6").osis()).toEqual "Phlm.1.2,Phlm.1.6"
it "should handle titles (zu)", ->
expect(p.parse("Ps 3 title, 4:2, 5:title").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
expect(p.parse("PS 3 TITLE, 4:2, 5:TITLE").osis()).toEqual "Ps.3.1,Ps.4.2,Ps.5.1"
it "should handle 'ff' (zu)", ->
expect(p.parse("Rev 3ff, 4:2ff").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
expect(p.parse("REV 3 FF, 4:2 FF").osis()).toEqual "Rev.3-Rev.22,Rev.4.2-Rev.4.11"
it "should handle translations (zu)", ->
expect(p.parse("Lev 1 (zul59)").osis_and_translations()).toEqual [["Lev.1", "zul59"]]
expect(p.parse("lev 1 zul59").osis_and_translations()).toEqual [["Lev.1", "zul59"]]
it "should handle boundaries (zu)", ->
p.set_options {book_alone_strategy: "full"}
expect(p.parse("\u2014Matt\u2014").osis()).toEqual "Matt.1-Matt.28"
expect(p.parse("\u201cMatt 1:1\u201d").osis()).toEqual "Matt.1.1"
|
[
{
"context": "c Quo Module\n\n@namespace Quo\n@class Query\n\n@author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\"use strict\"\n\nd",
"end": 80,
"score": 0.9998193383216858,
"start": 59,
"tag": "NAME",
"value": "Javier Jimenez Villar"
},
{
"context": " Quo\n@class Query\n\n@author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\"use strict\"\n\ndo ($$ = Quo) ->\n\n",
"end": 97,
"score": 0.999936044216156,
"start": 82,
"tag": "EMAIL",
"value": "javi@tapquo.com"
},
{
"context": "@author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi\n###\n\"use strict\"\n\ndo ($$ = Quo) ->\n\n PARENT_NODE",
"end": 110,
"score": 0.7908273935317993,
"start": 102,
"tag": "USERNAME",
"value": "@soyjavi"
}
] | source/quo.query.coffee | TNT-RoX/QuoJS | 1 | ###
Basic Quo Module
@namespace Quo
@class Query
@author Javier Jimenez Villar <javi@tapquo.com> || @soyjavi
###
"use strict"
do ($$ = Quo) ->
PARENT_NODE = "parentNode"
###
Get the descendants of each element in the current instance
@method find
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.find = (selector) ->
if @length is 1
result = Quo.query @[0], selector
else
result = @map -> Quo.query(@, selector)
$$ result
###
Get the parent of each element in the current instance
@method parent
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.parent = (selector) ->
ancestors = if selector then _findAncestors(@) else @instance(PARENT_NODE)
_filtered ancestors, selector
###
Get the children of each element in the current instance
@method children
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.children = (selector) ->
elements = @map -> Array::slice.call @children
_filtered elements, selector
###
Get the siblings of each element in the current instance
@method siblings
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.siblings = (selector) ->
elements = @map((index, element) ->
Array::slice.call(element.parentNode.children).filter (child) ->
child isnt element
)
_filtered elements, selector
###
Retrieve the DOM elements matched by the QuoJS object.
@method get
@param {number} [OPTIONAL] A zero-based integer indicating which element to retrieve
###
$$.fn.get = (index) ->
@[index] or null
###
Reduce the set of matched elements to the first in the set.
@method first
###
$$.fn.first = ->
$$ @[0]
###
Reduce the set of matched elements to the final one in the set.
@method last
###
$$.fn.last = ->
$$ @[@length - 1]
###
Reduce the set of matched elements to the final one in the set.
@method closest
@param {string} A string containing a selector expression to match elements against.
@param {instance} [OPTIONAL] A DOM element within which a matching element may be found.
###
$$.fn.closest = (selector, context) ->
node = @[0]
candidates = $$(selector)
node = null unless candidates.length
while node and candidates.indexOf(node) < 0
node = node isnt context and node isnt document and node.parentNode
$$ node
###
Get the immediately following sibling of each element in the instance.
@method next
###
$$.fn.next = ->
_getSibling.call @, "nextSibling"
###
Get the immediately preceding sibling of each element in the instance.
@method prev
###
$$.fn.prev = ->
_getSibling.call @, "previousSibling"
$$.fn.instance = (property) ->
@map -> @[property]
$$.fn.map = (callback) ->
$$.map @, (el, i) -> callback.call el, i, el
# ---------------------------------------------------------------------------
# Private Methods
# ---------------------------------------------------------------------------
_findAncestors = (nodes) ->
ancestors = []
while nodes.length > 0
nodes = $$.map(nodes, (node) ->
node = node.parentNode
if node isnt document and ancestors.indexOf(node) < 0
ancestors.push node
node
)
ancestors
_filtered = (nodes, selector) ->
if selector? then $$(nodes).filter(selector) else $$(nodes)
_getSibling = (command) ->
element = @[0][command]
element = element[command] while element and element.nodeType isnt 1
$$ element
| 212788 | ###
Basic Quo Module
@namespace Quo
@class Query
@author <NAME> <<EMAIL>> || @soyjavi
###
"use strict"
do ($$ = Quo) ->
PARENT_NODE = "parentNode"
###
Get the descendants of each element in the current instance
@method find
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.find = (selector) ->
if @length is 1
result = Quo.query @[0], selector
else
result = @map -> Quo.query(@, selector)
$$ result
###
Get the parent of each element in the current instance
@method parent
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.parent = (selector) ->
ancestors = if selector then _findAncestors(@) else @instance(PARENT_NODE)
_filtered ancestors, selector
###
Get the children of each element in the current instance
@method children
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.children = (selector) ->
elements = @map -> Array::slice.call @children
_filtered elements, selector
###
Get the siblings of each element in the current instance
@method siblings
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.siblings = (selector) ->
elements = @map((index, element) ->
Array::slice.call(element.parentNode.children).filter (child) ->
child isnt element
)
_filtered elements, selector
###
Retrieve the DOM elements matched by the QuoJS object.
@method get
@param {number} [OPTIONAL] A zero-based integer indicating which element to retrieve
###
$$.fn.get = (index) ->
@[index] or null
###
Reduce the set of matched elements to the first in the set.
@method first
###
$$.fn.first = ->
$$ @[0]
###
Reduce the set of matched elements to the final one in the set.
@method last
###
$$.fn.last = ->
$$ @[@length - 1]
###
Reduce the set of matched elements to the final one in the set.
@method closest
@param {string} A string containing a selector expression to match elements against.
@param {instance} [OPTIONAL] A DOM element within which a matching element may be found.
###
$$.fn.closest = (selector, context) ->
node = @[0]
candidates = $$(selector)
node = null unless candidates.length
while node and candidates.indexOf(node) < 0
node = node isnt context and node isnt document and node.parentNode
$$ node
###
Get the immediately following sibling of each element in the instance.
@method next
###
$$.fn.next = ->
_getSibling.call @, "nextSibling"
###
Get the immediately preceding sibling of each element in the instance.
@method prev
###
$$.fn.prev = ->
_getSibling.call @, "previousSibling"
$$.fn.instance = (property) ->
@map -> @[property]
$$.fn.map = (callback) ->
$$.map @, (el, i) -> callback.call el, i, el
# ---------------------------------------------------------------------------
# Private Methods
# ---------------------------------------------------------------------------
_findAncestors = (nodes) ->
ancestors = []
while nodes.length > 0
nodes = $$.map(nodes, (node) ->
node = node.parentNode
if node isnt document and ancestors.indexOf(node) < 0
ancestors.push node
node
)
ancestors
_filtered = (nodes, selector) ->
if selector? then $$(nodes).filter(selector) else $$(nodes)
_getSibling = (command) ->
element = @[0][command]
element = element[command] while element and element.nodeType isnt 1
$$ element
| true | ###
Basic Quo Module
@namespace Quo
@class Query
@author PI:NAME:<NAME>END_PI <PI:EMAIL:<EMAIL>END_PI> || @soyjavi
###
"use strict"
do ($$ = Quo) ->
PARENT_NODE = "parentNode"
###
Get the descendants of each element in the current instance
@method find
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.find = (selector) ->
if @length is 1
result = Quo.query @[0], selector
else
result = @map -> Quo.query(@, selector)
$$ result
###
Get the parent of each element in the current instance
@method parent
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.parent = (selector) ->
ancestors = if selector then _findAncestors(@) else @instance(PARENT_NODE)
_filtered ancestors, selector
###
Get the children of each element in the current instance
@method children
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.children = (selector) ->
elements = @map -> Array::slice.call @children
_filtered elements, selector
###
Get the siblings of each element in the current instance
@method siblings
@param {string} A string containing a selector expression to match elements against.
###
$$.fn.siblings = (selector) ->
elements = @map((index, element) ->
Array::slice.call(element.parentNode.children).filter (child) ->
child isnt element
)
_filtered elements, selector
###
Retrieve the DOM elements matched by the QuoJS object.
@method get
@param {number} [OPTIONAL] A zero-based integer indicating which element to retrieve
###
$$.fn.get = (index) ->
@[index] or null
###
Reduce the set of matched elements to the first in the set.
@method first
###
$$.fn.first = ->
$$ @[0]
###
Reduce the set of matched elements to the final one in the set.
@method last
###
$$.fn.last = ->
$$ @[@length - 1]
###
Reduce the set of matched elements to the final one in the set.
@method closest
@param {string} A string containing a selector expression to match elements against.
@param {instance} [OPTIONAL] A DOM element within which a matching element may be found.
###
$$.fn.closest = (selector, context) ->
node = @[0]
candidates = $$(selector)
node = null unless candidates.length
while node and candidates.indexOf(node) < 0
node = node isnt context and node isnt document and node.parentNode
$$ node
###
Get the immediately following sibling of each element in the instance.
@method next
###
$$.fn.next = ->
_getSibling.call @, "nextSibling"
###
Get the immediately preceding sibling of each element in the instance.
@method prev
###
$$.fn.prev = ->
_getSibling.call @, "previousSibling"
$$.fn.instance = (property) ->
@map -> @[property]
$$.fn.map = (callback) ->
$$.map @, (el, i) -> callback.call el, i, el
# ---------------------------------------------------------------------------
# Private Methods
# ---------------------------------------------------------------------------
_findAncestors = (nodes) ->
ancestors = []
while nodes.length > 0
nodes = $$.map(nodes, (node) ->
node = node.parentNode
if node isnt document and ancestors.indexOf(node) < 0
ancestors.push node
node
)
ancestors
_filtered = (nodes, selector) ->
if selector? then $$(nodes).filter(selector) else $$(nodes)
_getSibling = (command) ->
element = @[0][command]
element = element[command] while element and element.nodeType isnt 1
$$ element
|
[
{
"context": "# Copyright (c) Konode. All rights reserved.\n# This source code is subje",
"end": 22,
"score": 0.9822193384170532,
"start": 16,
"tag": "NAME",
"value": "Konode"
}
] | src/clientFilePage/chart.coffee | LogicalOutcomes/KoNote | 1 | # Copyright (c) Konode. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Chart component that generates and interacts with C3 API from prop changes
Imm = require 'immutable'
Moment = require 'moment'
Config = require '../config'
load = (win) ->
$ = win.jQuery
C3 = win.c3
React = win.React
{PropTypes} = React
R = React.DOM
{TimestampFormat} = require('../persist/utils')
D3TimestampFormat = '%Y%m%dT%H%M%S%L%Z'
hiddenId = "-h-" # Fake/hidden datapoint's ID
minChartHeight = 400
palette = ['#66c088', '#43c5f1', '#5f707e', '#f06362', '#e5be31', '#9560ab', '#e883c0', '#ef8f39', '#42a795', '#999999', '#ccc5a8', '5569d8']
Chart = React.createFactory React.createClass
displayName: 'Chart'
mixins: [React.addons.PureRenderMixin]
# TODO: propTypes
getInitialState: -> {
eventRows: 0
hoveredMetric: null
}
# TODO: propTypes
render: ->
return R.div({
className: 'chartInner'
ref: 'chartInner'
},
ChartEventsStyling({
ref: (comp) => @chartEventsStyling = comp
selectedMetricIds: @props.selectedMetricIds
progEvents: @props.progEvents
eventRows: @state.eventRows
})
R.div({
id: 'eventInfo'
ref: 'eventInfo'
},
R.div({className: 'title'})
R.div({className: 'info'}
R.div({className: 'description'})
R.div({className: 'timeSpan'},
R.div({className: 'start'})
R.div({className: 'end'})
)
)
)
R.div({
className: "chart"
ref: 'chartDiv'
})
)
# TODO: Use componentWillReceiveProps here?
componentDidUpdate: (oldProps, oldState) ->
# Perform resize first so chart renders new data properly
@_refreshChartHeight()
# Update timeSpan?
sameTimeSpan = Imm.is @props.timeSpan, oldProps.timeSpan
unless sameTimeSpan
newMin = @props.timeSpan.get('start')
newMax = @props.timeSpan.get('end')
# C3 requires there's some kind of span (even if it's 1ms)
# todo check this
if newMin is newMax
newMax = newMax.clone().endOf 'day'
# avoid repeating x axis labels bug
if (newMax.diff(newMin, 'days') > 3)
@_chart.internal.config.axis_x_tick_format = '%b %d'
else
@_chart.internal.config.axis_x_tick_format = null
@_chart.axis.min {x: newMin}
@_chart.axis.max {x: newMax}
# Update selected metrics?
sameSelectedMetrics = Imm.is @props.selectedMetricIds, oldProps.selectedMetricIds
unless sameSelectedMetrics
@_refreshSelectedMetrics()
# Destroy and re-mount chart when values changed
# TODO: Make this more efficient
sameMetricValues = Imm.is @props.metricValues, oldProps.metricValues
if not sameMetricValues and @_chart?
console.info "Re-drawing chart..."
@_chart.destroy()
@componentDidMount()
# Update selected progEvents?
sameProgEvents = Imm.is @props.progEvents, oldProps.progEvents
unless sameProgEvents
@_refreshProgEvents()
# Update chart min/max range from changed xTicks?
sameXTicks = Imm.is @props.xTicks, oldProps.xTicks
unless sameXTicks
@_chart.axis.range {
min: {x: @props.xTicks.first()}
max: {x: @props.xTicks.last()}
}
# Update chart type?
sameChartType = Imm.is @props.chartType, oldProps.chartType
unless sameChartType
@_generateChart()
@_refreshSelectedMetrics()
@_refreshProgEvents()
componentDidMount: ->
@_generateChart()
@_refreshSelectedMetrics()
@_refreshProgEvents()
@_refreshChartHeight(true)
_generateChart: ->
console.log "Generating Chart...."
# Create a Map from metric ID to data series,
# where each data series is a sequence of [x, y] pairs
# Inject hidden datapoint, with value well outside y-span
metricValues = @props.metricValues.push Imm.Map {
id: hiddenId
timestamp: Moment().format(TimestampFormat)
value: -99999
}
dataSeries = metricValues
.groupBy (metricValue) -> # group by metric
return metricValue.get('id')
.map (metricValues) -> # for each data series
return metricValues.map (metricValue) -> # for each data point
# [x, y]
return [metricValue.get('timestamp'), metricValue.get('value')]
seriesNamesById = dataSeries.keySeq().map (metricId) =>
# Ignore hidden datapoint
metricName = if metricId is hiddenId
metricId
else
@props.metricsById.get(metricId).get('name')
return [metricId, metricName]
.fromEntrySeq().toMap()
# Create set to show which x maps to which y
xsMap = dataSeries.keySeq()
.map (seriesId) ->
return [seriesId, '?x-' + seriesId]
.fromEntrySeq().toMap()
dataSeriesNames = dataSeries.keySeq()
.map (seriesId) =>
return [seriesId, seriesNamesById.get(seriesId)]
.fromEntrySeq().toMap()
dataSeries = dataSeries.entrySeq().flatMap ([seriesId, dataPoints]) ->
# Ensure ordered by earliest-latest
orderedDataPoints = dataPoints
.sortBy ([x, y]) -> x
xValues = Imm.List(['?x-' + seriesId]).concat(
orderedDataPoints.map ([x, y]) -> x
)
yValues = Imm.List([seriesId]).concat(
orderedDataPoints.map ([x, y]) -> y
)
return Imm.List([xValues, yValues])
scaledDataSeries = dataSeries.map (series) ->
# Scaling only applies to y series
return series if series.first()[0] is '?'
# Ignore hidden datapoint
return series if series.first() is hiddenId
# Filter out id's to figure out min & max
values = series.flatten()
.filterNot (y) -> isNaN(y)
.map (val) -> return Number(val)
# Figure out min and max series values
# Min is enforced as 0 for better visual proportions
# unless lowest value is negative
lowestValue = values.min()
hasNegativeValue = lowestValue < 0
min = if hasNegativeValue then lowestValue else 0
max = values.max()
# Center the line vertically if constant value
if min is max
min -= 1
max += 1
scaleFactor = max - min
# Map scaleFactor on to numerical values
return series.map (dataPoint) ->
unless isNaN(dataPoint)
(dataPoint - min) / scaleFactor
else
dataPoint
# Min/Max x dates
#minDate = @props.xTicks.first()
#maxDate = @props.xTicks.last()
minDate = @props.timeSpan.get('start')
maxDate = @props.timeSpan.get('end')
# YEAR LINES
# Build Imm.List of years and timestamps to matching
newYearLines = Imm.List()
firstYear = minDate.year()
lastYear = maxDate.year()
# Don't bother if only 1 year (doesn't go past calendar year)
unless firstYear is lastYear
newYearLines = Imm.List([firstYear..lastYear]).map (year) =>
return {
value: Moment().year(year).startOf('year')
text: year
position: 'middle'
class: 'yearLine'
}
# Generate and bind the chart
@_chart = C3.generate {
bindto: @refs.chartDiv
grid: {
x: {
lines: newYearLines.toJS()
}
}
axis: {
x: {
min: minDate
max: maxDate
type: 'timeseries'
tick: {
fit: false
format: '%b %d'
}
}
y: {
show: false
max: 1
min: 0
}
}
transition: {
duration: 0
}
data: {
type: @props.chartType
hide: true
xFormat: D3TimestampFormat
columns: scaledDataSeries.toJS()
xs: xsMap.toJS()
names: dataSeriesNames.toJS()
classes: {
hiddenId: 'hiddenId'
}
# Get/destroy hovered metric point data in local memory
onmouseover: (d) => @hoveredMetric = d
onmouseout: (d) => @hoveredMetric = null if @hoveredMetric? and @hoveredMetric.id is d.id
}
spline: {
interpolation: {
type: 'monotone'
}
}
point: {
r: 5
}
tooltip: {
format: {
value: (value, ratio, id, index) ->
actualValue = dataSeries
.find (series) -> series.contains id
.get(index + 1)
return actualValue
title: (timestamp) ->
return Moment(timestamp).format(Config.longTimestampFormat)
}
# Customization from original c3 tooltip DOM code: http://stackoverflow.com/a/25750639
contents: (metrics, defaultTitleFormat, defaultValueFormat, color) =>
# Lets us distinguish @_chart's local `this` (->) methods from Chart's `this` (=>)
# http://stackoverflow.com/a/15422322
$$ = ` this `
config = $$.config
titleFormat = config.tooltip_format_title or defaultTitleFormat
nameFormat = config.tooltip_format_name or (name) -> name
valueFormat = config.tooltip_format_value or defaultValueFormat
text = undefined
title = undefined
value = undefined
name = undefined
bgcolor = undefined
tableContents = metrics
.sort (a, b) -> b.value - a.value # Sort by scaled value (desc)
.forEach (currentMetric) =>
# Is this metric is currently being hovered over?
isHoveredMetric = @hoveredMetric? and (
@hoveredMetric.id is currentMetric.id or # Is currently hovered (top layer)
Math.abs(@hoveredMetric.value - currentMetric.value) < 0.025 # Is hiding behind hovered metric
)
# Ignore empty values? TODO: Check this
if !(currentMetric and (currentMetric.value or currentMetric.value == 0))
return
if !text
title = if titleFormat then titleFormat(currentMetric.x) else currentMetric.x
text = '<table class=\'' + $$.CLASS.tooltip + '\'>' + (if title or title == 0 then '<tr><th colspan=\'2\'>' + title + '</th></tr>' else '')
name = nameFormat(currentMetric.name)
value = valueFormat(currentMetric.value, currentMetric.ratio, currentMetric.id, currentMetric.index)
hoverClass = if isHoveredMetric then 'isHovered' else ''
bgcolor = if $$.levelColor then $$.levelColor(currentMetric.value) else color(currentMetric.id)
text += '<tr class=\'' + $$.CLASS.tooltipName + '-' + currentMetric.id + ' ' + hoverClass + '\'>'
text += '<td class=\'name\'><span style=\'background-color:' + bgcolor + '\'></span>' + name + '</td>'
text += '<td class=\'value\'>' + value + '</td>'
text += '</tr>'
# TODO: Show definitions for other metrics w/ overlapping regular or scaled values
if isHoveredMetric
metricDefinition = @props.metricsById.getIn [currentMetric.id, 'definition']
# Truncate definition to 100ch + ...
if metricDefinition.length > 100
metricDefinition = metricDefinition.substring(0, 100) + "..."
text += '<tr class=\'' + $$.CLASS.tooltipName + '-' + currentMetric.id + ' + \'>'
text += '<td class=\'definition\' colspan=\'2\'>' + metricDefinition + '</td>'
text += '</tr>'
return text
text += '</table>'
return text
}
item: {
onclick: (id) -> return false
}
padding: {
left: 25
right: 25
}
size: {
height: @_calculateChartHeight()
}
legend: {
show: false
}
onresize: @_refreshChartHeight # Manually calculate chart height
}
_calculateChartHeight: ->
fullHeight = $(@refs.chartInner).height() - 20
# Half-height for only metrics/events
if @props.selectedMetricIds.isEmpty() or @props.progEvents.isEmpty()
# Can return minimum height instead
halfHeight = fullHeight / 2
if halfHeight > minChartHeight
return Math.floor halfHeight
else
return minChartHeight
return fullHeight
_refreshChartHeight: (isForced = false) ->
return unless @_chart?
height = @_calculateChartHeight()
# Skip c3 update if is current height
if not isForced and height is $(@refs.chartDiv).height()
return
# Update event regions' v-positioning if necessary
if not @props.progEvents.isEmpty() and @chartEventsStyling?
@chartEventsStyling.updateChartHeight(height)
# Proceed with resizing the chart itself
@_chart.resize {height}
_refreshSelectedMetrics: ->
@_chart.hide(null, {withLegend: true})
unless @props.selectedMetricIds.size > 0
# for events to work
@_chart.show(hiddenId)
return
@props.selectedMetricIds.forEach (metricId) =>
# choose metric color from palette
# todo: move to analysis tab to save a render?
if palette.includes(@_chart.data.colors()[metricId])
# do not change the color if already set for this metric
return
else
# assign a color from the palette that is not already in use
for index, color of palette
if Object.values(@_chart.data.colors()).includes color
else
@_chart.data.colors({"#{metricId}": palette[index]})
return
@_chart.show(@props.selectedMetricIds.toJS(), {withLegend: true})
# fire metric colors back up to analysis tab
@props.updateMetricColors Imm.Map(@_chart.data.colors())
_refreshProgEvents: ->
console.log "Refreshing progEvents..."
# Generate c3 regions array
progEventRegions = @_generateProgEventRegions()
# Flush and re-apply regions to c3 chart
@_chart.regions.remove()
# C3 Regions have some kind of animation attached, which
# messes up remove/add
setTimeout(=>
@_chart.regions progEventRegions.toJS()
@_attachKeyBindings()
, 500)
_generateProgEventRegions: ->
# Build Imm.List of region objects
progEventRegions = @props.progEvents
.map (progEvent) =>
eventRegion = {
start: @_toUnixMs progEvent.get('startTimestamp')
class: "progEventRange #{progEvent.get('id')} typeId-"
}
eventRegion['class'] += if progEvent.get('typeId')
progEvent.get('typeId')
else
"null" # typeId-null is how we know it doesn't have an eventType
if Moment(progEvent.get('endTimestamp'), TimestampFormat).isValid()
eventRegion.end = @_toUnixMs progEvent.get('endTimestamp')
# TODO: Classify singular event
return eventRegion
# Sort regions in order of start timestamp
sortedEvents = progEventRegions.sortBy (event) => event['start']
# Setting up vars for row sorting
remainingEvents = sortedEvents
eventRows = Imm.List()
progEvents = Imm.List()
rowIndex = 0
# Process progEvents for regions while remaining events
while remainingEvents.size > 0
# Init new eventRow
eventRows = eventRows.push Imm.List()
# Loop through events, pluck any for the given row with non-conflicting dates
remainingEvents.forEach (thisEvent) =>
thisRow = eventRows.get(rowIndex)
# Can't rely on forEach index, because .delete() offsets it
liveIndex = remainingEvents.indexOf(thisEvent)
# Let's pluck this progEvent if no rows or timestamps don't conflict
if thisRow.size is 0 or (
not thisRow.last().get('end')? or
thisEvent.start >= thisRow.last().get('end')
)
# Append class with row number
progEvent = Imm.fromJS(thisEvent)
newClass = "#{progEvent.get('class')} row#{rowIndex}"
# Convert single-point event date to a short span
if not progEvent.get('end')
startDate = Moment progEvent.get('start')
progEvent = progEvent.set 'end', startDate.clone().add(6, 'hours')
newClass = newClass + " singlePoint"
# Update class (needs to be 'class' for C3js)
progEvent = progEvent.set('class', newClass)
# Update eventRows, remove from remainingEvents
updatedRow = eventRows.get(rowIndex).push progEvent
eventRows = eventRows.set rowIndex, updatedRow
remainingEvents = remainingEvents.delete(liveIndex)
# Concat to final (flat) output for c3
progEvents = progEvents.concat eventRows.get(rowIndex)
rowIndex++
# Determine regions height
chartHeightY = if eventRows.isEmpty() then 1 else 2
# Metrics can be bigger when only 1 progEvent row
if eventRows.size is 1
chartHeightY = 1.5
@setState {eventRows: eventRows.size}
@_chart.axis.max {
y: chartHeightY
}
return progEvents
_attachKeyBindings: ->
	# Binds hover behaviour to each progEvent's c3 region: shows the hidden
	# #eventInfo tooltip (following the mouse) and fills typed regions with
	# their eventType's colour.
	# TODO: Make sure listeners are removed when componentWillUnmount
	# Find our hidden eventInfo box
	eventInfo = $('#eventInfo')
	dateFormat = 'Do MMM [at] h:mm A'
	@props.progEvents.forEach (progEvent) =>
		# Attach hover binding to progEvent region
		$('.' + progEvent.get('id')).hover((event) =>
			description = progEvent.get('description') or "(no description)"
			# Truncate overly long descriptions.
			# Fixed: cut-off length now matches the 1000-char threshold
			# (previously substring(0, 2000) kept text past the checked limit)
			if description.length > 1000
				description = description.substring(0, 1000) + " . . ."
			title = progEvent.get('title')
			# Tack on eventType to title
			# TODO: Do this earlier on, to save redundancy
			if progEvent.get('typeId')
				eventType = @props.eventTypes.find (eventType) -> eventType.get('id') is progEvent.get('typeId')
				eventTypeName = eventType.get('name')
				title = if title then "#{title} (#{eventTypeName})" else eventTypeName
			eventInfo.addClass('show')
			eventInfo.find('.title').text title
			eventInfo.find('.description').text(description)
			startTimestamp = new Moment(progEvent.get('startTimestamp'), TimestampFormat)
			endTimestamp = new Moment(progEvent.get('endTimestamp'), TimestampFormat)
			startText = startTimestamp.format(dateFormat)
			endText = if endTimestamp.isValid() then endTimestamp.format(dateFormat) else null
			# Only label the span when there is a valid end date
			if endText?
				startText = "From: " + startText
				endText = "Until: " + endText
			eventInfo.find('.start').text startText
			eventInfo.find('.end').text endText
			# Make eventInfo follow the mouse
			$(win.document).on('mousemove', (event) ->
				eventInfo.css 'top', event.clientY + 25
				eventInfo.css 'left', event.clientX
			)
		, =>
			# Hide and unbind!
			eventInfo.removeClass('show')
			$(win.document).off('mousemove')
		)
		rect = $('.' + progEvent.get('id')).find('rect')[0]
		# Fill progEvent region with eventType color if exists
		if progEvent.get('typeId') and not @props.eventTypes.isEmpty()
			eventType = @props.eventTypes
			.find (type) -> type.get('id') is progEvent.get('typeId')
			$(rect).attr({
				style:
					"fill: #{eventType.get('colorKeyHex')} !important;
					stroke: #{eventType.get('colorKeyHex')} !important;"
			})
		else
			# At least clear it for non-typed events
			$(rect).attr({style: ''})
_toUnixMs: (timestamp) ->
	# Parses a TimestampFormat string into unix milliseconds for c3 regions
	# Converts to unix ms
	return Moment(timestamp, TimestampFormat).valueOf()
# Renders a dynamic <style> tag that vertically scales/positions each
# progEvent row's region rects inside the c3 chart (c3 cannot do this itself)
ChartEventsStyling = React.createFactory React.createClass
	displayName: 'ChartEventsStyling'
	propTypes: {
		eventRows: PropTypes.number.isRequired
		progEvents: PropTypes.instanceOf(Imm.List).isRequired
		selectedMetricIds: PropTypes.instanceOf(Imm.Set).isRequired
	}
	getInitialState: -> {
		chartHeight: null # set via updateChartHeight; null until first resize
	}
	updateChartHeight: (chartHeight) ->
		# This gets called alongside @_chart.resize
		@setState {chartHeight}
	render: ->
		# Calculate scaled height of each region (larger if no metrics)
		scaleFactor = if @props.selectedMetricIds.isEmpty() then 0.65 else 0.3
		scaleY = (scaleFactor / @props.eventRows).toFixed(2)
		R.style({},
			# NOTE(review): [0..eventRows] is inclusive, so one extra rule is
			# emitted; harmless, but confirm whether [0...eventRows] was meant
			(Imm.List([0..@props.eventRows]).map (rowNumber) =>
				translateY = rowNumber * @state.chartHeight
				return ".chart .c3-regions .c3-region.row#{rowNumber} > rect {
					transform: scaleY(#{scaleY}) translateY(#{translateY}px) !important
				}"
			)
		)
	# Hand the Chart factory back to the caller of load()
	return Chart
module.exports = {load}
| 103992 | # Copyright (c) <NAME>. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Chart component that generates and interacts with C3 API from prop changes
Imm = require 'immutable'
Moment = require 'moment'
Config = require '../config'
load = (win) ->
	# Library handles come from the provided window object, since this
	# module runs inside an Electron/NW-style renderer context
	$ = win.jQuery
	C3 = win.c3
	React = win.React
	{PropTypes} = React
	R = React.DOM
	{TimestampFormat} = require('../persist/utils')
	# D3-style parse format matching TimestampFormat's field order
	D3TimestampFormat = '%Y%m%dT%H%M%S%L%Z'
	hiddenId = "-h-" # Fake/hidden datapoint's ID
	minChartHeight = 400
	# Metric line colours. Fixed: '5569d8' was missing its leading '#',
	# so C3/D3 would not have treated it as a valid hex colour
	palette = ['#66c088', '#43c5f1', '#5f707e', '#f06362', '#e5be31', '#9560ab', '#e883c0', '#ef8f39', '#42a795', '#999999', '#ccc5a8', '#5569d8']
# Main chart component: owns a C3 chart instance (@_chart) and keeps it
# in sync with incoming props (metric values, progEvents, timeSpan, type)
Chart = React.createFactory React.createClass
	displayName: 'Chart'
	mixins: [React.addons.PureRenderMixin]
	# TODO: propTypes
	getInitialState: -> {
		eventRows: 0 # number of stacked progEvent rows (drives region CSS)
		# NOTE(review): hover handlers assign @hoveredMetric directly (an
		# instance property); this state entry appears unused — confirm
		hoveredMetric: null
	}
# TODO: propTypes
render: ->
	return R.div({
		className: 'chartInner'
		ref: 'chartInner'
	},
		# Dynamically-generated <style> tag for event-region row positioning
		ChartEventsStyling({
			ref: (comp) => @chartEventsStyling = comp
			selectedMetricIds: @props.selectedMetricIds
			progEvents: @props.progEvents
			eventRows: @state.eventRows
		})
		# Hidden tooltip box shown while hovering a progEvent region
		# (populated and positioned by _attachKeyBindings)
		R.div({
			id: 'eventInfo'
			ref: 'eventInfo'
		},
			R.div({className: 'title'})
			R.div({className: 'info'}
				R.div({className: 'description'})
				R.div({className: 'timeSpan'},
					R.div({className: 'start'})
					R.div({className: 'end'})
				)
			)
		)
		# C3 mounts the generated SVG into this div (see _generateChart)
		R.div({
			className: "chart"
			ref: 'chartDiv'
		})
	)
# TODO: Use componentWillReceiveProps here?
componentDidUpdate: (oldProps, oldState) ->
	# Diffs old vs new props and applies only the necessary c3 updates
	# Perform resize first so chart renders new data properly
	@_refreshChartHeight()
	# Update timeSpan?
	sameTimeSpan = Imm.is @props.timeSpan, oldProps.timeSpan
	unless sameTimeSpan
		newMin = @props.timeSpan.get('start')
		newMax = @props.timeSpan.get('end')
		# C3 requires there's some kind of span (even if it's 1ms)
		# todo check this
		if newMin is newMax
			newMax = newMax.clone().endOf 'day'
		# avoid repeating x axis labels bug
		if (newMax.diff(newMin, 'days') > 3)
			@_chart.internal.config.axis_x_tick_format = '%b %d'
		else
			@_chart.internal.config.axis_x_tick_format = null
		@_chart.axis.min {x: newMin}
		@_chart.axis.max {x: newMax}
	# Update selected metrics?
	sameSelectedMetrics = Imm.is @props.selectedMetricIds, oldProps.selectedMetricIds
	unless sameSelectedMetrics
		@_refreshSelectedMetrics()
	# Destroy and re-mount chart when values changed
	# TODO: Make this more efficient
	sameMetricValues = Imm.is @props.metricValues, oldProps.metricValues
	if not sameMetricValues and @_chart?
		console.info "Re-drawing chart..."
		@_chart.destroy()
		@componentDidMount()
	# Update selected progEvents?
	sameProgEvents = Imm.is @props.progEvents, oldProps.progEvents
	unless sameProgEvents
		@_refreshProgEvents()
	# Update chart min/max range from changed xTicks?
	sameXTicks = Imm.is @props.xTicks, oldProps.xTicks
	unless sameXTicks
		@_chart.axis.range {
			min: {x: @props.xTicks.first()}
			max: {x: @props.xTicks.last()}
		}
	# Update chart type?
	sameChartType = Imm.is @props.chartType, oldProps.chartType
	unless sameChartType
		# Full regenerate: c3's transform alone wouldn't reapply regions/colors
		@_generateChart()
		@_refreshSelectedMetrics()
		@_refreshProgEvents()
componentDidMount: ->
	# Build the c3 chart, then sync it with the current props
	@_generateChart()
	@_refreshSelectedMetrics()
	@_refreshProgEvents()
	@_refreshChartHeight(true) # forced: initial sizing must always apply
_generateChart: ->
	# (Re)builds the C3 chart from scratch: transforms metricValues into
	# per-metric x/y column arrays, normalizes each y series into 0..1 so
	# all metrics share one axis, then binds the config to @refs.chartDiv
	console.log "Generating Chart...."
	# Create a Map from metric ID to data series,
	# where each data series is a sequence of [x, y] pairs
	# Inject hidden datapoint, with value well outside y-span
	metricValues = @props.metricValues.push Imm.Map {
		id: hiddenId
		timestamp: Moment().format(TimestampFormat)
		value: -99999
	}
	dataSeries = metricValues
	.groupBy (metricValue) -> # group by metric
		return metricValue.get('id')
	.map (metricValues) -> # for each data series
		return metricValues.map (metricValue) -> # for each data point
			# [x, y]
			return [metricValue.get('timestamp'), metricValue.get('value')]
	seriesNamesById = dataSeries.keySeq().map (metricId) =>
		# Ignore hidden datapoint
		metricName = if metricId is hiddenId
			metricId
		else
			@props.metricsById.get(metricId).get('name')
		return [metricId, metricName]
	.fromEntrySeq().toMap()
	# Create set to show which x maps to which y
	xsMap = dataSeries.keySeq()
	.map (seriesId) ->
		return [seriesId, '?x-' + seriesId]
	.fromEntrySeq().toMap()
	dataSeriesNames = dataSeries.keySeq()
	.map (seriesId) =>
		return [seriesId, seriesNamesById.get(seriesId)]
	.fromEntrySeq().toMap()
	# Flatten to c3 column format: ['?x-id', x1, ...] followed by [id, y1, ...]
	dataSeries = dataSeries.entrySeq().flatMap ([seriesId, dataPoints]) ->
		# Ensure ordered by earliest-latest
		orderedDataPoints = dataPoints
		.sortBy ([x, y]) -> x
		xValues = Imm.List(['?x-' + seriesId]).concat(
			orderedDataPoints.map ([x, y]) -> x
		)
		yValues = Imm.List([seriesId]).concat(
			orderedDataPoints.map ([x, y]) -> y
		)
		return Imm.List([xValues, yValues])
	scaledDataSeries = dataSeries.map (series) ->
		# Scaling only applies to y series
		return series if series.first()[0] is '?'
		# Ignore hidden datapoint
		return series if series.first() is hiddenId
		# Filter out id's to figure out min & max
		values = series.flatten()
		.filterNot (y) -> isNaN(y)
		.map (val) -> return Number(val)
		# Figure out min and max series values
		# Min is enforced as 0 for better visual proportions
		# unless lowest value is negative
		lowestValue = values.min()
		hasNegativeValue = lowestValue < 0
		min = if hasNegativeValue then lowestValue else 0
		max = values.max()
		# Center the line vertically if constant value
		if min is max
			min -= 1
			max += 1
		scaleFactor = max - min
		# Map scaleFactor on to numerical values
		return series.map (dataPoint) ->
			unless isNaN(dataPoint)
				(dataPoint - min) / scaleFactor
			else
				dataPoint
	# Min/Max x dates
	#minDate = @props.xTicks.first()
	#maxDate = @props.xTicks.last()
	minDate = @props.timeSpan.get('start')
	maxDate = @props.timeSpan.get('end')
	# YEAR LINES
	# Build Imm.List of years and timestamps to matching
	newYearLines = Imm.List()
	firstYear = minDate.year()
	lastYear = maxDate.year()
	# Don't bother if only 1 year (doesn't go past calendar year)
	unless firstYear is lastYear
		newYearLines = Imm.List([firstYear..lastYear]).map (year) =>
			return {
				value: Moment().year(year).startOf('year')
				text: year
				position: 'middle'
				class: 'yearLine'
			}
	# Generate and bind the chart
	@_chart = C3.generate {
		bindto: @refs.chartDiv
		grid: {
			x: {
				lines: newYearLines.toJS()
			}
		}
		axis: {
			x: {
				min: minDate
				max: maxDate
				type: 'timeseries'
				tick: {
					fit: false
					format: '%b %d'
				}
			}
			y: {
				show: false
				max: 1
				min: 0
			}
		}
		transition: {
			duration: 0
		}
		data: {
			type: @props.chartType
			hide: true
			xFormat: D3TimestampFormat
			columns: scaledDataSeries.toJS()
			xs: xsMap.toJS()
			names: dataSeriesNames.toJS()
			classes: {
				# NOTE(review): the key here is the literal string 'hiddenId',
				# not the hiddenId variable ("-h-") — confirm this is intended
				hiddenId: 'hiddenId'
			}
			# Get/destroy hovered metric point data in local memory
			onmouseover: (d) => @hoveredMetric = d
			onmouseout: (d) => @hoveredMetric = null if @hoveredMetric? and @hoveredMetric.id is d.id
		}
		spline: {
			interpolation: {
				type: 'monotone'
			}
		}
		point: {
			r: 5
		}
		tooltip: {
			format: {
				value: (value, ratio, id, index) ->
					# Show the original (unscaled) value in the tooltip;
					# index + 1 skips the series-id header element
					actualValue = dataSeries
					.find (series) -> series.contains id
					.get(index + 1)
					return actualValue
				title: (timestamp) ->
					return Moment(timestamp).format(Config.longTimestampFormat)
			}
			# Customization from original c3 tooltip DOM code: http://stackoverflow.com/a/25750639
			contents: (metrics, defaultTitleFormat, defaultValueFormat, color) =>
				# Lets us distinguish @_chart's local `this` (->) methods from Chart's `this` (=>)
				# http://stackoverflow.com/a/15422322
				$$ = ` this `
				config = $$.config
				titleFormat = config.tooltip_format_title or defaultTitleFormat
				nameFormat = config.tooltip_format_name or (name) -> name
				valueFormat = config.tooltip_format_value or defaultValueFormat
				text = undefined
				title = undefined
				value = undefined
				name = undefined
				bgcolor = undefined
				tableContents = metrics
				.sort (a, b) -> b.value - a.value # Sort by scaled value (desc)
				.forEach (currentMetric) =>
					# Is this metric is currently being hovered over?
					isHoveredMetric = @hoveredMetric? and (
						@hoveredMetric.id is currentMetric.id or # Is currently hovered (top layer)
						Math.abs(@hoveredMetric.value - currentMetric.value) < 0.025 # Is hiding behind hovered metric
					)
					# Ignore empty values? TODO: Check this
					if !(currentMetric and (currentMetric.value or currentMetric.value == 0))
						return
					if !text
						title = if titleFormat then titleFormat(currentMetric.x) else currentMetric.x
						text = '<table class=\'' + $$.CLASS.tooltip + '\'>' + (if title or title == 0 then '<tr><th colspan=\'2\'>' + title + '</th></tr>' else '')
					name = nameFormat(currentMetric.name)
					value = valueFormat(currentMetric.value, currentMetric.ratio, currentMetric.id, currentMetric.index)
					hoverClass = if isHoveredMetric then 'isHovered' else ''
					bgcolor = if $$.levelColor then $$.levelColor(currentMetric.value) else color(currentMetric.id)
					text += '<tr class=\'' + $$.CLASS.tooltipName + '-' + currentMetric.id + ' ' + hoverClass + '\'>'
					text += '<td class=\'name\'><span style=\'background-color:' + bgcolor + '\'></span>' + name + '</td>'
					text += '<td class=\'value\'>' + value + '</td>'
					text += '</tr>'
					# TODO: Show definitions for other metrics w/ overlapping regular or scaled values
					if isHoveredMetric
						metricDefinition = @props.metricsById.getIn [currentMetric.id, 'definition']
						# Truncate definition to 100ch + ...
						if metricDefinition.length > 100
							metricDefinition = metricDefinition.substring(0, 100) + "..."
						# Fixed: the class attribute previously ended with a stray
						# literal " + " (written as `+ ' + \'>'`), producing
						# class='...-id + ' in the emitted HTML; it now closes cleanly
						text += '<tr class=\'' + $$.CLASS.tooltipName + '-' + currentMetric.id + '\'>'
						text += '<td class=\'definition\' colspan=\'2\'>' + metricDefinition + '</td>'
						text += '</tr>'
					return text
				text += '</table>'
				return text
		}
		item: {
			onclick: (id) -> return false
		}
		padding: {
			left: 25
			right: 25
		}
		size: {
			height: @_calculateChartHeight()
		}
		legend: {
			show: false
		}
		onresize: @_refreshChartHeight # Manually calculate chart height
	}
_calculateChartHeight: ->
	# Height available for the chart: the inner container minus a small margin
	availableHeight = $(@refs.chartInner).height() - 20
	# With both metrics and events on screen, the chart takes the full area
	hasMetrics = not @props.selectedMetricIds.isEmpty()
	hasEvents = not @props.progEvents.isEmpty()
	return availableHeight if hasMetrics and hasEvents
	# Only one of the two is shown: use half the area, floored to whole px,
	# but never drop below the configured minimum chart height
	return Math.max(Math.floor(availableHeight / 2), minChartHeight)
_refreshChartHeight: (isForced = false) ->
	# Recomputes the chart height and applies it; no-op until the chart exists
	return unless @_chart?
	height = @_calculateChartHeight()
	# Skip c3 update if is current height
	if not isForced and height is $(@refs.chartDiv).height()
		return
	# Update event regions' v-positioning if necessary
	if not @props.progEvents.isEmpty() and @chartEventsStyling?
		@chartEventsStyling.updateChartHeight(height)
	# Proceed with resizing the chart itself
	@_chart.resize {height}
_refreshSelectedMetrics: ->
	# Shows only the currently-selected metric series, assigning each a
	# stable palette colour, then reports the colour map back to the parent
	@_chart.hide(null, {withLegend: true})
	unless @props.selectedMetricIds.size > 0
		# No metrics selected: keep the hidden dummy series visible
		# for events to work
		@_chart.show(hiddenId)
		return
	@props.selectedMetricIds.forEach (metricId) =>
		# choose metric color from palette
		# todo: move to analysis tab to save a render?
		# do not change the color if already set for this metric
		return if palette.includes(@_chart.data.colors()[metricId])
		# assign the first palette color that is not already in use
		# (replaces the previous index-keyed loop with an empty then-branch)
		usedColors = Object.values(@_chart.data.colors())
		unusedColor = palette.find (color) -> not usedColors.includes(color)
		if unusedColor?
			@_chart.data.colors({"#{metricId}": unusedColor})
	@_chart.show(@props.selectedMetricIds.toJS(), {withLegend: true})
	# fire metric colors back up to analysis tab
	@props.updateMetricColors Imm.Map(@_chart.data.colors())
_refreshProgEvents: ->
	# Rebuilds the c3 region overlays for progEvents and re-attaches their
	# hover bindings
	console.log "Refreshing progEvents..."
	# Generate c3 regions array
	progEventRegions = @_generateProgEventRegions()
	# Flush and re-apply regions to c3 chart
	@_chart.regions.remove()
	# C3 Regions have some kind of animation attached, which
	# messes up remove/add
	setTimeout(=>
		@_chart.regions progEventRegions.toJS()
		@_attachKeyBindings()
	, 500)
_generateProgEventRegions: ->
	# Converts progEvents into c3 region objects and packs them into
	# non-overlapping rows; each region's 'rowN' class drives the CSS
	# positioning emitted by ChartEventsStyling
	# Build Imm.List of region objects
	progEventRegions = @props.progEvents
	.map (progEvent) =>
		eventRegion = {
			start: @_toUnixMs progEvent.get('startTimestamp')
			class: "progEventRange #{progEvent.get('id')} typeId-"
		}
		eventRegion['class'] += if progEvent.get('typeId')
			progEvent.get('typeId')
		else
			"null" # typeId-null is how we know it doesn't have an eventType
		if Moment(progEvent.get('endTimestamp'), TimestampFormat).isValid()
			eventRegion.end = @_toUnixMs progEvent.get('endTimestamp')
		# TODO: Classify singular event
		return eventRegion
	# Sort regions in order of start timestamp
	sortedEvents = progEventRegions.sortBy (event) => event['start']
	# Setting up vars for row sorting
	remainingEvents = sortedEvents
	eventRows = Imm.List()
	progEvents = Imm.List()
	rowIndex = 0
	# Process progEvents for regions while remaining events
	# Greedy interval packing: each pass fills one row with as many
	# non-overlapping events (in start order) as will fit
	while remainingEvents.size > 0
		# Init new eventRow
		eventRows = eventRows.push Imm.List()
		# Loop through events, pluck any for the given row with non-conflicting dates
		remainingEvents.forEach (thisEvent) =>
			thisRow = eventRows.get(rowIndex)
			# Can't rely on forEach index, because .delete() offsets it
			liveIndex = remainingEvents.indexOf(thisEvent)
			# Let's pluck this progEvent if no rows or timestamps don't conflict
			if thisRow.size is 0 or (
				not thisRow.last().get('end')? or
				thisEvent.start >= thisRow.last().get('end')
			)
				# Append class with row number
				progEvent = Imm.fromJS(thisEvent)
				newClass = "#{progEvent.get('class')} row#{rowIndex}"
				# Convert single-point event date to a short span
				if not progEvent.get('end')
					startDate = Moment progEvent.get('start')
					progEvent = progEvent.set 'end', startDate.clone().add(6, 'hours')
					newClass = newClass + " singlePoint"
				# Update class (needs to be 'class' for C3js)
				progEvent = progEvent.set('class', newClass)
				# Update eventRows, remove from remainingEvents
				updatedRow = eventRows.get(rowIndex).push progEvent
				eventRows = eventRows.set rowIndex, updatedRow
				remainingEvents = remainingEvents.delete(liveIndex)
		# Concat to final (flat) output for c3
		progEvents = progEvents.concat eventRows.get(rowIndex)
		rowIndex++
	# Determine regions height
	chartHeightY = if eventRows.isEmpty() then 1 else 2
	# Metrics can be bigger when only 1 progEvent row
	if eventRows.size is 1
		chartHeightY = 1.5
	@setState {eventRows: eventRows.size}
	@_chart.axis.max {
		y: chartHeightY
	}
	return progEvents
_attachKeyBindings: ->
	# Binds hover behaviour to each progEvent's c3 region: shows the hidden
	# #eventInfo tooltip (following the mouse) and fills typed regions with
	# their eventType's colour.
	# TODO: Make sure listeners are removed when componentWillUnmount
	# Find our hidden eventInfo box
	eventInfo = $('#eventInfo')
	dateFormat = 'Do MMM [at] h:mm A'
	@props.progEvents.forEach (progEvent) =>
		# Attach hover binding to progEvent region
		$('.' + progEvent.get('id')).hover((event) =>
			description = progEvent.get('description') or "(no description)"
			# Truncate overly long descriptions.
			# Fixed: cut-off length now matches the 1000-char threshold
			# (previously substring(0, 2000) kept text past the checked limit)
			if description.length > 1000
				description = description.substring(0, 1000) + " . . ."
			title = progEvent.get('title')
			# Tack on eventType to title
			# TODO: Do this earlier on, to save redundancy
			if progEvent.get('typeId')
				eventType = @props.eventTypes.find (eventType) -> eventType.get('id') is progEvent.get('typeId')
				eventTypeName = eventType.get('name')
				title = if title then "#{title} (#{eventTypeName})" else eventTypeName
			eventInfo.addClass('show')
			eventInfo.find('.title').text title
			eventInfo.find('.description').text(description)
			startTimestamp = new Moment(progEvent.get('startTimestamp'), TimestampFormat)
			endTimestamp = new Moment(progEvent.get('endTimestamp'), TimestampFormat)
			startText = startTimestamp.format(dateFormat)
			endText = if endTimestamp.isValid() then endTimestamp.format(dateFormat) else null
			# Only label the span when there is a valid end date
			if endText?
				startText = "From: " + startText
				endText = "Until: " + endText
			eventInfo.find('.start').text startText
			eventInfo.find('.end').text endText
			# Make eventInfo follow the mouse
			$(win.document).on('mousemove', (event) ->
				eventInfo.css 'top', event.clientY + 25
				eventInfo.css 'left', event.clientX
			)
		, =>
			# Hide and unbind!
			eventInfo.removeClass('show')
			$(win.document).off('mousemove')
		)
		rect = $('.' + progEvent.get('id')).find('rect')[0]
		# Fill progEvent region with eventType color if exists
		if progEvent.get('typeId') and not @props.eventTypes.isEmpty()
			eventType = @props.eventTypes
			.find (type) -> type.get('id') is progEvent.get('typeId')
			$(rect).attr({
				style:
					"fill: #{eventType.get('colorKeyHex')} !important;
					stroke: #{eventType.get('colorKeyHex')} !important;"
			})
		else
			# At least clear it for non-typed events
			$(rect).attr({style: ''})
_toUnixMs: (timestamp) ->
	# Parses a TimestampFormat string into unix milliseconds for c3 regions
	# Converts to unix ms
	return Moment(timestamp, TimestampFormat).valueOf()
# Renders a dynamic <style> tag that vertically scales/positions each
# progEvent row's region rects inside the c3 chart (c3 cannot do this itself)
ChartEventsStyling = React.createFactory React.createClass
	displayName: 'ChartEventsStyling'
	propTypes: {
		eventRows: PropTypes.number.isRequired
		progEvents: PropTypes.instanceOf(Imm.List).isRequired
		selectedMetricIds: PropTypes.instanceOf(Imm.Set).isRequired
	}
	getInitialState: -> {
		chartHeight: null # set via updateChartHeight; null until first resize
	}
	updateChartHeight: (chartHeight) ->
		# This gets called alongside @_chart.resize
		@setState {chartHeight}
	render: ->
		# Calculate scaled height of each region (larger if no metrics)
		scaleFactor = if @props.selectedMetricIds.isEmpty() then 0.65 else 0.3
		scaleY = (scaleFactor / @props.eventRows).toFixed(2)
		R.style({},
			# NOTE(review): [0..eventRows] is inclusive, so one extra rule is
			# emitted; harmless, but confirm whether [0...eventRows] was meant
			(Imm.List([0..@props.eventRows]).map (rowNumber) =>
				translateY = rowNumber * @state.chartHeight
				return ".chart .c3-regions .c3-region.row#{rowNumber} > rect {
					transform: scaleY(#{scaleY}) translateY(#{translateY}px) !important
				}"
			)
		)
	# Hand the Chart factory back to the caller of load()
	return Chart
module.exports = {load}
| true | # Copyright (c) PI:NAME:<NAME>END_PI. All rights reserved.
# This source code is subject to the terms of the Mozilla Public License, v. 2.0
# that can be found in the LICENSE file or at: http://mozilla.org/MPL/2.0
# Chart component that generates and interacts with C3 API from prop changes
Imm = require 'immutable'
Moment = require 'moment'
Config = require '../config'
load = (win) ->
	# Library handles come from the provided window object, since this
	# module runs inside an Electron/NW-style renderer context
	$ = win.jQuery
	C3 = win.c3
	React = win.React
	{PropTypes} = React
	R = React.DOM
	{TimestampFormat} = require('../persist/utils')
	# D3-style parse format matching TimestampFormat's field order
	D3TimestampFormat = '%Y%m%dT%H%M%S%L%Z'
	hiddenId = "-h-" # Fake/hidden datapoint's ID
	minChartHeight = 400
	# Metric line colours. Fixed: '5569d8' was missing its leading '#',
	# so C3/D3 would not have treated it as a valid hex colour
	palette = ['#66c088', '#43c5f1', '#5f707e', '#f06362', '#e5be31', '#9560ab', '#e883c0', '#ef8f39', '#42a795', '#999999', '#ccc5a8', '#5569d8']
Chart = React.createFactory React.createClass
displayName: 'Chart'
mixins: [React.addons.PureRenderMixin]
# TODO: propTypes
getInitialState: -> {
eventRows: 0
hoveredMetric: null
}
# TODO: propTypes
render: ->
return R.div({
className: 'chartInner'
ref: 'chartInner'
},
ChartEventsStyling({
ref: (comp) => @chartEventsStyling = comp
selectedMetricIds: @props.selectedMetricIds
progEvents: @props.progEvents
eventRows: @state.eventRows
})
R.div({
id: 'eventInfo'
ref: 'eventInfo'
},
R.div({className: 'title'})
R.div({className: 'info'}
R.div({className: 'description'})
R.div({className: 'timeSpan'},
R.div({className: 'start'})
R.div({className: 'end'})
)
)
)
R.div({
className: "chart"
ref: 'chartDiv'
})
)
# TODO: Use componentWillReceiveProps here?
componentDidUpdate: (oldProps, oldState) ->
# Perform resize first so chart renders new data properly
@_refreshChartHeight()
# Update timeSpan?
sameTimeSpan = Imm.is @props.timeSpan, oldProps.timeSpan
unless sameTimeSpan
newMin = @props.timeSpan.get('start')
newMax = @props.timeSpan.get('end')
# C3 requires there's some kind of span (even if it's 1ms)
# todo check this
if newMin is newMax
newMax = newMax.clone().endOf 'day'
# avoid repeating x axis labels bug
if (newMax.diff(newMin, 'days') > 3)
@_chart.internal.config.axis_x_tick_format = '%b %d'
else
@_chart.internal.config.axis_x_tick_format = null
@_chart.axis.min {x: newMin}
@_chart.axis.max {x: newMax}
# Update selected metrics?
sameSelectedMetrics = Imm.is @props.selectedMetricIds, oldProps.selectedMetricIds
unless sameSelectedMetrics
@_refreshSelectedMetrics()
# Destroy and re-mount chart when values changed
# TODO: Make this more efficient
sameMetricValues = Imm.is @props.metricValues, oldProps.metricValues
if not sameMetricValues and @_chart?
console.info "Re-drawing chart..."
@_chart.destroy()
@componentDidMount()
# Update selected progEvents?
sameProgEvents = Imm.is @props.progEvents, oldProps.progEvents
unless sameProgEvents
@_refreshProgEvents()
# Update chart min/max range from changed xTicks?
sameXTicks = Imm.is @props.xTicks, oldProps.xTicks
unless sameXTicks
@_chart.axis.range {
min: {x: @props.xTicks.first()}
max: {x: @props.xTicks.last()}
}
# Update chart type?
sameChartType = Imm.is @props.chartType, oldProps.chartType
unless sameChartType
@_generateChart()
@_refreshSelectedMetrics()
@_refreshProgEvents()
componentDidMount: ->
@_generateChart()
@_refreshSelectedMetrics()
@_refreshProgEvents()
@_refreshChartHeight(true)
_generateChart: ->
	# (Re)builds the C3 chart from scratch: transforms metricValues into
	# per-metric x/y column arrays, normalizes each y series into 0..1 so
	# all metrics share one axis, then binds the config to @refs.chartDiv
	console.log "Generating Chart...."
	# Create a Map from metric ID to data series,
	# where each data series is a sequence of [x, y] pairs
	# Inject hidden datapoint, with value well outside y-span
	metricValues = @props.metricValues.push Imm.Map {
		id: hiddenId
		timestamp: Moment().format(TimestampFormat)
		value: -99999
	}
	dataSeries = metricValues
	.groupBy (metricValue) -> # group by metric
		return metricValue.get('id')
	.map (metricValues) -> # for each data series
		return metricValues.map (metricValue) -> # for each data point
			# [x, y]
			return [metricValue.get('timestamp'), metricValue.get('value')]
	seriesNamesById = dataSeries.keySeq().map (metricId) =>
		# Ignore hidden datapoint
		metricName = if metricId is hiddenId
			metricId
		else
			@props.metricsById.get(metricId).get('name')
		return [metricId, metricName]
	.fromEntrySeq().toMap()
	# Create set to show which x maps to which y
	xsMap = dataSeries.keySeq()
	.map (seriesId) ->
		return [seriesId, '?x-' + seriesId]
	.fromEntrySeq().toMap()
	dataSeriesNames = dataSeries.keySeq()
	.map (seriesId) =>
		return [seriesId, seriesNamesById.get(seriesId)]
	.fromEntrySeq().toMap()
	# Flatten to c3 column format: ['?x-id', x1, ...] followed by [id, y1, ...]
	dataSeries = dataSeries.entrySeq().flatMap ([seriesId, dataPoints]) ->
		# Ensure ordered by earliest-latest
		orderedDataPoints = dataPoints
		.sortBy ([x, y]) -> x
		xValues = Imm.List(['?x-' + seriesId]).concat(
			orderedDataPoints.map ([x, y]) -> x
		)
		yValues = Imm.List([seriesId]).concat(
			orderedDataPoints.map ([x, y]) -> y
		)
		return Imm.List([xValues, yValues])
	scaledDataSeries = dataSeries.map (series) ->
		# Scaling only applies to y series
		return series if series.first()[0] is '?'
		# Ignore hidden datapoint
		return series if series.first() is hiddenId
		# Filter out id's to figure out min & max
		values = series.flatten()
		.filterNot (y) -> isNaN(y)
		.map (val) -> return Number(val)
		# Figure out min and max series values
		# Min is enforced as 0 for better visual proportions
		# unless lowest value is negative
		lowestValue = values.min()
		hasNegativeValue = lowestValue < 0
		min = if hasNegativeValue then lowestValue else 0
		max = values.max()
		# Center the line vertically if constant value
		if min is max
			min -= 1
			max += 1
		scaleFactor = max - min
		# Map scaleFactor on to numerical values
		return series.map (dataPoint) ->
			unless isNaN(dataPoint)
				(dataPoint - min) / scaleFactor
			else
				dataPoint
	# Min/Max x dates
	#minDate = @props.xTicks.first()
	#maxDate = @props.xTicks.last()
	minDate = @props.timeSpan.get('start')
	maxDate = @props.timeSpan.get('end')
	# YEAR LINES
	# Build Imm.List of years and timestamps to matching
	newYearLines = Imm.List()
	firstYear = minDate.year()
	lastYear = maxDate.year()
	# Don't bother if only 1 year (doesn't go past calendar year)
	unless firstYear is lastYear
		newYearLines = Imm.List([firstYear..lastYear]).map (year) =>
			return {
				value: Moment().year(year).startOf('year')
				text: year
				position: 'middle'
				class: 'yearLine'
			}
	# Generate and bind the chart
	@_chart = C3.generate {
		bindto: @refs.chartDiv
		grid: {
			x: {
				lines: newYearLines.toJS()
			}
		}
		axis: {
			x: {
				min: minDate
				max: maxDate
				type: 'timeseries'
				tick: {
					fit: false
					format: '%b %d'
				}
			}
			y: {
				show: false
				max: 1
				min: 0
			}
		}
		transition: {
			duration: 0
		}
		data: {
			type: @props.chartType
			hide: true
			xFormat: D3TimestampFormat
			columns: scaledDataSeries.toJS()
			xs: xsMap.toJS()
			names: dataSeriesNames.toJS()
			classes: {
				# NOTE(review): the key here is the literal string 'hiddenId',
				# not the hiddenId variable ("-h-") — confirm this is intended
				hiddenId: 'hiddenId'
			}
			# Get/destroy hovered metric point data in local memory
			onmouseover: (d) => @hoveredMetric = d
			onmouseout: (d) => @hoveredMetric = null if @hoveredMetric? and @hoveredMetric.id is d.id
		}
		spline: {
			interpolation: {
				type: 'monotone'
			}
		}
		point: {
			r: 5
		}
		tooltip: {
			format: {
				value: (value, ratio, id, index) ->
					# Show the original (unscaled) value in the tooltip;
					# index + 1 skips the series-id header element
					actualValue = dataSeries
					.find (series) -> series.contains id
					.get(index + 1)
					return actualValue
				title: (timestamp) ->
					return Moment(timestamp).format(Config.longTimestampFormat)
			}
			# Customization from original c3 tooltip DOM code: http://stackoverflow.com/a/25750639
			contents: (metrics, defaultTitleFormat, defaultValueFormat, color) =>
				# Lets us distinguish @_chart's local `this` (->) methods from Chart's `this` (=>)
				# http://stackoverflow.com/a/15422322
				$$ = ` this `
				config = $$.config
				titleFormat = config.tooltip_format_title or defaultTitleFormat
				nameFormat = config.tooltip_format_name or (name) -> name
				valueFormat = config.tooltip_format_value or defaultValueFormat
				text = undefined
				title = undefined
				value = undefined
				name = undefined
				bgcolor = undefined
				tableContents = metrics
				.sort (a, b) -> b.value - a.value # Sort by scaled value (desc)
				.forEach (currentMetric) =>
					# Is this metric is currently being hovered over?
					isHoveredMetric = @hoveredMetric? and (
						@hoveredMetric.id is currentMetric.id or # Is currently hovered (top layer)
						Math.abs(@hoveredMetric.value - currentMetric.value) < 0.025 # Is hiding behind hovered metric
					)
					# Ignore empty values? TODO: Check this
					if !(currentMetric and (currentMetric.value or currentMetric.value == 0))
						return
					if !text
						title = if titleFormat then titleFormat(currentMetric.x) else currentMetric.x
						text = '<table class=\'' + $$.CLASS.tooltip + '\'>' + (if title or title == 0 then '<tr><th colspan=\'2\'>' + title + '</th></tr>' else '')
					name = nameFormat(currentMetric.name)
					value = valueFormat(currentMetric.value, currentMetric.ratio, currentMetric.id, currentMetric.index)
					hoverClass = if isHoveredMetric then 'isHovered' else ''
					bgcolor = if $$.levelColor then $$.levelColor(currentMetric.value) else color(currentMetric.id)
					text += '<tr class=\'' + $$.CLASS.tooltipName + '-' + currentMetric.id + ' ' + hoverClass + '\'>'
					text += '<td class=\'name\'><span style=\'background-color:' + bgcolor + '\'></span>' + name + '</td>'
					text += '<td class=\'value\'>' + value + '</td>'
					text += '</tr>'
					# TODO: Show definitions for other metrics w/ overlapping regular or scaled values
					if isHoveredMetric
						metricDefinition = @props.metricsById.getIn [currentMetric.id, 'definition']
						# Truncate definition to 100ch + ...
						if metricDefinition.length > 100
							metricDefinition = metricDefinition.substring(0, 100) + "..."
						# Fixed: the class attribute previously ended with a stray
						# literal " + " (written as `+ ' + \'>'`), producing
						# class='...-id + ' in the emitted HTML; it now closes cleanly
						text += '<tr class=\'' + $$.CLASS.tooltipName + '-' + currentMetric.id + '\'>'
						text += '<td class=\'definition\' colspan=\'2\'>' + metricDefinition + '</td>'
						text += '</tr>'
					return text
				text += '</table>'
				return text
		}
		item: {
			onclick: (id) -> return false
		}
		padding: {
			left: 25
			right: 25
		}
		size: {
			height: @_calculateChartHeight()
		}
		legend: {
			show: false
		}
		onresize: @_refreshChartHeight # Manually calculate chart height
	}
_calculateChartHeight: ->
fullHeight = $(@refs.chartInner).height() - 20
# Half-height for only metrics/events
if @props.selectedMetricIds.isEmpty() or @props.progEvents.isEmpty()
# Can return minimum height instead
halfHeight = fullHeight / 2
if halfHeight > minChartHeight
return Math.floor halfHeight
else
return minChartHeight
return fullHeight
_refreshChartHeight: (isForced = false) ->
return unless @_chart?
height = @_calculateChartHeight()
# Skip c3 update if is current height
if not isForced and height is $(@refs.chartDiv).height()
return
# Update event regions' v-positioning if necessary
if not @props.progEvents.isEmpty() and @chartEventsStyling?
@chartEventsStyling.updateChartHeight(height)
# Proceed with resizing the chart itself
@_chart.resize {height}
_refreshSelectedMetrics: ->
	# Shows only the currently-selected metric series, assigning each a
	# stable palette colour, then reports the colour map back to the parent
	@_chart.hide(null, {withLegend: true})
	unless @props.selectedMetricIds.size > 0
		# No metrics selected: keep the hidden dummy series visible
		# for events to work
		@_chart.show(hiddenId)
		return
	@props.selectedMetricIds.forEach (metricId) =>
		# choose metric color from palette
		# todo: move to analysis tab to save a render?
		# do not change the color if already set for this metric
		return if palette.includes(@_chart.data.colors()[metricId])
		# assign the first palette color that is not already in use
		# (replaces the previous index-keyed loop with an empty then-branch)
		usedColors = Object.values(@_chart.data.colors())
		unusedColor = palette.find (color) -> not usedColors.includes(color)
		if unusedColor?
			@_chart.data.colors({"#{metricId}": unusedColor})
	@_chart.show(@props.selectedMetricIds.toJS(), {withLegend: true})
	# fire metric colors back up to analysis tab
	@props.updateMetricColors Imm.Map(@_chart.data.colors())
_refreshProgEvents: ->
console.log "Refreshing progEvents..."
# Generate c3 regions array
progEventRegions = @_generateProgEventRegions()
# Flush and re-apply regions to c3 chart
@_chart.regions.remove()
# C3 Regions have some kind of animation attached, which
# messes up remove/add
setTimeout(=>
@_chart.regions progEventRegions.toJS()
@_attachKeyBindings()
, 500)
_generateProgEventRegions: ->
# Build Imm.List of region objects
progEventRegions = @props.progEvents
.map (progEvent) =>
eventRegion = {
start: @_toUnixMs progEvent.get('startTimestamp')
class: "progEventRange #{progEvent.get('id')} typeId-"
}
eventRegion['class'] += if progEvent.get('typeId')
progEvent.get('typeId')
else
"null" # typeId-null is how we know it doesn't have an eventType
if Moment(progEvent.get('endTimestamp'), TimestampFormat).isValid()
eventRegion.end = @_toUnixMs progEvent.get('endTimestamp')
# TODO: Classify singular event
return eventRegion
# Sort regions in order of start timestamp
sortedEvents = progEventRegions.sortBy (event) => event['start']
# Setting up vars for row sorting
remainingEvents = sortedEvents
eventRows = Imm.List()
progEvents = Imm.List()
rowIndex = 0
# Process progEvents for regions while remaining events
while remainingEvents.size > 0
# Init new eventRow
eventRows = eventRows.push Imm.List()
# Loop through events, pluck any for the given row with non-conflicting dates
remainingEvents.forEach (thisEvent) =>
thisRow = eventRows.get(rowIndex)
# Can't rely on forEach index, because .delete() offsets it
liveIndex = remainingEvents.indexOf(thisEvent)
# Let's pluck this progEvent if no rows or timestamps don't conflict
if thisRow.size is 0 or (
not thisRow.last().get('end')? or
thisEvent.start >= thisRow.last().get('end')
)
# Append class with row number
progEvent = Imm.fromJS(thisEvent)
newClass = "#{progEvent.get('class')} row#{rowIndex}"
# Convert single-point event date to a short span
if not progEvent.get('end')
startDate = Moment progEvent.get('start')
progEvent = progEvent.set 'end', startDate.clone().add(6, 'hours')
newClass = newClass + " singlePoint"
# Update class (needs to be 'class' for C3js)
progEvent = progEvent.set('class', newClass)
# Update eventRows, remove from remainingEvents
updatedRow = eventRows.get(rowIndex).push progEvent
eventRows = eventRows.set rowIndex, updatedRow
remainingEvents = remainingEvents.delete(liveIndex)
# Concat to final (flat) output for c3
progEvents = progEvents.concat eventRows.get(rowIndex)
rowIndex++
# Determine regions height
chartHeightY = if eventRows.isEmpty() then 1 else 2
# Metrics can be bigger when only 1 progEvent row
if eventRows.size is 1
chartHeightY = 1.5
@setState {eventRows: eventRows.size}
@_chart.axis.max {
y: chartHeightY
}
return progEvents
_attachKeyBindings: ->
# TODO: Make sure listeners are removed when componentWillUnmount
# Find our hidden eventInfo box
eventInfo = $('#eventInfo')
dateFormat = 'Do MMM [at] h:mm A'
@props.progEvents.forEach (progEvent) =>
# Attach hover binding to progEvent region
$('.' + progEvent.get('id')).hover((event) =>
description = progEvent.get('description') or "(no description)"
if description.length > 1000
description = description.substring(0, 2000) + " . . ."
title = progEvent.get('title')
# Tack on eventType to title
# TODO: Do this earlier on, to save redundancy
if progEvent.get('typeId')
eventType = @props.eventTypes.find (eventType) -> eventType.get('id') is progEvent.get('typeId')
eventTypeName = eventType.get('name')
title = if title then "#{title} (#{eventTypeName})" else eventTypeName
eventInfo.addClass('show')
eventInfo.find('.title').text title
eventInfo.find('.description').text(description)
startTimestamp = new Moment(progEvent.get('startTimestamp'), TimestampFormat)
endTimestamp = new Moment(progEvent.get('endTimestamp'), TimestampFormat)
startText = startTimestamp.format(dateFormat)
endText = if endTimestamp.isValid() then endTimestamp.format(dateFormat) else null
if endText?
startText = "From: " + startText
endText = "Until: " + endText
eventInfo.find('.start').text startText
eventInfo.find('.end').text endText
# Make eventInfo follow the mouse
$(win.document).on('mousemove', (event) ->
eventInfo.css 'top', event.clientY + 25
eventInfo.css 'left', event.clientX
)
, =>
# Hide and unbind!
eventInfo.removeClass('show')
$(win.document).off('mousemove')
)
rect = $('.' + progEvent.get('id')).find('rect')[0]
# Fill progEvent region with eventType color if exists
if progEvent.get('typeId') and not @props.eventTypes.isEmpty()
eventType = @props.eventTypes
.find (type) -> type.get('id') is progEvent.get('typeId')
$(rect).attr({
style:
"fill: #{eventType.get('colorKeyHex')} !important;
stroke: #{eventType.get('colorKeyHex')} !important;"
})
else
# At least clear it for non-typed events
$(rect).attr({style: ''})
_toUnixMs: (timestamp) ->
# Converts to unix ms
return Moment(timestamp, TimestampFormat).valueOf()
ChartEventsStyling = React.createFactory React.createClass
displayName: 'ChartEventsStyling'
propTypes: {
eventRows: PropTypes.number.isRequired
progEvents: PropTypes.instanceOf(Imm.List).isRequired
selectedMetricIds: PropTypes.instanceOf(Imm.Set).isRequired
}
getInitialState: -> {
chartHeight: null
}
updateChartHeight: (chartHeight) ->
# This gets called alongside @_chart.resize
@setState {chartHeight}
render: ->
# Calculate scaled height of each region (larger if no metrics)
scaleFactor = if @props.selectedMetricIds.isEmpty() then 0.65 else 0.3
scaleY = (scaleFactor / @props.eventRows).toFixed(2)
R.style({},
(Imm.List([0..@props.eventRows]).map (rowNumber) =>
translateY = rowNumber * @state.chartHeight
return ".chart .c3-regions .c3-region.row#{rowNumber} > rect {
transform: scaleY(#{scaleY}) translateY(#{translateY}px) !important
}"
)
)
return Chart
module.exports = {load}
|
[
{
"context": "xpiry: ''\n focused: null\n namePlaceholder: 'FULL NAME'\n expiryBefore: 'month/year'\n expiryAfter: ",
"end": 307,
"score": 0.9987630844116211,
"start": 298,
"tag": "NAME",
"value": "FULL NAME"
}
] | source/card.cjsx | zouiteryoussef/react-credit-card-master | 240 |
React = require('react')
Payment = require('payment')
images = require('./card-images.js')
validate = Payment.fns
module.exports = React.createClass
displayName: 'Card'
getDefaultProps:->
number: null
cvc: null
name: ''
expiry: ''
focused: null
namePlaceholder: 'FULL NAME'
expiryBefore: 'month/year'
expiryAfter: 'valid thru'
shinyAfterBack: ''
type:null
render:->
isAmex = @state.type and @state.type.name is "amex"
<div className = "#{exp.prefix}__container">
<div className = { "#{exp.prefix} " + @typeClassName() + if @props.focused is "cvc" and not isAmex then " #{exp.prefix}--flipped" else ""} >
<div className = "#{exp.prefix}__front" >
<div className = "#{exp.prefix}__lower">
<div className = "#{exp.prefix}__shiny"/>
<img
className = {"#{exp.prefix}__logo " + @typeClassName()}
src = {images[if @props.type then @props.type else @state.type.name]}
/>
{if isAmex then <div className = {@displayClassName("cvc_front")}>{@getValue("cvc")}</div>}
<div className = {@displayClassName("number")}>{@getValue("number")}</div>
<div className = {@displayClassName("name")} >{@getValue("name")}</div>
<div
className = {@displayClassName("expiry")}
data-before = {@props.expiryBefore}
data-after = {@props.expiryAfter}
>{@getValue("expiry")}</div>
</div>
</div>
<div className = "#{exp.prefix}__back">
<div className = "#{exp.prefix}__bar"/>
<div className = {@displayClassName("cvc")}>{@getValue("cvc")}</div>
<div className = "#{exp.prefix}__shiny" data-after = {@props.shinyAfterBack}/>
</div>
</div>
</div>
displayClassName:(base)->
className = "#{exp.prefix}__" + base + " #{exp.prefix}__display"
if @props.focused is base
className += " #{exp.prefix}--focused"
return className
typeClassName:-> "#{exp.prefix}--" + if @props.type then @props.type else @state.type.name
getValue:(name)-> @[name]()
componentWillMount: -> @updateType(@props)
componentWillReceiveProps:(nextProps)-> @updateType(nextProps)
getInitialState: -> type: {name:"unknown", length: 16}
updateType:(props)->
if !props.number
return @setState type: name:"unknown", length: 16
if type = @props.type or validate.cardType(props.number)
if type is "amex"
return @setState type: name:type, length: 15
else
return @setState type: name:type, length: 16
return @setState type: name:"unknown", length: 16
number:->
if !@props.number
string = ""
else
string = @props.number.toString()
maxLength = @state.type.length
if string.length > maxLength then string = string.slice(0,maxLength)
while string.length < maxLength
string += "•"
if @state.type.name is "amex"
space_index1 = 4;
space_index2 = 10;
string = string.substring(0, space_index1) + " " + string.substring(space_index1, space_index2) + " " + string.substring(space_index2)
else
amountOfSpaces = Math.ceil(maxLength/4)
for i in [1...amountOfSpaces]
space_index = (i*4 + (i - 1))
string = string.slice(0, space_index) + " " + string.slice(space_index)
return string
name:->
if @props.name is ""
return @props.namePlaceholder
else
return @props.name
expiry:->
if @props.expiry is ""
return "••/••"
else
expiry = @props.expiry.toString()
expiryMaxLength = 6 # 2 for month and 4 for year
if expiry.match /\//
expiry = expiry.replace("/", "")
if !expiry.match /^[0-9]*$/
return "••/••"
while expiry.length < 4
expiry += "•"
expiry = expiry.slice(0, 2) + "/" + expiry.slice(2, expiryMaxLength)
return expiry
cvc:->
if @props.cvc is null
return "•••"
else
return if @props.cvc.toString().length <= 4 then @props.cvc else @props.cvc.toString().slice(0, 4)
exp = module.exports
exp.prefix = "react-credit-card"
| 118799 |
React = require('react')
Payment = require('payment')
images = require('./card-images.js')
validate = Payment.fns
module.exports = React.createClass
displayName: 'Card'
getDefaultProps:->
number: null
cvc: null
name: ''
expiry: ''
focused: null
namePlaceholder: '<NAME>'
expiryBefore: 'month/year'
expiryAfter: 'valid thru'
shinyAfterBack: ''
type:null
render:->
isAmex = @state.type and @state.type.name is "amex"
<div className = "#{exp.prefix}__container">
<div className = { "#{exp.prefix} " + @typeClassName() + if @props.focused is "cvc" and not isAmex then " #{exp.prefix}--flipped" else ""} >
<div className = "#{exp.prefix}__front" >
<div className = "#{exp.prefix}__lower">
<div className = "#{exp.prefix}__shiny"/>
<img
className = {"#{exp.prefix}__logo " + @typeClassName()}
src = {images[if @props.type then @props.type else @state.type.name]}
/>
{if isAmex then <div className = {@displayClassName("cvc_front")}>{@getValue("cvc")}</div>}
<div className = {@displayClassName("number")}>{@getValue("number")}</div>
<div className = {@displayClassName("name")} >{@getValue("name")}</div>
<div
className = {@displayClassName("expiry")}
data-before = {@props.expiryBefore}
data-after = {@props.expiryAfter}
>{@getValue("expiry")}</div>
</div>
</div>
<div className = "#{exp.prefix}__back">
<div className = "#{exp.prefix}__bar"/>
<div className = {@displayClassName("cvc")}>{@getValue("cvc")}</div>
<div className = "#{exp.prefix}__shiny" data-after = {@props.shinyAfterBack}/>
</div>
</div>
</div>
displayClassName:(base)->
className = "#{exp.prefix}__" + base + " #{exp.prefix}__display"
if @props.focused is base
className += " #{exp.prefix}--focused"
return className
typeClassName:-> "#{exp.prefix}--" + if @props.type then @props.type else @state.type.name
getValue:(name)-> @[name]()
componentWillMount: -> @updateType(@props)
componentWillReceiveProps:(nextProps)-> @updateType(nextProps)
getInitialState: -> type: {name:"unknown", length: 16}
updateType:(props)->
if !props.number
return @setState type: name:"unknown", length: 16
if type = @props.type or validate.cardType(props.number)
if type is "amex"
return @setState type: name:type, length: 15
else
return @setState type: name:type, length: 16
return @setState type: name:"unknown", length: 16
number:->
if !@props.number
string = ""
else
string = @props.number.toString()
maxLength = @state.type.length
if string.length > maxLength then string = string.slice(0,maxLength)
while string.length < maxLength
string += "•"
if @state.type.name is "amex"
space_index1 = 4;
space_index2 = 10;
string = string.substring(0, space_index1) + " " + string.substring(space_index1, space_index2) + " " + string.substring(space_index2)
else
amountOfSpaces = Math.ceil(maxLength/4)
for i in [1...amountOfSpaces]
space_index = (i*4 + (i - 1))
string = string.slice(0, space_index) + " " + string.slice(space_index)
return string
name:->
if @props.name is ""
return @props.namePlaceholder
else
return @props.name
expiry:->
if @props.expiry is ""
return "••/••"
else
expiry = @props.expiry.toString()
expiryMaxLength = 6 # 2 for month and 4 for year
if expiry.match /\//
expiry = expiry.replace("/", "")
if !expiry.match /^[0-9]*$/
return "••/••"
while expiry.length < 4
expiry += "•"
expiry = expiry.slice(0, 2) + "/" + expiry.slice(2, expiryMaxLength)
return expiry
cvc:->
if @props.cvc is null
return "•••"
else
return if @props.cvc.toString().length <= 4 then @props.cvc else @props.cvc.toString().slice(0, 4)
exp = module.exports
exp.prefix = "react-credit-card"
| true |
React = require('react')
Payment = require('payment')
images = require('./card-images.js')
validate = Payment.fns
module.exports = React.createClass
displayName: 'Card'
getDefaultProps:->
number: null
cvc: null
name: ''
expiry: ''
focused: null
namePlaceholder: 'PI:NAME:<NAME>END_PI'
expiryBefore: 'month/year'
expiryAfter: 'valid thru'
shinyAfterBack: ''
type:null
render:->
isAmex = @state.type and @state.type.name is "amex"
<div className = "#{exp.prefix}__container">
<div className = { "#{exp.prefix} " + @typeClassName() + if @props.focused is "cvc" and not isAmex then " #{exp.prefix}--flipped" else ""} >
<div className = "#{exp.prefix}__front" >
<div className = "#{exp.prefix}__lower">
<div className = "#{exp.prefix}__shiny"/>
<img
className = {"#{exp.prefix}__logo " + @typeClassName()}
src = {images[if @props.type then @props.type else @state.type.name]}
/>
{if isAmex then <div className = {@displayClassName("cvc_front")}>{@getValue("cvc")}</div>}
<div className = {@displayClassName("number")}>{@getValue("number")}</div>
<div className = {@displayClassName("name")} >{@getValue("name")}</div>
<div
className = {@displayClassName("expiry")}
data-before = {@props.expiryBefore}
data-after = {@props.expiryAfter}
>{@getValue("expiry")}</div>
</div>
</div>
<div className = "#{exp.prefix}__back">
<div className = "#{exp.prefix}__bar"/>
<div className = {@displayClassName("cvc")}>{@getValue("cvc")}</div>
<div className = "#{exp.prefix}__shiny" data-after = {@props.shinyAfterBack}/>
</div>
</div>
</div>
displayClassName:(base)->
className = "#{exp.prefix}__" + base + " #{exp.prefix}__display"
if @props.focused is base
className += " #{exp.prefix}--focused"
return className
typeClassName:-> "#{exp.prefix}--" + if @props.type then @props.type else @state.type.name
getValue:(name)-> @[name]()
componentWillMount: -> @updateType(@props)
componentWillReceiveProps:(nextProps)-> @updateType(nextProps)
getInitialState: -> type: {name:"unknown", length: 16}
updateType:(props)->
if !props.number
return @setState type: name:"unknown", length: 16
if type = @props.type or validate.cardType(props.number)
if type is "amex"
return @setState type: name:type, length: 15
else
return @setState type: name:type, length: 16
return @setState type: name:"unknown", length: 16
number:->
if !@props.number
string = ""
else
string = @props.number.toString()
maxLength = @state.type.length
if string.length > maxLength then string = string.slice(0,maxLength)
while string.length < maxLength
string += "•"
if @state.type.name is "amex"
space_index1 = 4;
space_index2 = 10;
string = string.substring(0, space_index1) + " " + string.substring(space_index1, space_index2) + " " + string.substring(space_index2)
else
amountOfSpaces = Math.ceil(maxLength/4)
for i in [1...amountOfSpaces]
space_index = (i*4 + (i - 1))
string = string.slice(0, space_index) + " " + string.slice(space_index)
return string
name:->
if @props.name is ""
return @props.namePlaceholder
else
return @props.name
expiry:->
if @props.expiry is ""
return "••/••"
else
expiry = @props.expiry.toString()
expiryMaxLength = 6 # 2 for month and 4 for year
if expiry.match /\//
expiry = expiry.replace("/", "")
if !expiry.match /^[0-9]*$/
return "••/••"
while expiry.length < 4
expiry += "•"
expiry = expiry.slice(0, 2) + "/" + expiry.slice(2, expiryMaxLength)
return expiry
cvc:->
if @props.cvc is null
return "•••"
else
return if @props.cvc.toString().length <= 4 then @props.cvc else @props.cvc.toString().slice(0, 4)
exp = module.exports
exp.prefix = "react-credit-card"
|
[
{
"context": "t-er-animating\n# requestAnimationFrame polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel.\n#++++++++",
"end": 288,
"score": 0.9998925924301147,
"start": 277,
"tag": "NAME",
"value": "Erik Möller"
},
{
"context": "polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel.\n#+++++++++++++++++++++++++++++++++++\n# @author ",
"end": 327,
"score": 0.9998359680175781,
"start": 316,
"tag": "NAME",
"value": "Tino Zijdel"
},
{
"context": "#+++++++++++++++++++++++++++++++++++\n# @author : Prabhat Kumar\n# @license : Apache License [v2.0]\n# Copyright ©",
"end": 393,
"score": 0.9999015927314758,
"start": 380,
"tag": "NAME",
"value": "Prabhat Kumar"
},
{
"context": "cense : Apache License [v2.0]\n# Copyright © 2015, Prabhat Kumar. All rights reserved.\n###\ndo ->\n lastTime = 0\n ",
"end": 463,
"score": 0.9998976588249207,
"start": 450,
"tag": "NAME",
"value": "Prabhat Kumar"
}
] | src/assets/coffee/module/helper/rAF.coffee | BioD3/BioD3 | 4 | ###!
# requestAnimationFrame CoffeeScript
# ==================================
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by Erik Möller. fixes from Paul Irish and Tino Zijdel.
#+++++++++++++++++++++++++++++++++++
# @author : Prabhat Kumar
# @license : Apache License [v2.0]
# Copyright © 2015, Prabhat Kumar. All rights reserved.
###
do ->
lastTime = 0
vendors = [
'ms'
'moz'
'webkit'
'o'
]
x = 0
while x < vendors.length and !window.requestAnimationFrame
window.requestAnimationFrame = window[vendors[x] + 'RequestAnimationFrame']
window.cancelAnimationFrame = window[vendors[x] + 'CancelAnimationFrame'] or window[vendors[x] + 'CancelRequestAnimationFrame']
++x
if !window.requestAnimationFrame
window.requestAnimationFrame = (callback, element) ->
currTime = (new Date).getTime()
timeToCall = Math.max(0, 16 - (currTime - lastTime))
id = window.setTimeout((->
callback currTime + timeToCall
return
), timeToCall)
lastTime = currTime + timeToCall
id
if !window.cancelAnimationFrame
window.cancelAnimationFrame = (id) ->
clearTimeout id
return
return
# ---
# generated by js2coffee [JavaScript to CoffeeScript compiler]
| 94026 | ###!
# requestAnimationFrame CoffeeScript
# ==================================
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by <NAME>. fixes from Paul Irish and <NAME>.
#+++++++++++++++++++++++++++++++++++
# @author : <NAME>
# @license : Apache License [v2.0]
# Copyright © 2015, <NAME>. All rights reserved.
###
do ->
lastTime = 0
vendors = [
'ms'
'moz'
'webkit'
'o'
]
x = 0
while x < vendors.length and !window.requestAnimationFrame
window.requestAnimationFrame = window[vendors[x] + 'RequestAnimationFrame']
window.cancelAnimationFrame = window[vendors[x] + 'CancelAnimationFrame'] or window[vendors[x] + 'CancelRequestAnimationFrame']
++x
if !window.requestAnimationFrame
window.requestAnimationFrame = (callback, element) ->
currTime = (new Date).getTime()
timeToCall = Math.max(0, 16 - (currTime - lastTime))
id = window.setTimeout((->
callback currTime + timeToCall
return
), timeToCall)
lastTime = currTime + timeToCall
id
if !window.cancelAnimationFrame
window.cancelAnimationFrame = (id) ->
clearTimeout id
return
return
# ---
# generated by js2coffee [JavaScript to CoffeeScript compiler]
| true | ###!
# requestAnimationFrame CoffeeScript
# ==================================
# http://paulirish.com/2011/requestanimationframe-for-smart-animating/
# http://my.opera.com/emoller/blog/2011/12/20/requestanimationframe-for-smart-er-animating
# requestAnimationFrame polyfill by PI:NAME:<NAME>END_PI. fixes from Paul Irish and PI:NAME:<NAME>END_PI.
#+++++++++++++++++++++++++++++++++++
# @author : PI:NAME:<NAME>END_PI
# @license : Apache License [v2.0]
# Copyright © 2015, PI:NAME:<NAME>END_PI. All rights reserved.
###
do ->
lastTime = 0
vendors = [
'ms'
'moz'
'webkit'
'o'
]
x = 0
while x < vendors.length and !window.requestAnimationFrame
window.requestAnimationFrame = window[vendors[x] + 'RequestAnimationFrame']
window.cancelAnimationFrame = window[vendors[x] + 'CancelAnimationFrame'] or window[vendors[x] + 'CancelRequestAnimationFrame']
++x
if !window.requestAnimationFrame
window.requestAnimationFrame = (callback, element) ->
currTime = (new Date).getTime()
timeToCall = Math.max(0, 16 - (currTime - lastTime))
id = window.setTimeout((->
callback currTime + timeToCall
return
), timeToCall)
lastTime = currTime + timeToCall
id
if !window.cancelAnimationFrame
window.cancelAnimationFrame = (id) ->
clearTimeout id
return
return
# ---
# generated by js2coffee [JavaScript to CoffeeScript compiler]
|
[
{
"context": "s\n [v, w] = edge\n if v < w\n key = \"#{v},#{w}\"\n else\n key = \"#{w},#{v}\"\n unless",
"end": 1517,
"score": 0.9982298016548157,
"start": 1506,
"tag": "KEY",
"value": "\"#{v},#{w}\""
},
{
"context": " key = \"#{v},#{w}\"\n else\n key = \"#{w},#{v}\"\n unless key of seen\n seen[key] = id\n ",
"end": 1554,
"score": 0.9982848763465881,
"start": 1543,
"tag": "KEY",
"value": "\"#{w},#{v}\""
},
{
"context": "+1]\n for yt in [yr, yr-1, yr+1]\n key = \"#{xt},#{yt}\"\n for v in @hash[key] ? []\n if @ep",
"end": 2625,
"score": 0.9992113709449768,
"start": 2612,
"tag": "KEY",
"value": "\"#{xt},#{yt}\""
},
{
"context": "silon)\n yr = Math.round(y * @epsilon)\n key = \"#{xr},#{yr}\"\n\n insert: (coord) ->\n v = @lookup coord\n r",
"end": 2879,
"score": 0.9989237785339355,
"start": 2866,
"tag": "KEY",
"value": "\"#{xr},#{yr}\""
}
] | src/filter.coffee | Alex-Padron/Paper | 1 | geom = require './geom'
filter = exports
filter.edgesAssigned = (fold, target) ->
i for assignment, i in fold.edges_assignment when assignment == target
filter.mountainEdges = (fold) ->
assignment.edgesAssigned fold, 'M'
filter.valleyEdges = (fold) ->
assignment.edgesAssigned fold, 'V'
filter.flatEdges = (fold) ->
assignment.edgesAssigned fold, 'F'
filter.boundaryEdges = (fold) ->
assignment.edgesAssigned fold, 'B'
filter.unassignedEdges = (fold) ->
assignment.edgesAssigned fold, 'F'
filter.keysStartingWith = (fold, prefix) ->
key for key of fold when key[...prefix.length] == prefix
filter.keysEndingWith = (fold, suffix) ->
key for key of fold when key[-suffix.length..] == suffix
filter.remapField = (fold, field, old2new) ->
###
old2new: null means throw away that object
###
new2old = []
for j, i in old2new ## later overwrites earlier
new2old[j] = i if j?
for key in filter.keysStartingWith fold, field + '_'
fold[key] = (fold[key][old] for old in new2old)
for key in filter.keysEndingWith fold, '_' + field
fold[key] = (old2new[old] for old in array for array in fold[key])
fold
filter.remapFieldSubset = (fold, field, keep) ->
id = 0
old2new =
for value in keep
if value
id++
else
null ## remove
filter.remapField fold, field, old2new
filter.removeDuplicateEdges_vertices = (fold) ->
seen = {}
id = 0
old2new =
for edge in fold.edges_vertices
[v, w] = edge
if v < w
key = "#{v},#{w}"
else
key = "#{w},#{v}"
unless key of seen
seen[key] = id
id += 1
seen[key]
filter.remapField fold, 'edges', old2new
filter.edges_verticesIncident = (e1, e2) ->
for v in e1
if v in e2
return true
false
## Use hashing to find points within an epsilon > 0 distance from each other.
## Each integer cell will have O(1) distinct points before matching
## (number of disjoint half-unit disks that fit in a unit square).
class RepeatedPointsDS
constructor: (@vertices_coords, @epsilon) ->
## Note: if vertices_coords has some duplicates in the initial state,
## then we will detect them but won't remove them here. Rather,
## future duplicate inserts will return the higher-index vertex.
@hash = {}
for coord, v in @vertices_coords
key = @key coord
@hash[key] = [] unless key of @hash
@hash[key].push v
null
lookup: (coord) ->
[x, y] = coord
xr = Math.round(x / @epsilon)
yr = Math.round(y / @epsilon)
for xt in [xr, xr-1, xr+1]
for yt in [yr, yr-1, yr+1]
key = "#{xt},#{yt}"
for v in @hash[key] ? []
if @epsilon > geom.dist @vertices_coords[v], coord
return v
null
key: (coord) ->
[x, y] = coord
xr = Math.round(x * @epsilon)
yr = Math.round(y * @epsilon)
key = "#{xr},#{yr}"
insert: (coord) ->
v = @lookup coord
return v if v?
key = @key coord
@hash[key] = [] unless key of @hash
@hash[key].push v = @vertices_coords.length
@vertices_coords.push coord
v
filter.collapseNearbyVertices = (fold, epsilon) ->
vertices = new RepeatedPointsDS [], epsilon
old2new =
for coords in fold.vertices_coords
vertices.insert coords
filter.remapField fold, 'vertices', old2new
## In particular: fold.vertices_coords = vertices.vertices_coords
filter.removeLoopEdges = (fold) ->
###
Remove edges whose endpoints are identical. After collapsing via
`filter.collapseNearbyVertices`, this removes epsilon-length edges.
###
filter.remapFieldSubset fold, 'edges',
for edge in fold.edges_vertices
edge[0] != edge[1]
filter.subdivideCrossingEdges_vertices = (fold, epsilon) ->
###
Takes quadratic time. xxx Should be O(n log n) via plane sweep.
###
## Handle overlapping edges by subdividing edges at any vertices on them.
for p, v in fold.vertices_coords
for e, i in fold.edges_vertices
continue if v in e
s = (fold.vertices_coords[u] for u in e)
if geom.pointStrictlyInSegment p, s ## implicit epsilon
#console.log p, 'in', s
fold.edges_vertices.push [v, e[1]]
e[1] = v
filter.removeDuplicateEdges_vertices fold
filter.removeLoopEdges fold
## Handle crossing edges
vertices = new RepeatedPointsDS fold.vertices_coords, epsilon
for e1, i1 in fold.edges_vertices
s1 = (fold.vertices_coords[v] for v in e1)
for e2, i2 in fold.edges_vertices[...i1]
s2 = (fold.vertices_coords[v] for v in e2)
if not filter.edges_verticesIncident(e1, e2) and geom.segmentsCross s1, s2
## segment intersection is too sensitive a test;
## segmentsCross more reliable
#cross = segmentIntersectSegment s1, s2
cross = geom.lineIntersectLine s1, s2
crossI = vertices.insert cross
#console.log e1, s1, 'intersects', e2, s2, 'at', cross, crossI
unless crossI in e1 and crossI in e2 ## don't add endpoint again
#console.log e1, e2, '->'
unless crossI in e1
fold.edges_vertices.push [crossI, e1[1]]
e1[1] = crossI
#console.log '->', e1, fold.edges_vertices[fold.edges_vertices.length-1]
unless crossI in e2
fold.edges_vertices.push [crossI, e2[1]]
e2[1] = crossI
#console.log '->', e2, fold.edges_vertices[fold.edges_vertices.length-1]
# xxx should renumber other edges arrays?
fold
filter.edges_vertices_to_vertices_vertices = (fold) ->
###
Works for abstract structures, so NOT SORTED.
Use sort_vertices_vertices to sort in counterclockwise order.
###
vertices_vertices = []
for edge in fold.edges_vertices
[v, w] = edge
while v >= vertices_vertices.length
vertices_vertices.push []
while w >= vertices_vertices.length
vertices_vertices.push []
vertices_vertices[v].push w
vertices_vertices[w].push v
vertices_vertices
| 49842 | geom = require './geom'
filter = exports
filter.edgesAssigned = (fold, target) ->
i for assignment, i in fold.edges_assignment when assignment == target
filter.mountainEdges = (fold) ->
assignment.edgesAssigned fold, 'M'
filter.valleyEdges = (fold) ->
assignment.edgesAssigned fold, 'V'
filter.flatEdges = (fold) ->
assignment.edgesAssigned fold, 'F'
filter.boundaryEdges = (fold) ->
assignment.edgesAssigned fold, 'B'
filter.unassignedEdges = (fold) ->
assignment.edgesAssigned fold, 'F'
filter.keysStartingWith = (fold, prefix) ->
key for key of fold when key[...prefix.length] == prefix
filter.keysEndingWith = (fold, suffix) ->
key for key of fold when key[-suffix.length..] == suffix
filter.remapField = (fold, field, old2new) ->
###
old2new: null means throw away that object
###
new2old = []
for j, i in old2new ## later overwrites earlier
new2old[j] = i if j?
for key in filter.keysStartingWith fold, field + '_'
fold[key] = (fold[key][old] for old in new2old)
for key in filter.keysEndingWith fold, '_' + field
fold[key] = (old2new[old] for old in array for array in fold[key])
fold
filter.remapFieldSubset = (fold, field, keep) ->
id = 0
old2new =
for value in keep
if value
id++
else
null ## remove
filter.remapField fold, field, old2new
filter.removeDuplicateEdges_vertices = (fold) ->
seen = {}
id = 0
old2new =
for edge in fold.edges_vertices
[v, w] = edge
if v < w
key = <KEY>
else
key = <KEY>
unless key of seen
seen[key] = id
id += 1
seen[key]
filter.remapField fold, 'edges', old2new
filter.edges_verticesIncident = (e1, e2) ->
for v in e1
if v in e2
return true
false
## Use hashing to find points within an epsilon > 0 distance from each other.
## Each integer cell will have O(1) distinct points before matching
## (number of disjoint half-unit disks that fit in a unit square).
class RepeatedPointsDS
constructor: (@vertices_coords, @epsilon) ->
## Note: if vertices_coords has some duplicates in the initial state,
## then we will detect them but won't remove them here. Rather,
## future duplicate inserts will return the higher-index vertex.
@hash = {}
for coord, v in @vertices_coords
key = @key coord
@hash[key] = [] unless key of @hash
@hash[key].push v
null
lookup: (coord) ->
[x, y] = coord
xr = Math.round(x / @epsilon)
yr = Math.round(y / @epsilon)
for xt in [xr, xr-1, xr+1]
for yt in [yr, yr-1, yr+1]
key = <KEY>
for v in @hash[key] ? []
if @epsilon > geom.dist @vertices_coords[v], coord
return v
null
key: (coord) ->
[x, y] = coord
xr = Math.round(x * @epsilon)
yr = Math.round(y * @epsilon)
key = <KEY>
insert: (coord) ->
v = @lookup coord
return v if v?
key = @key coord
@hash[key] = [] unless key of @hash
@hash[key].push v = @vertices_coords.length
@vertices_coords.push coord
v
filter.collapseNearbyVertices = (fold, epsilon) ->
vertices = new RepeatedPointsDS [], epsilon
old2new =
for coords in fold.vertices_coords
vertices.insert coords
filter.remapField fold, 'vertices', old2new
## In particular: fold.vertices_coords = vertices.vertices_coords
filter.removeLoopEdges = (fold) ->
###
Remove edges whose endpoints are identical. After collapsing via
`filter.collapseNearbyVertices`, this removes epsilon-length edges.
###
filter.remapFieldSubset fold, 'edges',
for edge in fold.edges_vertices
edge[0] != edge[1]
filter.subdivideCrossingEdges_vertices = (fold, epsilon) ->
###
Takes quadratic time. xxx Should be O(n log n) via plane sweep.
###
## Handle overlapping edges by subdividing edges at any vertices on them.
for p, v in fold.vertices_coords
for e, i in fold.edges_vertices
continue if v in e
s = (fold.vertices_coords[u] for u in e)
if geom.pointStrictlyInSegment p, s ## implicit epsilon
#console.log p, 'in', s
fold.edges_vertices.push [v, e[1]]
e[1] = v
filter.removeDuplicateEdges_vertices fold
filter.removeLoopEdges fold
## Handle crossing edges
vertices = new RepeatedPointsDS fold.vertices_coords, epsilon
for e1, i1 in fold.edges_vertices
s1 = (fold.vertices_coords[v] for v in e1)
for e2, i2 in fold.edges_vertices[...i1]
s2 = (fold.vertices_coords[v] for v in e2)
if not filter.edges_verticesIncident(e1, e2) and geom.segmentsCross s1, s2
## segment intersection is too sensitive a test;
## segmentsCross more reliable
#cross = segmentIntersectSegment s1, s2
cross = geom.lineIntersectLine s1, s2
crossI = vertices.insert cross
#console.log e1, s1, 'intersects', e2, s2, 'at', cross, crossI
unless crossI in e1 and crossI in e2 ## don't add endpoint again
#console.log e1, e2, '->'
unless crossI in e1
fold.edges_vertices.push [crossI, e1[1]]
e1[1] = crossI
#console.log '->', e1, fold.edges_vertices[fold.edges_vertices.length-1]
unless crossI in e2
fold.edges_vertices.push [crossI, e2[1]]
e2[1] = crossI
#console.log '->', e2, fold.edges_vertices[fold.edges_vertices.length-1]
# xxx should renumber other edges arrays?
fold
filter.edges_vertices_to_vertices_vertices = (fold) ->
###
Works for abstract structures, so NOT SORTED.
Use sort_vertices_vertices to sort in counterclockwise order.
###
vertices_vertices = []
for edge in fold.edges_vertices
[v, w] = edge
while v >= vertices_vertices.length
vertices_vertices.push []
while w >= vertices_vertices.length
vertices_vertices.push []
vertices_vertices[v].push w
vertices_vertices[w].push v
vertices_vertices
| true | geom = require './geom'
filter = exports
filter.edgesAssigned = (fold, target) ->
i for assignment, i in fold.edges_assignment when assignment == target
filter.mountainEdges = (fold) ->
assignment.edgesAssigned fold, 'M'
filter.valleyEdges = (fold) ->
assignment.edgesAssigned fold, 'V'
filter.flatEdges = (fold) ->
assignment.edgesAssigned fold, 'F'
filter.boundaryEdges = (fold) ->
assignment.edgesAssigned fold, 'B'
filter.unassignedEdges = (fold) ->
assignment.edgesAssigned fold, 'F'
filter.keysStartingWith = (fold, prefix) ->
key for key of fold when key[...prefix.length] == prefix
filter.keysEndingWith = (fold, suffix) ->
key for key of fold when key[-suffix.length..] == suffix
filter.remapField = (fold, field, old2new) ->
###
old2new: null means throw away that object
###
new2old = []
for j, i in old2new ## later overwrites earlier
new2old[j] = i if j?
for key in filter.keysStartingWith fold, field + '_'
fold[key] = (fold[key][old] for old in new2old)
for key in filter.keysEndingWith fold, '_' + field
fold[key] = (old2new[old] for old in array for array in fold[key])
fold
filter.remapFieldSubset = (fold, field, keep) ->
id = 0
old2new =
for value in keep
if value
id++
else
null ## remove
filter.remapField fold, field, old2new
filter.removeDuplicateEdges_vertices = (fold) ->
seen = {}
id = 0
old2new =
for edge in fold.edges_vertices
[v, w] = edge
if v < w
key = PI:KEY:<KEY>END_PI
else
key = PI:KEY:<KEY>END_PI
unless key of seen
seen[key] = id
id += 1
seen[key]
filter.remapField fold, 'edges', old2new
filter.edges_verticesIncident = (e1, e2) ->
for v in e1
if v in e2
return true
false
## Use hashing to find points within an epsilon > 0 distance from each other.
## Each integer cell will have O(1) distinct points before matching
## (number of disjoint half-unit disks that fit in a unit square).
class RepeatedPointsDS
constructor: (@vertices_coords, @epsilon) ->
## Note: if vertices_coords has some duplicates in the initial state,
## then we will detect them but won't remove them here. Rather,
## future duplicate inserts will return the higher-index vertex.
@hash = {}
for coord, v in @vertices_coords
key = @key coord
@hash[key] = [] unless key of @hash
@hash[key].push v
null
lookup: (coord) ->
[x, y] = coord
xr = Math.round(x / @epsilon)
yr = Math.round(y / @epsilon)
for xt in [xr, xr-1, xr+1]
for yt in [yr, yr-1, yr+1]
key = PI:KEY:<KEY>END_PI
for v in @hash[key] ? []
if @epsilon > geom.dist @vertices_coords[v], coord
return v
null
key: (coord) ->
[x, y] = coord
xr = Math.round(x * @epsilon)
yr = Math.round(y * @epsilon)
key = PI:KEY:<KEY>END_PI
insert: (coord) ->
v = @lookup coord
return v if v?
key = @key coord
@hash[key] = [] unless key of @hash
@hash[key].push v = @vertices_coords.length
@vertices_coords.push coord
v
filter.collapseNearbyVertices = (fold, epsilon) ->
vertices = new RepeatedPointsDS [], epsilon
old2new =
for coords in fold.vertices_coords
vertices.insert coords
filter.remapField fold, 'vertices', old2new
## In particular: fold.vertices_coords = vertices.vertices_coords
filter.removeLoopEdges = (fold) ->
###
Remove edges whose endpoints are identical. After collapsing via
`filter.collapseNearbyVertices`, this removes epsilon-length edges.
###
filter.remapFieldSubset fold, 'edges',
for edge in fold.edges_vertices
edge[0] != edge[1]
filter.subdivideCrossingEdges_vertices = (fold, epsilon) ->
###
Takes quadratic time. xxx Should be O(n log n) via plane sweep.
###
## Handle overlapping edges by subdividing edges at any vertices on them.
for p, v in fold.vertices_coords
for e, i in fold.edges_vertices
continue if v in e
s = (fold.vertices_coords[u] for u in e)
if geom.pointStrictlyInSegment p, s ## implicit epsilon
#console.log p, 'in', s
fold.edges_vertices.push [v, e[1]]
e[1] = v
filter.removeDuplicateEdges_vertices fold
filter.removeLoopEdges fold
## Handle crossing edges
vertices = new RepeatedPointsDS fold.vertices_coords, epsilon
for e1, i1 in fold.edges_vertices
s1 = (fold.vertices_coords[v] for v in e1)
for e2, i2 in fold.edges_vertices[...i1]
s2 = (fold.vertices_coords[v] for v in e2)
if not filter.edges_verticesIncident(e1, e2) and geom.segmentsCross s1, s2
## segment intersection is too sensitive a test;
## segmentsCross more reliable
#cross = segmentIntersectSegment s1, s2
cross = geom.lineIntersectLine s1, s2
crossI = vertices.insert cross
#console.log e1, s1, 'intersects', e2, s2, 'at', cross, crossI
unless crossI in e1 and crossI in e2 ## don't add endpoint again
#console.log e1, e2, '->'
unless crossI in e1
fold.edges_vertices.push [crossI, e1[1]]
e1[1] = crossI
#console.log '->', e1, fold.edges_vertices[fold.edges_vertices.length-1]
unless crossI in e2
fold.edges_vertices.push [crossI, e2[1]]
e2[1] = crossI
#console.log '->', e2, fold.edges_vertices[fold.edges_vertices.length-1]
# xxx should renumber other edges arrays?
fold
filter.edges_vertices_to_vertices_vertices = (fold) ->
###
Works for abstract structures, so NOT SORTED.
Use sort_vertices_vertices to sort in counterclockwise order.
###
vertices_vertices = []
for edge in fold.edges_vertices
[v, w] = edge
while v >= vertices_vertices.length
vertices_vertices.push []
while w >= vertices_vertices.length
vertices_vertices.push []
vertices_vertices[v].push w
vertices_vertices[w].push v
vertices_vertices
|
[
{
"context": "EditorUpload\n\n defaults: {\n csrfToken: \"csrf_token\",\n target: \"target url\",\n placehold",
"end": 192,
"score": 0.9446236491203308,
"start": 182,
"tag": "PASSWORD",
"value": "csrf_token"
}
] | spirit/core/static/spirit/scripts/src/editor_file_upload.coffee | Ke-xueting/Spirit | 974 | ###
Markdown editor image upload, should be loaded before $.editor()
requires: util.js
###
utils = stModules.utils
class EditorUpload
defaults: {
csrfToken: "csrf_token",
target: "target url",
placeholderText: "uploading {name}",
allowedFileMedia: ["*/*"]
}
_meta: {
fieldName: "file",
tag: "[{text}]({url})",
elm: ".js-box-file"
}
constructor: (el, options=null, meta=null) ->
@el = el
@options = Object.assign({}, @defaults, options or {})
@meta = Object.assign({}, @_meta, meta or {})
@textBox = el.querySelector('textarea')
@formFile = document.createElement('form')
@inputFile = document.createElement('input')
@inputFile.type = "file"
@inputFile.accept = @options.allowedFileMedia
@setUp()
setUp: ->
@formFile.appendChild(@inputFile)
@inputFile.addEventListener('change', @sendFile)
@el.querySelector(@meta.elm).addEventListener('click', @openFileDialog)
sendFile: =>
# todo: allow many files
file = @inputFile.files[0]
placeholder = @addPlaceholder(file)
formData = @buildFormData(file)
# Reset the input fixes uploading the same image twice
@formFile.reset()
headers = new Headers()
headers.append("X-Requested-With", "XMLHttpRequest")
fetch(@options.target, {
method: "POST",
headers: headers,
credentials: 'same-origin',
body: formData
})
.then((response) =>
if not response.ok
throw new Error("error: #{response.status} #{response.statusText}")
return response.json() # Promise
)
.then((data) =>
if "url" of data
@addFile(file.name, data.url, placeholder)
else
@addError(JSON.stringify(data.error), placeholder)
)
.catch((error) =>
console.log(error.message)
@addError(error.message, placeholder)
)
return
addPlaceholder: (file) =>
placeholder = utils.format(@meta.tag, {
text: utils.format(@options.placeholderText, {name: file.name}),
url: ""})
# todo: add at current pointer position
@textBox.value += placeholder
return placeholder
buildFormData: (file) =>
formData = new FormData()
formData.append('csrfmiddlewaretoken', @options.csrfToken)
formData.append(@meta.fieldName, file)
return formData
addFile: (name, url, placeholder) =>
imageTag = utils.format(@meta.tag, {text: name, url: url})
@textReplace(placeholder, imageTag)
addError: (error, placeholder) =>
@textReplace(
placeholder,
utils.format(@meta.tag, {text: error, url: ""}))
textReplace: (find, replace) =>
# todo: put current pointer position back
@textBox.value = @textBox.value.replace(find, replace)
return
openFileDialog: (e) =>
e.preventDefault()
e.stopPropagation()
# Avoid default editor button-click handler
e.stopImmediatePropagation()
@inputFile.click()
stModules.editorFileUpload = (elms, options) ->
return Array.from(elms).map((elm) -> new EditorUpload(elm, options))
stModules.editorImageUpload = (elms, options) ->
return Array.from(elms).map((elm) -> new EditorUpload(elm, options, {
fieldName: "image",
tag: "",
elm: ".js-box-image"
}))
stModules.EditorUpload = EditorUpload
| 118086 | ###
Markdown editor image upload, should be loaded before $.editor()
requires: util.js
###
utils = stModules.utils
class EditorUpload
defaults: {
csrfToken: "<PASSWORD>",
target: "target url",
placeholderText: "uploading {name}",
allowedFileMedia: ["*/*"]
}
_meta: {
fieldName: "file",
tag: "[{text}]({url})",
elm: ".js-box-file"
}
constructor: (el, options=null, meta=null) ->
@el = el
@options = Object.assign({}, @defaults, options or {})
@meta = Object.assign({}, @_meta, meta or {})
@textBox = el.querySelector('textarea')
@formFile = document.createElement('form')
@inputFile = document.createElement('input')
@inputFile.type = "file"
@inputFile.accept = @options.allowedFileMedia
@setUp()
setUp: ->
@formFile.appendChild(@inputFile)
@inputFile.addEventListener('change', @sendFile)
@el.querySelector(@meta.elm).addEventListener('click', @openFileDialog)
sendFile: =>
# todo: allow many files
file = @inputFile.files[0]
placeholder = @addPlaceholder(file)
formData = @buildFormData(file)
# Reset the input fixes uploading the same image twice
@formFile.reset()
headers = new Headers()
headers.append("X-Requested-With", "XMLHttpRequest")
fetch(@options.target, {
method: "POST",
headers: headers,
credentials: 'same-origin',
body: formData
})
.then((response) =>
if not response.ok
throw new Error("error: #{response.status} #{response.statusText}")
return response.json() # Promise
)
.then((data) =>
if "url" of data
@addFile(file.name, data.url, placeholder)
else
@addError(JSON.stringify(data.error), placeholder)
)
.catch((error) =>
console.log(error.message)
@addError(error.message, placeholder)
)
return
addPlaceholder: (file) =>
placeholder = utils.format(@meta.tag, {
text: utils.format(@options.placeholderText, {name: file.name}),
url: ""})
# todo: add at current pointer position
@textBox.value += placeholder
return placeholder
buildFormData: (file) =>
formData = new FormData()
formData.append('csrfmiddlewaretoken', @options.csrfToken)
formData.append(@meta.fieldName, file)
return formData
addFile: (name, url, placeholder) =>
imageTag = utils.format(@meta.tag, {text: name, url: url})
@textReplace(placeholder, imageTag)
addError: (error, placeholder) =>
@textReplace(
placeholder,
utils.format(@meta.tag, {text: error, url: ""}))
textReplace: (find, replace) =>
# todo: put current pointer position back
@textBox.value = @textBox.value.replace(find, replace)
return
openFileDialog: (e) =>
e.preventDefault()
e.stopPropagation()
# Avoid default editor button-click handler
e.stopImmediatePropagation()
@inputFile.click()
stModules.editorFileUpload = (elms, options) ->
return Array.from(elms).map((elm) -> new EditorUpload(elm, options))
stModules.editorImageUpload = (elms, options) ->
return Array.from(elms).map((elm) -> new EditorUpload(elm, options, {
fieldName: "image",
tag: "",
elm: ".js-box-image"
}))
stModules.EditorUpload = EditorUpload
| true | ###
Markdown editor image upload, should be loaded before $.editor()
requires: util.js
###
utils = stModules.utils
class EditorUpload
defaults: {
csrfToken: "PI:PASSWORD:<PASSWORD>END_PI",
target: "target url",
placeholderText: "uploading {name}",
allowedFileMedia: ["*/*"]
}
_meta: {
fieldName: "file",
tag: "[{text}]({url})",
elm: ".js-box-file"
}
constructor: (el, options=null, meta=null) ->
@el = el
@options = Object.assign({}, @defaults, options or {})
@meta = Object.assign({}, @_meta, meta or {})
@textBox = el.querySelector('textarea')
@formFile = document.createElement('form')
@inputFile = document.createElement('input')
@inputFile.type = "file"
@inputFile.accept = @options.allowedFileMedia
@setUp()
setUp: ->
@formFile.appendChild(@inputFile)
@inputFile.addEventListener('change', @sendFile)
@el.querySelector(@meta.elm).addEventListener('click', @openFileDialog)
sendFile: =>
# todo: allow many files
file = @inputFile.files[0]
placeholder = @addPlaceholder(file)
formData = @buildFormData(file)
# Reset the input fixes uploading the same image twice
@formFile.reset()
headers = new Headers()
headers.append("X-Requested-With", "XMLHttpRequest")
fetch(@options.target, {
method: "POST",
headers: headers,
credentials: 'same-origin',
body: formData
})
.then((response) =>
if not response.ok
throw new Error("error: #{response.status} #{response.statusText}")
return response.json() # Promise
)
.then((data) =>
if "url" of data
@addFile(file.name, data.url, placeholder)
else
@addError(JSON.stringify(data.error), placeholder)
)
.catch((error) =>
console.log(error.message)
@addError(error.message, placeholder)
)
return
addPlaceholder: (file) =>
placeholder = utils.format(@meta.tag, {
text: utils.format(@options.placeholderText, {name: file.name}),
url: ""})
# todo: add at current pointer position
@textBox.value += placeholder
return placeholder
buildFormData: (file) =>
formData = new FormData()
formData.append('csrfmiddlewaretoken', @options.csrfToken)
formData.append(@meta.fieldName, file)
return formData
addFile: (name, url, placeholder) =>
imageTag = utils.format(@meta.tag, {text: name, url: url})
@textReplace(placeholder, imageTag)
addError: (error, placeholder) =>
@textReplace(
placeholder,
utils.format(@meta.tag, {text: error, url: ""}))
textReplace: (find, replace) =>
# todo: put current pointer position back
@textBox.value = @textBox.value.replace(find, replace)
return
openFileDialog: (e) =>
e.preventDefault()
e.stopPropagation()
# Avoid default editor button-click handler
e.stopImmediatePropagation()
@inputFile.click()
stModules.editorFileUpload = (elms, options) ->
return Array.from(elms).map((elm) -> new EditorUpload(elm, options))
stModules.editorImageUpload = (elms, options) ->
return Array.from(elms).map((elm) -> new EditorUpload(elm, options, {
fieldName: "image",
tag: "",
elm: ".js-box-image"
}))
stModules.EditorUpload = EditorUpload
|
[
{
"context": "eu.ch\" onClick={TrackEvent.link('Signup:Footer')}>Greg Leuch</a>.</p>\n <p className=\"disclaimer\"><Col",
"end": 2083,
"score": 0.9992014169692993,
"start": 2073,
"tag": "NAME",
"value": "Greg Leuch"
}
] | app/assets/javascripts/components/pages/signup.js.jsx.coffee | gleuch/color-camp | 1 | @ColorSignupPage = React.createClass
render : ->
`<ColorSignupLogin {...this.props} signup={true} />`
@ColorLoginPage = React.createClass
render : ->
`<ColorSignupLogin {...this.props} signup={false} />`
@ColorSignupLogin = React.createClass
getInitialState : ->
{ }
componentDidMount : ->
#
componentWillUnmount : ->
#
componentWillUpdate : (p,s)->
#
componentDidUpdate : (p,s)->
#
render : ->
title = 'Signup/Login for ' + ColorInitialProps.default_title
if this.props.current_user
content = `<div>
<p>You are currently logged in as {this.props.current_user.name}. <ColorLink to={'/u/' + this.props.current_user.login}>Click here to view your profile</ColorLink></p>
</div>`
else
signin_providers = []
this.props.signin_providers.map (provider)->
providerName = provider.name.toLowerCase()
signin_providers.push `
<li key={providerName}>
<a href={provider.url} className={"btn btn-default btn-" + providerName} onClick={TrackEvent.link('Signup')}>connect via {providerName}</a>
</li>`
content = `<span>
<section>
<h1>signup / login</h1>
<p>to see a visual history of your browsing experience is a simple as signing in through Facebook or Twitter. after that, just download the Google Chrome extension.</p>
<ul className="buttons">
{signin_providers}
</ul>
<p className="disclaimer">your information is not sold or used for any other purpose but to visualize your browisng experience. afterall, this is only an art piece, not a unicorn startup failure. however, by signing in through any or all of the above services, you agree to share information about yourself and web sites you visit. any information you publish will be shared publicly for others to see. additionally, your information may be used in other features for this art piece or future works of art in this series by <a href="https://gleu.ch" onClick={TrackEvent.link('Signup:Footer')}>Greg Leuch</a>.</p>
<p className="disclaimer"><ColorLink to="/terms">click here</ColorLink> for complete terms of use.</p>
</section>
</span>`
`<DocumentTitle title={title}>
<ColorStaticPageDisplay content={content} pageName="signup-login" />
</DocumentTitle>`
| 18378 | @ColorSignupPage = React.createClass
render : ->
`<ColorSignupLogin {...this.props} signup={true} />`
@ColorLoginPage = React.createClass
render : ->
`<ColorSignupLogin {...this.props} signup={false} />`
@ColorSignupLogin = React.createClass
getInitialState : ->
{ }
componentDidMount : ->
#
componentWillUnmount : ->
#
componentWillUpdate : (p,s)->
#
componentDidUpdate : (p,s)->
#
render : ->
title = 'Signup/Login for ' + ColorInitialProps.default_title
if this.props.current_user
content = `<div>
<p>You are currently logged in as {this.props.current_user.name}. <ColorLink to={'/u/' + this.props.current_user.login}>Click here to view your profile</ColorLink></p>
</div>`
else
signin_providers = []
this.props.signin_providers.map (provider)->
providerName = provider.name.toLowerCase()
signin_providers.push `
<li key={providerName}>
<a href={provider.url} className={"btn btn-default btn-" + providerName} onClick={TrackEvent.link('Signup')}>connect via {providerName}</a>
</li>`
content = `<span>
<section>
<h1>signup / login</h1>
<p>to see a visual history of your browsing experience is a simple as signing in through Facebook or Twitter. after that, just download the Google Chrome extension.</p>
<ul className="buttons">
{signin_providers}
</ul>
<p className="disclaimer">your information is not sold or used for any other purpose but to visualize your browisng experience. afterall, this is only an art piece, not a unicorn startup failure. however, by signing in through any or all of the above services, you agree to share information about yourself and web sites you visit. any information you publish will be shared publicly for others to see. additionally, your information may be used in other features for this art piece or future works of art in this series by <a href="https://gleu.ch" onClick={TrackEvent.link('Signup:Footer')}><NAME></a>.</p>
<p className="disclaimer"><ColorLink to="/terms">click here</ColorLink> for complete terms of use.</p>
</section>
</span>`
`<DocumentTitle title={title}>
<ColorStaticPageDisplay content={content} pageName="signup-login" />
</DocumentTitle>`
| true | @ColorSignupPage = React.createClass
render : ->
`<ColorSignupLogin {...this.props} signup={true} />`
@ColorLoginPage = React.createClass
render : ->
`<ColorSignupLogin {...this.props} signup={false} />`
@ColorSignupLogin = React.createClass
getInitialState : ->
{ }
componentDidMount : ->
#
componentWillUnmount : ->
#
componentWillUpdate : (p,s)->
#
componentDidUpdate : (p,s)->
#
render : ->
title = 'Signup/Login for ' + ColorInitialProps.default_title
if this.props.current_user
content = `<div>
<p>You are currently logged in as {this.props.current_user.name}. <ColorLink to={'/u/' + this.props.current_user.login}>Click here to view your profile</ColorLink></p>
</div>`
else
signin_providers = []
this.props.signin_providers.map (provider)->
providerName = provider.name.toLowerCase()
signin_providers.push `
<li key={providerName}>
<a href={provider.url} className={"btn btn-default btn-" + providerName} onClick={TrackEvent.link('Signup')}>connect via {providerName}</a>
</li>`
content = `<span>
<section>
<h1>signup / login</h1>
<p>to see a visual history of your browsing experience is a simple as signing in through Facebook or Twitter. after that, just download the Google Chrome extension.</p>
<ul className="buttons">
{signin_providers}
</ul>
<p className="disclaimer">your information is not sold or used for any other purpose but to visualize your browisng experience. afterall, this is only an art piece, not a unicorn startup failure. however, by signing in through any or all of the above services, you agree to share information about yourself and web sites you visit. any information you publish will be shared publicly for others to see. additionally, your information may be used in other features for this art piece or future works of art in this series by <a href="https://gleu.ch" onClick={TrackEvent.link('Signup:Footer')}>PI:NAME:<NAME>END_PI</a>.</p>
<p className="disclaimer"><ColorLink to="/terms">click here</ColorLink> for complete terms of use.</p>
</section>
</span>`
`<DocumentTitle title={title}>
<ColorStaticPageDisplay content={content} pageName="signup-login" />
</DocumentTitle>`
|
[
{
"context": "\n# Commands:\n# tdr_now or tdr_md\n#\n# Author:\n# yuzumone\n#\n\ntwitter = require 'twitter'\nclient = new twitt",
"end": 161,
"score": 0.9997453093528748,
"start": 153,
"tag": "USERNAME",
"value": "yuzumone"
},
{
"context": " {\n \"fallback\": text\n \"author_name\": name,\n \"author_link\": 'https://twitter.com/' + ",
"end": 855,
"score": 0.9793035984039307,
"start": 851,
"tag": "NAME",
"value": "name"
}
] | scripts/twitter_tdr.coffee | yuzumone/hubot-script | 0 | # Description
# A Hubot script that search tweets of tdr information
#
# Dependencies:
# twitter
#
# Commands:
# tdr_now or tdr_md
#
# Author:
# yuzumone
#
twitter = require 'twitter'
client = new twitter {
consumer_key: process.env.TWITTER_CONSUMER_KEY,
consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
access_token_key: process.env.TWITTER_ACCESS_TOKEN,
access_token_secret: process.env.TWITTER_ACCESS_SECRET
}
module.exports = (robot) ->
createAttachments = (statuses) ->
attachments = []
for status in statuses
name = status.user.screen_name
icon = status.user.profile_image_url_https
text = status.full_text
id = status.id_str
media = status.entities.media
createAt = new Date Date.parse(status.created_at)
attachment = {
"fallback": text
"author_name": name,
"author_link": 'https://twitter.com/' + name + '/status/' + id,
"author_icon": icon,
"text": text,
"ts": createAt.getTime() / 1000
}
if media
attachment["image_url"] = media[0].media_url_https
attachments.push(attachment)
return attachments
robot.hear /tdr_now/, (res) ->
room = res.message.room
if room is process.env.TWITTER_ROOM
now = new Date
diff = new Date now.getFullYear(), now.getMonth(), now.getDate(),
now.getHours() - 1, now.getMinutes()
params = {
q: '#tdr_now -rt', lang: 'ja', result_type: 'recent', count: '10',
include_entities: 1, tweet_mode: 'extended'
}
client.get 'search/tweets', params, (error, tweets, response) ->
statuses = tweets.statuses.filter (status) ->
createAt = new Date Date.parse(status.created_at)
if createAt > diff then true else false
attachments = createAttachments statuses
res.send { attachments: attachments }
robot.hear /tdr_md/, (res) ->
room = res.message.room
if room is process.env.TWITTER_ROOM
now = new Date
diff = new Date now.getFullYear(), now.getMonth(), now.getDate(),
now.getHours() - 8, now.getMinutes()
params = {
q: '#tdr_md -rt', lang: 'ja', result_type: 'recent', count: '10',
include_entities: 1, tweet_mode: 'extended'
}
client.get 'search/tweets', params, (error, tweets, response) ->
statuses = tweets.statuses.filter (status) ->
createAt = new Date Date.parse(status.created_at)
if createAt > diff then true else false
attachments = createAttachments statuses
res.send { attachments: attachments }
| 79048 | # Description
# A Hubot script that search tweets of tdr information
#
# Dependencies:
# twitter
#
# Commands:
# tdr_now or tdr_md
#
# Author:
# yuzumone
#
twitter = require 'twitter'
client = new twitter {
consumer_key: process.env.TWITTER_CONSUMER_KEY,
consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
access_token_key: process.env.TWITTER_ACCESS_TOKEN,
access_token_secret: process.env.TWITTER_ACCESS_SECRET
}
module.exports = (robot) ->
createAttachments = (statuses) ->
attachments = []
for status in statuses
name = status.user.screen_name
icon = status.user.profile_image_url_https
text = status.full_text
id = status.id_str
media = status.entities.media
createAt = new Date Date.parse(status.created_at)
attachment = {
"fallback": text
"author_name": <NAME>,
"author_link": 'https://twitter.com/' + name + '/status/' + id,
"author_icon": icon,
"text": text,
"ts": createAt.getTime() / 1000
}
if media
attachment["image_url"] = media[0].media_url_https
attachments.push(attachment)
return attachments
robot.hear /tdr_now/, (res) ->
room = res.message.room
if room is process.env.TWITTER_ROOM
now = new Date
diff = new Date now.getFullYear(), now.getMonth(), now.getDate(),
now.getHours() - 1, now.getMinutes()
params = {
q: '#tdr_now -rt', lang: 'ja', result_type: 'recent', count: '10',
include_entities: 1, tweet_mode: 'extended'
}
client.get 'search/tweets', params, (error, tweets, response) ->
statuses = tweets.statuses.filter (status) ->
createAt = new Date Date.parse(status.created_at)
if createAt > diff then true else false
attachments = createAttachments statuses
res.send { attachments: attachments }
robot.hear /tdr_md/, (res) ->
room = res.message.room
if room is process.env.TWITTER_ROOM
now = new Date
diff = new Date now.getFullYear(), now.getMonth(), now.getDate(),
now.getHours() - 8, now.getMinutes()
params = {
q: '#tdr_md -rt', lang: 'ja', result_type: 'recent', count: '10',
include_entities: 1, tweet_mode: 'extended'
}
client.get 'search/tweets', params, (error, tweets, response) ->
statuses = tweets.statuses.filter (status) ->
createAt = new Date Date.parse(status.created_at)
if createAt > diff then true else false
attachments = createAttachments statuses
res.send { attachments: attachments }
| true | # Description
# A Hubot script that search tweets of tdr information
#
# Dependencies:
# twitter
#
# Commands:
# tdr_now or tdr_md
#
# Author:
# yuzumone
#
twitter = require 'twitter'
client = new twitter {
consumer_key: process.env.TWITTER_CONSUMER_KEY,
consumer_secret: process.env.TWITTER_CONSUMER_SECRET,
access_token_key: process.env.TWITTER_ACCESS_TOKEN,
access_token_secret: process.env.TWITTER_ACCESS_SECRET
}
module.exports = (robot) ->
createAttachments = (statuses) ->
attachments = []
for status in statuses
name = status.user.screen_name
icon = status.user.profile_image_url_https
text = status.full_text
id = status.id_str
media = status.entities.media
createAt = new Date Date.parse(status.created_at)
attachment = {
"fallback": text
"author_name": PI:NAME:<NAME>END_PI,
"author_link": 'https://twitter.com/' + name + '/status/' + id,
"author_icon": icon,
"text": text,
"ts": createAt.getTime() / 1000
}
if media
attachment["image_url"] = media[0].media_url_https
attachments.push(attachment)
return attachments
robot.hear /tdr_now/, (res) ->
room = res.message.room
if room is process.env.TWITTER_ROOM
now = new Date
diff = new Date now.getFullYear(), now.getMonth(), now.getDate(),
now.getHours() - 1, now.getMinutes()
params = {
q: '#tdr_now -rt', lang: 'ja', result_type: 'recent', count: '10',
include_entities: 1, tweet_mode: 'extended'
}
client.get 'search/tweets', params, (error, tweets, response) ->
statuses = tweets.statuses.filter (status) ->
createAt = new Date Date.parse(status.created_at)
if createAt > diff then true else false
attachments = createAttachments statuses
res.send { attachments: attachments }
robot.hear /tdr_md/, (res) ->
room = res.message.room
if room is process.env.TWITTER_ROOM
now = new Date
diff = new Date now.getFullYear(), now.getMonth(), now.getDate(),
now.getHours() - 8, now.getMinutes()
params = {
q: '#tdr_md -rt', lang: 'ja', result_type: 'recent', count: '10',
include_entities: 1, tweet_mode: 'extended'
}
client.get 'search/tweets', params, (error, tweets, response) ->
statuses = tweets.statuses.filter (status) ->
createAt = new Date Date.parse(status.created_at)
if createAt > diff then true else false
attachments = createAttachments statuses
res.send { attachments: attachments }
|
[
{
"context": "--------------\n# Copyright Joe Drago 2018.\n# Distributed under the Boost Softw",
"end": 123,
"score": 0.9998372793197632,
"start": 114,
"tag": "NAME",
"value": "Joe Drago"
}
] | lib/templates/src/coffee/utils.coffee | EwoutH/colorist | 0 | # ---------------------------------------------------------------------------
# Copyright Joe Drago 2018.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# ---------------------------------------------------------------------------
Number.prototype.clamp = (min, max) ->
return Math.min(Math.max(this, min), max)
module.exports =
fr: (number, digits) ->
s = number.toFixed(digits)
pieces = s.split(".")
if pieces.length > 1
pieces[1] = pieces[1].replace(/0+$/, "")
if pieces[1].length == 0
pieces.pop()
return pieces.join('.')
clamp: (v, min, max) ->
return Math.min(Math.max(v, min), max)
| 49172 | # ---------------------------------------------------------------------------
# Copyright <NAME> 2018.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# ---------------------------------------------------------------------------
Number.prototype.clamp = (min, max) ->
return Math.min(Math.max(this, min), max)
module.exports =
fr: (number, digits) ->
s = number.toFixed(digits)
pieces = s.split(".")
if pieces.length > 1
pieces[1] = pieces[1].replace(/0+$/, "")
if pieces[1].length == 0
pieces.pop()
return pieces.join('.')
clamp: (v, min, max) ->
return Math.min(Math.max(v, min), max)
| true | # ---------------------------------------------------------------------------
# Copyright PI:NAME:<NAME>END_PI 2018.
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# ---------------------------------------------------------------------------
Number.prototype.clamp = (min, max) ->
return Math.min(Math.max(this, min), max)
module.exports =
fr: (number, digits) ->
s = number.toFixed(digits)
pieces = s.split(".")
if pieces.length > 1
pieces[1] = pieces[1].replace(/0+$/, "")
if pieces[1].length == 0
pieces.pop()
return pieces.join('.')
clamp: (v, min, max) ->
return Math.min(Math.max(v, min), max)
|
[
{
"context": "# SillyAI's strategy is to buy an arbitrary card with the",
"end": 7,
"score": 0.7478885650634766,
"start": 2,
"tag": "NAME",
"value": "Silly"
},
{
"context": " lot of possible\n# states of the game.\n{\n name: 'SillyAI'\n author: 'rspeer'\n requires: []\n gainPrio",
"end": 258,
"score": 0.7767840027809143,
"start": 255,
"tag": "NAME",
"value": "Sil"
},
{
"context": "t of possible\n# states of the game.\n{\n name: 'SillyAI'\n author: 'rspeer'\n requires: []\n gainPriority",
"end": 262,
"score": 0.8722870349884033,
"start": 258,
"tag": "USERNAME",
"value": "lyAI"
},
{
"context": "tates of the game.\n{\n name: 'SillyAI'\n author: 'rspeer'\n requires: []\n gainPriority: (state, my) -> []",
"end": 281,
"score": 0.9996781945228577,
"start": 275,
"tag": "USERNAME",
"value": "rspeer"
}
] | strategies/SillyAI.coffee | rspeer/dominiate | 65 | # SillyAI's strategy is to buy an arbitrary card with the highest available
# cost. It does a reasonable job of playing like a newbie, it occasionally
# gets lucky and pulls off nice combos, and it tests a lot of possible
# states of the game.
{
name: 'SillyAI'
author: 'rspeer'
requires: []
gainPriority: (state, my) -> []
gainValue: (state, card, my) ->
if my.turnsTaken > 100 and card isnt null
# oh god, just make it stop
return 1
if card.name is "Copper" or card.name is "Curse"
return -1
else
[coins, potions] = card.getCost(state)
return coins + potions*2 + Math.random()
}
| 198298 | # <NAME>AI's strategy is to buy an arbitrary card with the highest available
# cost. It does a reasonable job of playing like a newbie, it occasionally
# gets lucky and pulls off nice combos, and it tests a lot of possible
# states of the game.
{
name: '<NAME>lyAI'
author: 'rspeer'
requires: []
gainPriority: (state, my) -> []
gainValue: (state, card, my) ->
if my.turnsTaken > 100 and card isnt null
# oh god, just make it stop
return 1
if card.name is "Copper" or card.name is "Curse"
return -1
else
[coins, potions] = card.getCost(state)
return coins + potions*2 + Math.random()
}
| true | # PI:NAME:<NAME>END_PIAI's strategy is to buy an arbitrary card with the highest available
# cost. It does a reasonable job of playing like a newbie, it occasionally
# gets lucky and pulls off nice combos, and it tests a lot of possible
# states of the game.
{
name: 'PI:NAME:<NAME>END_PIlyAI'
author: 'rspeer'
requires: []
gainPriority: (state, my) -> []
gainValue: (state, card, my) ->
if my.turnsTaken > 100 and card isnt null
# oh god, just make it stop
return 1
if card.name is "Copper" or card.name is "Curse"
return -1
else
[coins, potions] = card.getCost(state)
return coins + potions*2 + Math.random()
}
|
[
{
"context": "**\n# *\n# * yOSON appLoad\n# *\n# * Copyright(c) 2011 yOSON <evangelizandolaweb@gmail.com>\n# * Web evangelist",
"end": 69,
"score": 0.9911029934883118,
"start": 64,
"tag": "USERNAME",
"value": "yOSON"
},
{
"context": " * yOSON appLoad\n# *\n# * Copyright(c) 2011 yOSON <evangelizandolaweb@gmail.com>\n# * Web evangelists <evangelizandolaweb@groups.f",
"end": 99,
"score": 0.9999328851699829,
"start": 71,
"tag": "EMAIL",
"value": "evangelizandolaweb@gmail.com"
},
{
"context": "vangelizandolaweb@gmail.com>\n# * Web evangelists <evangelizandolaweb@groups.facebook.com>\n# *\n# *\n# * MIT Licensed\n#\nmodu = yOSON.module\nc",
"end": 160,
"score": 0.9999333620071411,
"start": 122,
"tag": "EMAIL",
"value": "evangelizandolaweb@groups.facebook.com"
}
] | frontend/resources/coffee/libs/yosonjs-utils/appLoad.coffee | ronnyfly2/openvios | 0 | # *************
# *
# * yOSON appLoad
# *
# * Copyright(c) 2011 yOSON <evangelizandolaweb@gmail.com>
# * Web evangelists <evangelizandolaweb@groups.facebook.com>
# *
# *
# * MIT Licensed
#
modu = yOSON.module
ctrl = yOSON.controller
acti = yOSON.action
# console.log modu, ctrl, acti
#log "==> mod: #{modu} - ctrl:#{ctrl} - acti:#{acti}"
yOSON.AppSchema.modules.allModules()
if modu is "" or not yOSON.AppSchema.modules.hasOwnProperty(modu)
yOSON.AppSchema.modules.byDefault()
else
yOSON.AppSchema.modules[modu].allControllers()
if ctrl is "" or not yOSON.AppSchema.modules[modu].controllers.hasOwnProperty(ctrl)
yOSON.AppSchema.modules[modu].controllers.byDefault()
else
yOSON.AppSchema.modules[modu].controllers[ctrl].allActions()
if acti is "" or not yOSON.AppSchema.modules[modu].controllers[ctrl].actions.hasOwnProperty(acti)
yOSON.AppSchema.modules[modu].controllers[ctrl].actions.byDefault()
else
yOSON.AppSchema.modules[modu].controllers[ctrl].actions[acti]()
| 15810 | # *************
# *
# * yOSON appLoad
# *
# * Copyright(c) 2011 yOSON <<EMAIL>>
# * Web evangelists <<EMAIL>>
# *
# *
# * MIT Licensed
#
modu = yOSON.module
ctrl = yOSON.controller
acti = yOSON.action
# console.log modu, ctrl, acti
#log "==> mod: #{modu} - ctrl:#{ctrl} - acti:#{acti}"
yOSON.AppSchema.modules.allModules()
if modu is "" or not yOSON.AppSchema.modules.hasOwnProperty(modu)
yOSON.AppSchema.modules.byDefault()
else
yOSON.AppSchema.modules[modu].allControllers()
if ctrl is "" or not yOSON.AppSchema.modules[modu].controllers.hasOwnProperty(ctrl)
yOSON.AppSchema.modules[modu].controllers.byDefault()
else
yOSON.AppSchema.modules[modu].controllers[ctrl].allActions()
if acti is "" or not yOSON.AppSchema.modules[modu].controllers[ctrl].actions.hasOwnProperty(acti)
yOSON.AppSchema.modules[modu].controllers[ctrl].actions.byDefault()
else
yOSON.AppSchema.modules[modu].controllers[ctrl].actions[acti]()
| true | # *************
# *
# * yOSON appLoad
# *
# * Copyright(c) 2011 yOSON <PI:EMAIL:<EMAIL>END_PI>
# * Web evangelists <PI:EMAIL:<EMAIL>END_PI>
# *
# *
# * MIT Licensed
#
modu = yOSON.module
ctrl = yOSON.controller
acti = yOSON.action
# console.log modu, ctrl, acti
#log "==> mod: #{modu} - ctrl:#{ctrl} - acti:#{acti}"
yOSON.AppSchema.modules.allModules()
if modu is "" or not yOSON.AppSchema.modules.hasOwnProperty(modu)
yOSON.AppSchema.modules.byDefault()
else
yOSON.AppSchema.modules[modu].allControllers()
if ctrl is "" or not yOSON.AppSchema.modules[modu].controllers.hasOwnProperty(ctrl)
yOSON.AppSchema.modules[modu].controllers.byDefault()
else
yOSON.AppSchema.modules[modu].controllers[ctrl].allActions()
if acti is "" or not yOSON.AppSchema.modules[modu].controllers[ctrl].actions.hasOwnProperty(acti)
yOSON.AppSchema.modules[modu].controllers[ctrl].actions.byDefault()
else
yOSON.AppSchema.modules[modu].controllers[ctrl].actions[acti]()
|
[
{
"context": "gles:\n dongle1:\n id: 1\n masterSeed: 'af5920746fad1e40b2a8c7080ee40524a335f129cb374d4c6f82fd6bf3139b17191cb8c38b8e37f4003768b103479947cab1d4f68d908ae520cfe71263b2a0cd'\n mnemonic: 'fox luggage hero item busy harb",
"end": 326,
"score": 0.9995700716972351,
"start": 198,
"tag": "KEY",
"value": "af5920746fad1e40b2a8c7080ee40524a335f129cb374d4c6f82fd6bf3139b17191cb8c38b8e37f4003768b103479947cab1d4f68d908ae520cfe71263b2a0cd"
},
{
"context": "path tooth cup nation erosion'\n pairingKey: 'a26d9f9187c250beb7be79f9eb8ff249'\n pin: '0000'\n\n\n # Empty wallet account\n ",
"end": 546,
"score": 0.9997527003288269,
"start": 514,
"tag": "KEY",
"value": "a26d9f9187c250beb7be79f9eb8ff249"
},
{
"context": "count\n dongle2:\n id: 2\n masterSeed: '16eb9af19037ea27cb9d493654d612217547cbd995ae0542c47902f683398eb85ae39579b80b839757ae7dee52bbb895eee421aedaded5a14d87072554026186'\n mnemonic: 'forest zebra delay attend preve",
"end": 767,
"score": 0.9988780617713928,
"start": 639,
"tag": "KEY",
"value": "16eb9af19037ea27cb9d493654d612217547cbd995ae0542c47902f683398eb85ae39579b80b839757ae7dee52bbb895eee421aedaded5a14d87072554026186"
},
{
"context": "nit crumble tower skull tribe'\n pairingKey: 'a26d9f9187c250beb7be79f9eb8ff249'\n pin: '0000'\n\n\n # Bitcoin TestNet\n bi",
"end": 987,
"score": 0.9997521042823792,
"start": 955,
"tag": "KEY",
"value": "a26d9f9187c250beb7be79f9eb8ff249"
},
{
"context": " bitcoin_testnet:\n id: 3\n masterSeed: 'a637bca4c37332ed60a6cb2869ebd9f78e7d3c76541b49c57e532968b7f2c7689c774231bfb0bff49659b1390212af5ab6702bec5fed46b23be6f2bcb6c7f6cf'\n mnemonic: 'frost crew jaguar leisure desig",
"end": 1211,
"score": 0.9983481168746948,
"start": 1083,
"tag": "KEY",
"value": "a637bca4c37332ed60a6cb2869ebd9f78e7d3c76541b49c57e532968b7f2c7689c774231bfb0bff49659b1390212af5ab6702bec5fed46b23be6f2bcb6c7f6cf"
},
{
"context": "t loud rose panda tunnel deny'\n pairingKey: 'a26d9f9187c250beb7be79f9eb8ff249'\n pin: '0000'",
"end": 1427,
"score": 0.9997356534004211,
"start": 1395,
"tag": "KEY",
"value": "a26d9f9187c250beb7be79f9eb8ff249"
}
] | app/spec/fixtures/fixtures_dongle.coffee | romanornr/ledger-wallet-crw | 173 | ledger.specs.fixtures ?= {}
_.extend ledger.specs.fixtures,
# Please don't make transactions with this seed or you will screw up the tests
dongles:
dongle1:
id: 1
masterSeed: 'af5920746fad1e40b2a8c7080ee40524a335f129cb374d4c6f82fd6bf3139b17191cb8c38b8e37f4003768b103479947cab1d4f68d908ae520cfe71263b2a0cd'
mnemonic: 'fox luggage hero item busy harbor dawn veteran bottom antenna rigid upgrade merit cash cigar episode leg multiply fish path tooth cup nation erosion'
pairingKey: 'a26d9f9187c250beb7be79f9eb8ff249'
pin: '0000'
# Empty wallet account
dongle2:
id: 2
masterSeed: '16eb9af19037ea27cb9d493654d612217547cbd995ae0542c47902f683398eb85ae39579b80b839757ae7dee52bbb895eee421aedaded5a14d87072554026186'
mnemonic: 'forest zebra delay attend prevent lab game secret cattle open degree among cigar wolf wagon catch invest glare tumble unit crumble tower skull tribe'
pairingKey: 'a26d9f9187c250beb7be79f9eb8ff249'
pin: '0000'
# Bitcoin TestNet
bitcoin_testnet:
id: 3
masterSeed: 'a637bca4c37332ed60a6cb2869ebd9f78e7d3c76541b49c57e532968b7f2c7689c774231bfb0bff49659b1390212af5ab6702bec5fed46b23be6f2bcb6c7f6cf'
mnemonic: 'frost crew jaguar leisure design essence father badge ozone bleak able slot flash jazz uncle pledge flat piano toast loud rose panda tunnel deny'
pairingKey: 'a26d9f9187c250beb7be79f9eb8ff249'
pin: '0000' | 190462 | ledger.specs.fixtures ?= {}
_.extend ledger.specs.fixtures,
# Please don't make transactions with this seed or you will screw up the tests
dongles:
dongle1:
id: 1
masterSeed: '<KEY>'
mnemonic: 'fox luggage hero item busy harbor dawn veteran bottom antenna rigid upgrade merit cash cigar episode leg multiply fish path tooth cup nation erosion'
pairingKey: '<KEY>'
pin: '0000'
# Empty wallet account
dongle2:
id: 2
masterSeed: '<KEY>'
mnemonic: 'forest zebra delay attend prevent lab game secret cattle open degree among cigar wolf wagon catch invest glare tumble unit crumble tower skull tribe'
pairingKey: '<KEY>'
pin: '0000'
# Bitcoin TestNet
bitcoin_testnet:
id: 3
masterSeed: '<KEY>'
mnemonic: 'frost crew jaguar leisure design essence father badge ozone bleak able slot flash jazz uncle pledge flat piano toast loud rose panda tunnel deny'
pairingKey: '<KEY>'
pin: '0000' | true | ledger.specs.fixtures ?= {}
_.extend ledger.specs.fixtures,
# Please don't make transactions with this seed or you will screw up the tests
dongles:
dongle1:
id: 1
masterSeed: 'PI:KEY:<KEY>END_PI'
mnemonic: 'fox luggage hero item busy harbor dawn veteran bottom antenna rigid upgrade merit cash cigar episode leg multiply fish path tooth cup nation erosion'
pairingKey: 'PI:KEY:<KEY>END_PI'
pin: '0000'
# Empty wallet account
dongle2:
id: 2
masterSeed: 'PI:KEY:<KEY>END_PI'
mnemonic: 'forest zebra delay attend prevent lab game secret cattle open degree among cigar wolf wagon catch invest glare tumble unit crumble tower skull tribe'
pairingKey: 'PI:KEY:<KEY>END_PI'
pin: '0000'
# Bitcoin TestNet
bitcoin_testnet:
id: 3
masterSeed: 'PI:KEY:<KEY>END_PI'
mnemonic: 'frost crew jaguar leisure design essence father badge ozone bleak able slot flash jazz uncle pledge flat piano toast loud rose panda tunnel deny'
pairingKey: 'PI:KEY:<KEY>END_PI'
pin: '0000' |
[
{
"context": "lback run synchronously:\n\n csv()\n .from('82,Preisner,Zbigniew\\n94,Gainsbourg,Serge')\n .to(consol",
"end": 1568,
"score": 0.7736263275146484,
"start": 1563,
"tag": "NAME",
"value": "Preis"
},
{
"context": "nsform.js`, print:\n // 94,Gainsbourg,Serge\\n82,Preisner,Zbigniew\n\nTransform callback run asynchronou",
"end": 1779,
"score": 0.7281632423400879,
"start": 1776,
"tag": "NAME",
"value": "Pre"
},
{
"context": "back run asynchronously:\n\n csv()\n .from('82,Preisner,Zbigniew\\n94,Gainsbourg,Serge')\n .to(consol",
"end": 1864,
"score": 0.7728598713874817,
"start": 1859,
"tag": "NAME",
"value": "Preis"
},
{
"context": "ing `node samples/transform.js`, print:\n // 94,Gainsbourg,Serge\\n82,Preisner,Zbigniew\n\nTransform callback r",
"end": 2120,
"score": 0.8630344271659851,
"start": 2110,
"tag": "NAME",
"value": "Gainsbourg"
},
{
"context": "les/transform.js`, print:\n // 94,Gainsbourg,Serge\\n82,Preisner,Zbigniew\n\nTransform callback returni",
"end": 2126,
"score": 0.9661482572555542,
"start": 2124,
"tag": "NAME",
"value": "ge"
},
{
"context": "nsform.js`, print:\n // 94,Gainsbourg,Serge\\n82,Preisner,Zbigniew\n\nTransform callback returning a string:\n",
"end": 2139,
"score": 0.9985733032226562,
"start": 2131,
"tag": "NAME",
"value": "Preisner"
},
{
"context": "`, print:\n // 94,Gainsbourg,Serge\\n82,Preisner,Zbigniew\n\nTransform callback returning a string:\n\n csv(",
"end": 2148,
"score": 0.9949867129325867,
"start": 2140,
"tag": "NAME",
"value": "Zbigniew"
},
{
"context": "back returning a string:\n\n csv()\n .from('82,Preisner,Zbigniew\\n94,Gainsbourg,Serge')\n .to(console.l",
"end": 2222,
"score": 0.9804391860961914,
"start": 2214,
"tag": "NAME",
"value": "Preisner"
},
{
"context": "ing a string:\n\n csv()\n .from('82,Preisner,Zbigniew\\n94,Gainsbourg,Serge')\n .to(console.log)\n .",
"end": 2231,
"score": 0.6363027095794678,
"start": 2225,
"tag": "NAME",
"value": "igniew"
},
{
"context": "g:\n\n csv()\n .from('82,Preisner,Zbigniew\\n94,Gainsbourg,Serge')\n .to(console.log)\n .transform(funct",
"end": 2246,
"score": 0.9961190223693848,
"start": 2236,
"tag": "NAME",
"value": "Gainsbourg"
},
{
"context": "()\n .from('82,Preisner,Zbigniew\\n94,Gainsbourg,Serge')\n .to(console.log)\n .transform(function(ro",
"end": 2252,
"score": 0.9976658821105957,
"start": 2247,
"tag": "NAME",
"value": "Serge"
},
{
"context": "ing `node samples/transform.js`, print:\n // 82:Zbigniew Preisner,94:Serge Gainsbourg\n\n###\nTransformer = (csv) ->\n ",
"end": 2470,
"score": 0.9997199177742004,
"start": 2453,
"tag": "NAME",
"value": "Zbigniew Preisner"
},
{
"context": "nsform.js`, print:\n // 82:Zbigniew Preisner,94:Serge Gainsbourg\n\n###\nTransformer = (csv) ->\n @csv = csv\n @runni",
"end": 2490,
"score": 0.9996526837348938,
"start": 2474,
"tag": "NAME",
"value": "Serge Gainsbourg"
}
] | node_modules/csvtojson/node_modules/csv/src/transformer.coffee | thomjoy/sydney-buses | 1 |
stream = require 'stream'
###
Transforming data
=================
Transformations may occur synchronously or asynchronously depending
on the provided transform callback and its declared arguments length.
Callbacks are called for each line, with these arguments:
* *row*
CSV record
* *index*
Incremented counter
* *callback*
Callback function to be called in asynchronous mode
If you specify the `columns` read option, the `row` argument will be
provided as an object with keys matching columns names. Otherwise it
will be provided as an array.
In synchronous mode, the contract is quite simple, you will receive an array
of fields for each record and the transformed array should be returned.
In asynchronous mode, it is your responsibility to call the callback
provided as the third argument. It must be called with two arguments,
an error (if any), and the transformed record.
Transformed records may be an array, an associative array, a
string or `null`. If `null`, the record will simply be skipped. When the
returned value is an array, the fields are merged in order.
When the returned value is an object, the module will search for
the `columns` property in the write or in the read options and
intelligently order the values. If no `columns` options are found,
it will merge the values in their order of appearance. When the
returned value is a string, it is directly sent to the destination
and it is your responsibility to delimit, quote, escape
or define line breaks.
Transform callback run synchronously:
csv()
.from('82,Preisner,Zbigniew\n94,Gainsbourg,Serge')
.to(console.log)
.transform(function(row, index){
return row.reverse()
});
// Executing `node samples/transform.js`, print:
// 94,Gainsbourg,Serge\n82,Preisner,Zbigniew
Transform callback run asynchronously:
csv()
.from('82,Preisner,Zbigniew\n94,Gainsbourg,Serge')
.to(console.log)
.transform(function(row, index, callback){
process.nextTick(function(){
callback(null, row.reverse());
});
});
// Executing `node samples/transform.js`, print:
// 94,Gainsbourg,Serge\n82,Preisner,Zbigniew
Transform callback returning a string:
csv()
.from('82,Preisner,Zbigniew\n94,Gainsbourg,Serge')
.to(console.log)
.transform(function(row, index){
return (index>0 ? ',' : '') + row[0] + ":" + row[2] + ' ' + row[1];
});
// Executing `node samples/transform.js`, print:
// 82:Zbigniew Preisner,94:Serge Gainsbourg
###
Transformer = (csv) ->
@csv = csv
@running = 0
@options = parallel: 100
@todo = []
@
Transformer.prototype.__proto__ = stream.prototype
### no doc
`headers()`
----------------------------
Print headers.
###
Transformer.prototype.headers = ->
labels = @csv.options.to.columns or @csv.options.from.columns
# If columns is an object, keys are fields and values are labels
if typeof labels is 'object' then labels = for k, label of labels then label
@csv.stringifier.write labels
### no doc
`write(line)`
----------------------------------
Call a callback to transform a line. Called for each line after being parsed.
It is responsible for transforming the data and finally calling `write`.
###
Transformer.prototype.write = (line) ->
self = @
csv = @csv
# Sanitize columns option into state and cache the result
if not @columns?
columns = csv.options.from.columns
if typeof columns is 'object' and columns isnt null and not Array.isArray columns
columns = Object.keys columns
# Extract column names from the first line
if csv.state.count is 0 and columns is true
columns = csv.options.from.columns = line
return
@columns = if columns? then columns else false
else columns = @columns
# Convert line to an object
if columns
# Line provided as an array and stored as an object, keys are column names
if Array.isArray line
lineAsObject = {}
for column, i in columns
lineAsObject[column] = if line[i]? then line[i] else null
line = lineAsObject
# Line was provided as an object, we create a new one with only the defined columns
else
lineAsObject = {}
for column, i in columns
lineAsObject[column] = if line[column]? then line[column] else null
line = lineAsObject
finish = (line) ->
# Print header on first line if we need to
self.headers() if csv.options.to.header is true and (csv.state.count - self.running) is 1
# Stringify the transformed line
csv.stringifier.write line
# Pick line if any
line = self.todo.shift()
return run line if line
# Emit end event if we are closed and we have no more transformation going on
self.emit 'end', csv.state.count if csv.state.transforming is 0 and self.closed is true
csv.state.count++
return finish line unless @callback
sync = @callback.length isnt 3
csv.state.transforming++
self = @
done = (err, line) ->
self.running--
return csv.error err if err
isObject = typeof line is 'object' and not Array.isArray line
if isObject and csv.options.to.newColumns and not csv.options.to.columns
Object.keys(line)
.filter( (column) -> self.columns.indexOf(column) is -1 )
.forEach( (column) -> self.columns.push(column) )
csv.state.transforming--
finish line
run = (line) ->
self.running++
try
if sync
then done null, self. callback line, csv.state.count - self.todo.length - 1
else self.callback line, csv.state.count - self.todo.length - 1, done
catch err
done err
# Apply back pressure
if @running is @options.parallel
@todo.push line
return false
# Work on current line
run line
true
### no doc
`end()`
------------------------
A transformer instance extends the EventEmitter and
emit the 'end' event when the last callback is called.
###
Transformer.prototype.end = ->
return @csv.error new Error 'Transformer already closed' if @closed
@closed = true
@emit 'end' if @csv.state.transforming is 0
module.exports = (csv) -> new Transformer csv
module.exports.Transformer = Transformer
| 222088 |
stream = require 'stream'
###
Transforming data
=================
Transformations may occur synchronously or asynchronously depending
on the provided transform callback and its declared arguments length.
Callbacks are called for each line, with these arguments:
* *row*
CSV record
* *index*
Incremented counter
* *callback*
Callback function to be called in asynchronous mode
If you specify the `columns` read option, the `row` argument will be
provided as an object with keys matching columns names. Otherwise it
will be provided as an array.
In synchronous mode, the contract is quite simple, you will receive an array
of fields for each record and the transformed array should be returned.
In asynchronous mode, it is your responsibility to call the callback
provided as the third argument. It must be called with two arguments,
an error (if any), and the transformed record.
Transformed records may be an array, an associative array, a
string or `null`. If `null`, the record will simply be skipped. When the
returned value is an array, the fields are merged in order.
When the returned value is an object, the module will search for
the `columns` property in the write or in the read options and
intelligently order the values. If no `columns` options are found,
it will merge the values in their order of appearance. When the
returned value is a string, it is directly sent to the destination
and it is your responsibility to delimit, quote, escape
or define line breaks.
Transform callback run synchronously:
csv()
.from('82,<NAME>ner,Zbigniew\n94,Gainsbourg,Serge')
.to(console.log)
.transform(function(row, index){
return row.reverse()
});
// Executing `node samples/transform.js`, print:
// 94,Gainsbourg,Serge\n82,<NAME>isner,Zbigniew
Transform callback run asynchronously:
csv()
.from('82,<NAME>ner,Zbigniew\n94,Gainsbourg,Serge')
.to(console.log)
.transform(function(row, index, callback){
process.nextTick(function(){
callback(null, row.reverse());
});
});
// Executing `node samples/transform.js`, print:
// 94,<NAME>,Ser<NAME>\n82,<NAME>,<NAME>
Transform callback returning a string:
csv()
.from('82,<NAME>,Zb<NAME>\n94,<NAME>,<NAME>')
.to(console.log)
.transform(function(row, index){
return (index>0 ? ',' : '') + row[0] + ":" + row[2] + ' ' + row[1];
});
// Executing `node samples/transform.js`, print:
// 82:<NAME>,94:<NAME>
###
Transformer = (csv) ->
@csv = csv
@running = 0
@options = parallel: 100
@todo = []
@
Transformer.prototype.__proto__ = stream.prototype
### no doc
`headers()`
----------------------------
Print headers.
###
Transformer.prototype.headers = ->
labels = @csv.options.to.columns or @csv.options.from.columns
# If columns is an object, keys are fields and values are labels
if typeof labels is 'object' then labels = for k, label of labels then label
@csv.stringifier.write labels
### no doc
`write(line)`
----------------------------------
Call a callback to transform a line. Called for each line after being parsed.
It is responsible for transforming the data and finally calling `write`.
###
Transformer.prototype.write = (line) ->
self = @
csv = @csv
# Sanitize columns option into state and cache the result
if not @columns?
columns = csv.options.from.columns
if typeof columns is 'object' and columns isnt null and not Array.isArray columns
columns = Object.keys columns
# Extract column names from the first line
if csv.state.count is 0 and columns is true
columns = csv.options.from.columns = line
return
@columns = if columns? then columns else false
else columns = @columns
# Convert line to an object
if columns
# Line provided as an array and stored as an object, keys are column names
if Array.isArray line
lineAsObject = {}
for column, i in columns
lineAsObject[column] = if line[i]? then line[i] else null
line = lineAsObject
# Line was provided as an object, we create a new one with only the defined columns
else
lineAsObject = {}
for column, i in columns
lineAsObject[column] = if line[column]? then line[column] else null
line = lineAsObject
finish = (line) ->
# Print header on first line if we need to
self.headers() if csv.options.to.header is true and (csv.state.count - self.running) is 1
# Stringify the transformed line
csv.stringifier.write line
# Pick line if any
line = self.todo.shift()
return run line if line
# Emit end event if we are closed and we have no more transformation going on
self.emit 'end', csv.state.count if csv.state.transforming is 0 and self.closed is true
csv.state.count++
return finish line unless @callback
sync = @callback.length isnt 3
csv.state.transforming++
self = @
done = (err, line) ->
self.running--
return csv.error err if err
isObject = typeof line is 'object' and not Array.isArray line
if isObject and csv.options.to.newColumns and not csv.options.to.columns
Object.keys(line)
.filter( (column) -> self.columns.indexOf(column) is -1 )
.forEach( (column) -> self.columns.push(column) )
csv.state.transforming--
finish line
run = (line) ->
self.running++
try
if sync
then done null, self. callback line, csv.state.count - self.todo.length - 1
else self.callback line, csv.state.count - self.todo.length - 1, done
catch err
done err
# Apply back pressure
if @running is @options.parallel
@todo.push line
return false
# Work on current line
run line
true
### no doc
`end()`
------------------------
A transformer instance extends the EventEmitter and
emit the 'end' event when the last callback is called.
###
Transformer.prototype.end = ->
return @csv.error new Error 'Transformer already closed' if @closed
@closed = true
@emit 'end' if @csv.state.transforming is 0
module.exports = (csv) -> new Transformer csv
module.exports.Transformer = Transformer
| true |
stream = require 'stream'
###
Transforming data
=================
Transformations may occur synchronously or asynchronously depending
on the provided transform callback and its declared arguments length.
Callbacks are called for each line, with these arguments:
* *row*
CSV record
* *index*
Incremented counter
* *callback*
Callback function to be called in asynchronous mode
If you specify the `columns` read option, the `row` argument will be
provided as an object with keys matching columns names. Otherwise it
will be provided as an array.
In synchronous mode, the contract is quite simple, you will receive an array
of fields for each record and the transformed array should be returned.
In asynchronous mode, it is your responsibility to call the callback
provided as the third argument. It must be called with two arguments,
an error (if any), and the transformed record.
Transformed records may be an array, an associative array, a
string or `null`. If `null`, the record will simply be skipped. When the
returned value is an array, the fields are merged in order.
When the returned value is an object, the module will search for
the `columns` property in the write or in the read options and
intelligently order the values. If no `columns` options are found,
it will merge the values in their order of appearance. When the
returned value is a string, it is directly sent to the destination
and it is your responsibility to delimit, quote, escape
or define line breaks.
Transform callback run synchronously:
csv()
.from('82,PI:NAME:<NAME>END_PIner,Zbigniew\n94,Gainsbourg,Serge')
.to(console.log)
.transform(function(row, index){
return row.reverse()
});
// Executing `node samples/transform.js`, print:
// 94,Gainsbourg,Serge\n82,PI:NAME:<NAME>END_PIisner,Zbigniew
Transform callback run asynchronously:
csv()
.from('82,PI:NAME:<NAME>END_PIner,Zbigniew\n94,Gainsbourg,Serge')
.to(console.log)
.transform(function(row, index, callback){
process.nextTick(function(){
callback(null, row.reverse());
});
});
// Executing `node samples/transform.js`, print:
// 94,PI:NAME:<NAME>END_PI,SerPI:NAME:<NAME>END_PI\n82,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI
Transform callback returning a string:
csv()
.from('82,PI:NAME:<NAME>END_PI,ZbPI:NAME:<NAME>END_PI\n94,PI:NAME:<NAME>END_PI,PI:NAME:<NAME>END_PI')
.to(console.log)
.transform(function(row, index){
return (index>0 ? ',' : '') + row[0] + ":" + row[2] + ' ' + row[1];
});
// Executing `node samples/transform.js`, print:
// 82:PI:NAME:<NAME>END_PI,94:PI:NAME:<NAME>END_PI
###
Transformer = (csv) ->
@csv = csv
@running = 0
@options = parallel: 100
@todo = []
@
Transformer.prototype.__proto__ = stream.prototype
### no doc
`headers()`
----------------------------
Print headers.
###
Transformer.prototype.headers = ->
labels = @csv.options.to.columns or @csv.options.from.columns
# If columns is an object, keys are fields and values are labels
if typeof labels is 'object' then labels = for k, label of labels then label
@csv.stringifier.write labels
### no doc
`write(line)`
----------------------------------
Call a callback to transform a line. Called for each line after being parsed.
It is responsible for transforming the data and finally calling `write`.
###
Transformer.prototype.write = (line) ->
self = @
csv = @csv
# Sanitize columns option into state and cache the result
if not @columns?
columns = csv.options.from.columns
if typeof columns is 'object' and columns isnt null and not Array.isArray columns
columns = Object.keys columns
# Extract column names from the first line
if csv.state.count is 0 and columns is true
columns = csv.options.from.columns = line
return
@columns = if columns? then columns else false
else columns = @columns
# Convert line to an object
if columns
# Line provided as an array and stored as an object, keys are column names
if Array.isArray line
lineAsObject = {}
for column, i in columns
lineAsObject[column] = if line[i]? then line[i] else null
line = lineAsObject
# Line was provided as an object, we create a new one with only the defined columns
else
lineAsObject = {}
for column, i in columns
lineAsObject[column] = if line[column]? then line[column] else null
line = lineAsObject
finish = (line) ->
# Print header on first line if we need to
self.headers() if csv.options.to.header is true and (csv.state.count - self.running) is 1
# Stringify the transformed line
csv.stringifier.write line
# Pick line if any
line = self.todo.shift()
return run line if line
# Emit end event if we are closed and we have no more transformation going on
self.emit 'end', csv.state.count if csv.state.transforming is 0 and self.closed is true
csv.state.count++
return finish line unless @callback
sync = @callback.length isnt 3
csv.state.transforming++
self = @
done = (err, line) ->
self.running--
return csv.error err if err
isObject = typeof line is 'object' and not Array.isArray line
if isObject and csv.options.to.newColumns and not csv.options.to.columns
Object.keys(line)
.filter( (column) -> self.columns.indexOf(column) is -1 )
.forEach( (column) -> self.columns.push(column) )
csv.state.transforming--
finish line
run = (line) ->
self.running++
try
if sync
then done null, self. callback line, csv.state.count - self.todo.length - 1
else self.callback line, csv.state.count - self.todo.length - 1, done
catch err
done err
# Apply back pressure
if @running is @options.parallel
@todo.push line
return false
# Work on current line
run line
true
### no doc
`end()`
------------------------
A transformer instance extends the EventEmitter and
emit the 'end' event when the last callback is called.
###
Transformer.prototype.end = ->
return @csv.error new Error 'Transformer already closed' if @closed
@closed = true
@emit 'end' if @csv.state.transforming is 0
module.exports = (csv) -> new Transformer csv
module.exports.Transformer = Transformer
|
[
{
"context": "repoUrl)\n @addRow @createFieldRow(\"fullName\", \"minieditor\", Configuration.labels.fullName)\n @addRow @cre",
"end": 346,
"score": 0.9994240403175354,
"start": 336,
"tag": "USERNAME",
"value": "minieditor"
},
{
"context": "FieldRow(\"username\", \"text\", Configuration.labels.username)\n # @addRow @createFieldRow(\"repoUsername\", \"t",
"end": 621,
"score": 0.6195772290229797,
"start": 613,
"tag": "USERNAME",
"value": "username"
},
{
"context": "rname)\n # @addRow @createFieldRow(\"password\", \"password\", Configuration.labels.password)\n @addRow @cre",
"end": 763,
"score": 0.919221818447113,
"start": 755,
"tag": "PASSWORD",
"value": "password"
}
] | lib/util/configuration-form-view.coffee | ExentriqLtd/Advanced-Web-Editor | 0 | FormView = require './form-view'
Configuration = require './configuration.coffee'
class ConfigurationFormView extends FormView
initialize: ->
super
@addRow @createTitleRow("Editing Tools Configuration")
# @addRow @createFieldRow("repoUrl", "text", Configuration.labels.repoUrl)
@addRow @createFieldRow("fullName", "minieditor", Configuration.labels.fullName)
@addRow @createFieldRow("email", "minieditor", Configuration.labels.email)
# @addRow @createFieldRow("repoOwner", "text", Configuration.labels.repoOwner)
# @addRow @createFieldRow("username", "text", Configuration.labels.username)
# @addRow @createFieldRow("repoUsername", "text", Configuration.labels.repoUsername)
# @addRow @createFieldRow("password", "password", Configuration.labels.password)
@addRow @createFieldRow("cloneDir", "directory", Configuration.labels.cloneDir)
# @addRow @createFieldRow("advancedMode", "checkbox", Configuration.labels.advancedMode)
# @forceTabIndex()
module.exports = document.registerElement('awe-configuration-form-view', prototype: ConfigurationFormView.prototype, extends: 'div')
| 203013 | FormView = require './form-view'
Configuration = require './configuration.coffee'
class ConfigurationFormView extends FormView
initialize: ->
super
@addRow @createTitleRow("Editing Tools Configuration")
# @addRow @createFieldRow("repoUrl", "text", Configuration.labels.repoUrl)
@addRow @createFieldRow("fullName", "minieditor", Configuration.labels.fullName)
@addRow @createFieldRow("email", "minieditor", Configuration.labels.email)
# @addRow @createFieldRow("repoOwner", "text", Configuration.labels.repoOwner)
# @addRow @createFieldRow("username", "text", Configuration.labels.username)
# @addRow @createFieldRow("repoUsername", "text", Configuration.labels.repoUsername)
# @addRow @createFieldRow("password", "<PASSWORD>", Configuration.labels.password)
@addRow @createFieldRow("cloneDir", "directory", Configuration.labels.cloneDir)
# @addRow @createFieldRow("advancedMode", "checkbox", Configuration.labels.advancedMode)
# @forceTabIndex()
module.exports = document.registerElement('awe-configuration-form-view', prototype: ConfigurationFormView.prototype, extends: 'div')
| true | FormView = require './form-view'
Configuration = require './configuration.coffee'
class ConfigurationFormView extends FormView
initialize: ->
super
@addRow @createTitleRow("Editing Tools Configuration")
# @addRow @createFieldRow("repoUrl", "text", Configuration.labels.repoUrl)
@addRow @createFieldRow("fullName", "minieditor", Configuration.labels.fullName)
@addRow @createFieldRow("email", "minieditor", Configuration.labels.email)
# @addRow @createFieldRow("repoOwner", "text", Configuration.labels.repoOwner)
# @addRow @createFieldRow("username", "text", Configuration.labels.username)
# @addRow @createFieldRow("repoUsername", "text", Configuration.labels.repoUsername)
# @addRow @createFieldRow("password", "PI:PASSWORD:<PASSWORD>END_PI", Configuration.labels.password)
@addRow @createFieldRow("cloneDir", "directory", Configuration.labels.cloneDir)
# @addRow @createFieldRow("advancedMode", "checkbox", Configuration.labels.advancedMode)
# @forceTabIndex()
module.exports = document.registerElement('awe-configuration-form-view', prototype: ConfigurationFormView.prototype, extends: 'div')
|
[
{
"context": " query.multiverseid = id\n else\n query.name = name\n if page > 1\n query.page = page - 1\n gathere",
"end": 5061,
"score": 0.7789427042007446,
"start": 5057,
"tag": "NAME",
"value": "name"
}
] | src/gatherer/card.coffee | davidchambers/tutor | 45 | cheerio = require 'cheerio'
_ = require 'underscore'
gatherer = require '../gatherer'
supertypes = require '../supertypes'
module.exports = (details, callback) ->
if 'which' of details and details.which not in ['a', 'b']
callback new Error 'invalid which property (valid values are "a" and "b")'
gatherer.request gatherer.card.url('Details.aspx', details), (err, res, body) ->
if err?
callback err
else
$ = cheerio.load body
if $('title').text().trim().indexOf('Card Search - Search:') is 0
callback new Error 'no results'
else
callback null, extract $, details
return
return
extract = ($, details) ->
verbose = 'id' of details
t = (el) -> gatherer._get_text $(el)
t1 = (el) -> gatherer._get_text $(el).next()
card =
converted_mana_cost: 0
supertypes: []
types: []
subtypes: []
rulings: _.map $('.discussion').find('tr.post'), (el) ->
[date, ruling] = $(el).children()
[m, d, y] = $(date).text().trim().split('/')
pad = (s) -> "0#{s}".substr(-2)
["#{y}-#{pad m}-#{pad d}", $(ruling).text().trim().replace(/[ ]{2,}/g, ' ')]
set = gatherer._set.bind null, card
get_versions = _.compose gatherer._get_versions, (el) ->
$(el)
.find '.label'
.filter (idx, el) -> $(el).text().trim() is 'Expansion:'
.next()
.find 'img'
# Delete the irrelevant column.
$(do ->
[left, right] = $('.cardComponentContainer')
if details.which is 'b'
left
# Double-faced cards.
else if verbose and (details.id of get_versions(right) and
details.id not of get_versions(left))
left
else if details.name?.toLowerCase() is $(right)
.find '.label'
.filter (idx, el) -> $(el).text().trim() is 'Card Name:'
.next()
.text()
.trim()
.toLowerCase()
left
else
right
).remove()
$('.label').each ->
$el = $ this
switch $el.text().trim()
when 'Card Name:'
set 'name', $el.next().text().trim()
when 'Mana Cost:'
set 'mana_cost', gatherer._get_text $el.next()
when 'Converted Mana Cost:'
set 'converted_mana_cost', +t1 $el
when 'Types:'
[..., types, subtypes] = /^(.+?)(?:\s+\u2014\s+(.+))?$/.exec t1 $el
for type in types.split(/\s+/)
card[if type in supertypes then 'supertypes' else 'types'].push type
set 'subtypes', subtypes?.split(/\s+/)
when 'Card Text:'
set 'text', gatherer._get_rules_text $el.next(), t
when 'Flavor Text:'
break unless verbose
$flavor = $el.next()
$el = $flavor.children().last()
match = /^(\u2014|\u2015\u2015|\uFF5E)\s*(.+)$/.exec $el.text().trim()
if match?
set 'flavor_text_attribution', match[2]
$el.remove()
pattern = /^["\u00AB\u201E\u300C]\s*(.+?)\s*["\u00BB\u300D]([.]?)$/
text = _.map($flavor.children(), t).join('\n')
text = match[1] + match[2] if match and match = pattern.exec text
set 'flavor_text', text
when 'Color Indicator:'
set 'color_indicator', t1 $el
when 'Watermark:'
set 'watermark', t1 $el
when 'P/T:'
[..., power, toughness] = ///^(.+?)\s+/\s+(.+)$///.exec t1 $el
set 'power', gatherer._to_stat power
set 'toughness', gatherer._to_stat toughness
when 'Loyalty:'
set 'loyalty', +t1 $el
when 'Hand/Life:'
text = t1 $el
set 'hand_modifier', +text.match(/Hand Modifier: ([+-]\d+)/)[1]
set 'life_modifier', +text.match(/Life Modifier: ([+-]\d+)/)[1]
when 'Expansion:'
set 'expansion', $el.next().find('a:last-child').text().trim() if verbose
when 'Rarity:'
set 'rarity', t1 $el if verbose
when 'Card Number:'
set 'number', gatherer._to_stat t1 $el if verbose
when 'Artist:'
set 'artist', t1 $el if verbose
when 'All Sets:'
set 'versions', gatherer._get_versions $el.next().find('img')
[..., rating, votes] =
///^Community Rating:(\d(?:[.]\d+)?)/5[(](\d+)votes?[)]$///
.exec $('.textRating').text().replace(/\s+/g, '')
set 'community_rating', rating: +rating, votes: +votes
if verbose
set 'image_url', "#{gatherer.origin}/Handlers/Image.ashx?type=card&multiverseid=#{details.id}"
set 'gatherer_url', "#{gatherer.origin}/Pages/Card/Details.aspx?multiverseid=#{details.id}"
else
# encodeURIComponent notably misses single quote, which messes up cards like "Gideon's Lawkeeper"
encodedName = encodeURIComponent(details.name).replace(/'/g, '%27')
set 'image_url', "#{gatherer.origin}/Handlers/Image.ashx?type=card&name=#{encodedName}"
set 'gatherer_url', "#{gatherer.origin}/Pages/Card/Details.aspx?name=#{encodedName}"
card
module.exports.url = (path, rest...) ->
params = {}
params[k] = v for k, v of o for o in rest
{id, name, page} = params
query = {}
if id?
query.multiverseid = id
else
query.name = name
if page > 1
query.page = page - 1
gatherer.url "/Pages/Card/#{path}", query
| 205856 | cheerio = require 'cheerio'
_ = require 'underscore'
gatherer = require '../gatherer'
supertypes = require '../supertypes'
module.exports = (details, callback) ->
if 'which' of details and details.which not in ['a', 'b']
callback new Error 'invalid which property (valid values are "a" and "b")'
gatherer.request gatherer.card.url('Details.aspx', details), (err, res, body) ->
if err?
callback err
else
$ = cheerio.load body
if $('title').text().trim().indexOf('Card Search - Search:') is 0
callback new Error 'no results'
else
callback null, extract $, details
return
return
extract = ($, details) ->
verbose = 'id' of details
t = (el) -> gatherer._get_text $(el)
t1 = (el) -> gatherer._get_text $(el).next()
card =
converted_mana_cost: 0
supertypes: []
types: []
subtypes: []
rulings: _.map $('.discussion').find('tr.post'), (el) ->
[date, ruling] = $(el).children()
[m, d, y] = $(date).text().trim().split('/')
pad = (s) -> "0#{s}".substr(-2)
["#{y}-#{pad m}-#{pad d}", $(ruling).text().trim().replace(/[ ]{2,}/g, ' ')]
set = gatherer._set.bind null, card
get_versions = _.compose gatherer._get_versions, (el) ->
$(el)
.find '.label'
.filter (idx, el) -> $(el).text().trim() is 'Expansion:'
.next()
.find 'img'
# Delete the irrelevant column.
$(do ->
[left, right] = $('.cardComponentContainer')
if details.which is 'b'
left
# Double-faced cards.
else if verbose and (details.id of get_versions(right) and
details.id not of get_versions(left))
left
else if details.name?.toLowerCase() is $(right)
.find '.label'
.filter (idx, el) -> $(el).text().trim() is 'Card Name:'
.next()
.text()
.trim()
.toLowerCase()
left
else
right
).remove()
$('.label').each ->
$el = $ this
switch $el.text().trim()
when 'Card Name:'
set 'name', $el.next().text().trim()
when 'Mana Cost:'
set 'mana_cost', gatherer._get_text $el.next()
when 'Converted Mana Cost:'
set 'converted_mana_cost', +t1 $el
when 'Types:'
[..., types, subtypes] = /^(.+?)(?:\s+\u2014\s+(.+))?$/.exec t1 $el
for type in types.split(/\s+/)
card[if type in supertypes then 'supertypes' else 'types'].push type
set 'subtypes', subtypes?.split(/\s+/)
when 'Card Text:'
set 'text', gatherer._get_rules_text $el.next(), t
when 'Flavor Text:'
break unless verbose
$flavor = $el.next()
$el = $flavor.children().last()
match = /^(\u2014|\u2015\u2015|\uFF5E)\s*(.+)$/.exec $el.text().trim()
if match?
set 'flavor_text_attribution', match[2]
$el.remove()
pattern = /^["\u00AB\u201E\u300C]\s*(.+?)\s*["\u00BB\u300D]([.]?)$/
text = _.map($flavor.children(), t).join('\n')
text = match[1] + match[2] if match and match = pattern.exec text
set 'flavor_text', text
when 'Color Indicator:'
set 'color_indicator', t1 $el
when 'Watermark:'
set 'watermark', t1 $el
when 'P/T:'
[..., power, toughness] = ///^(.+?)\s+/\s+(.+)$///.exec t1 $el
set 'power', gatherer._to_stat power
set 'toughness', gatherer._to_stat toughness
when 'Loyalty:'
set 'loyalty', +t1 $el
when 'Hand/Life:'
text = t1 $el
set 'hand_modifier', +text.match(/Hand Modifier: ([+-]\d+)/)[1]
set 'life_modifier', +text.match(/Life Modifier: ([+-]\d+)/)[1]
when 'Expansion:'
set 'expansion', $el.next().find('a:last-child').text().trim() if verbose
when 'Rarity:'
set 'rarity', t1 $el if verbose
when 'Card Number:'
set 'number', gatherer._to_stat t1 $el if verbose
when 'Artist:'
set 'artist', t1 $el if verbose
when 'All Sets:'
set 'versions', gatherer._get_versions $el.next().find('img')
[..., rating, votes] =
///^Community Rating:(\d(?:[.]\d+)?)/5[(](\d+)votes?[)]$///
.exec $('.textRating').text().replace(/\s+/g, '')
set 'community_rating', rating: +rating, votes: +votes
if verbose
set 'image_url', "#{gatherer.origin}/Handlers/Image.ashx?type=card&multiverseid=#{details.id}"
set 'gatherer_url', "#{gatherer.origin}/Pages/Card/Details.aspx?multiverseid=#{details.id}"
else
# encodeURIComponent notably misses single quote, which messes up cards like "Gideon's Lawkeeper"
encodedName = encodeURIComponent(details.name).replace(/'/g, '%27')
set 'image_url', "#{gatherer.origin}/Handlers/Image.ashx?type=card&name=#{encodedName}"
set 'gatherer_url', "#{gatherer.origin}/Pages/Card/Details.aspx?name=#{encodedName}"
card
module.exports.url = (path, rest...) ->
params = {}
params[k] = v for k, v of o for o in rest
{id, name, page} = params
query = {}
if id?
query.multiverseid = id
else
query.name = <NAME>
if page > 1
query.page = page - 1
gatherer.url "/Pages/Card/#{path}", query
| true | cheerio = require 'cheerio'
_ = require 'underscore'
gatherer = require '../gatherer'
supertypes = require '../supertypes'
module.exports = (details, callback) ->
if 'which' of details and details.which not in ['a', 'b']
callback new Error 'invalid which property (valid values are "a" and "b")'
gatherer.request gatherer.card.url('Details.aspx', details), (err, res, body) ->
if err?
callback err
else
$ = cheerio.load body
if $('title').text().trim().indexOf('Card Search - Search:') is 0
callback new Error 'no results'
else
callback null, extract $, details
return
return
extract = ($, details) ->
verbose = 'id' of details
t = (el) -> gatherer._get_text $(el)
t1 = (el) -> gatherer._get_text $(el).next()
card =
converted_mana_cost: 0
supertypes: []
types: []
subtypes: []
rulings: _.map $('.discussion').find('tr.post'), (el) ->
[date, ruling] = $(el).children()
[m, d, y] = $(date).text().trim().split('/')
pad = (s) -> "0#{s}".substr(-2)
["#{y}-#{pad m}-#{pad d}", $(ruling).text().trim().replace(/[ ]{2,}/g, ' ')]
set = gatherer._set.bind null, card
get_versions = _.compose gatherer._get_versions, (el) ->
$(el)
.find '.label'
.filter (idx, el) -> $(el).text().trim() is 'Expansion:'
.next()
.find 'img'
# Delete the irrelevant column.
$(do ->
[left, right] = $('.cardComponentContainer')
if details.which is 'b'
left
# Double-faced cards.
else if verbose and (details.id of get_versions(right) and
details.id not of get_versions(left))
left
else if details.name?.toLowerCase() is $(right)
.find '.label'
.filter (idx, el) -> $(el).text().trim() is 'Card Name:'
.next()
.text()
.trim()
.toLowerCase()
left
else
right
).remove()
$('.label').each ->
$el = $ this
switch $el.text().trim()
when 'Card Name:'
set 'name', $el.next().text().trim()
when 'Mana Cost:'
set 'mana_cost', gatherer._get_text $el.next()
when 'Converted Mana Cost:'
set 'converted_mana_cost', +t1 $el
when 'Types:'
[..., types, subtypes] = /^(.+?)(?:\s+\u2014\s+(.+))?$/.exec t1 $el
for type in types.split(/\s+/)
card[if type in supertypes then 'supertypes' else 'types'].push type
set 'subtypes', subtypes?.split(/\s+/)
when 'Card Text:'
set 'text', gatherer._get_rules_text $el.next(), t
when 'Flavor Text:'
break unless verbose
$flavor = $el.next()
$el = $flavor.children().last()
match = /^(\u2014|\u2015\u2015|\uFF5E)\s*(.+)$/.exec $el.text().trim()
if match?
set 'flavor_text_attribution', match[2]
$el.remove()
pattern = /^["\u00AB\u201E\u300C]\s*(.+?)\s*["\u00BB\u300D]([.]?)$/
text = _.map($flavor.children(), t).join('\n')
text = match[1] + match[2] if match and match = pattern.exec text
set 'flavor_text', text
when 'Color Indicator:'
set 'color_indicator', t1 $el
when 'Watermark:'
set 'watermark', t1 $el
when 'P/T:'
[..., power, toughness] = ///^(.+?)\s+/\s+(.+)$///.exec t1 $el
set 'power', gatherer._to_stat power
set 'toughness', gatherer._to_stat toughness
when 'Loyalty:'
set 'loyalty', +t1 $el
when 'Hand/Life:'
text = t1 $el
set 'hand_modifier', +text.match(/Hand Modifier: ([+-]\d+)/)[1]
set 'life_modifier', +text.match(/Life Modifier: ([+-]\d+)/)[1]
when 'Expansion:'
set 'expansion', $el.next().find('a:last-child').text().trim() if verbose
when 'Rarity:'
set 'rarity', t1 $el if verbose
when 'Card Number:'
set 'number', gatherer._to_stat t1 $el if verbose
when 'Artist:'
set 'artist', t1 $el if verbose
when 'All Sets:'
set 'versions', gatherer._get_versions $el.next().find('img')
[..., rating, votes] =
///^Community Rating:(\d(?:[.]\d+)?)/5[(](\d+)votes?[)]$///
.exec $('.textRating').text().replace(/\s+/g, '')
set 'community_rating', rating: +rating, votes: +votes
if verbose
set 'image_url', "#{gatherer.origin}/Handlers/Image.ashx?type=card&multiverseid=#{details.id}"
set 'gatherer_url', "#{gatherer.origin}/Pages/Card/Details.aspx?multiverseid=#{details.id}"
else
# encodeURIComponent notably misses single quote, which messes up cards like "Gideon's Lawkeeper"
encodedName = encodeURIComponent(details.name).replace(/'/g, '%27')
set 'image_url', "#{gatherer.origin}/Handlers/Image.ashx?type=card&name=#{encodedName}"
set 'gatherer_url', "#{gatherer.origin}/Pages/Card/Details.aspx?name=#{encodedName}"
card
module.exports.url = (path, rest...) ->
params = {}
params[k] = v for k, v of o for o in rest
{id, name, page} = params
query = {}
if id?
query.multiverseid = id
else
query.name = PI:NAME:<NAME>END_PI
if page > 1
query.page = page - 1
gatherer.url "/Pages/Card/#{path}", query
|
[
{
"context": "luate ->\n michael = new Model\n name: 'Michael Bolton'\n job: 'Singer'\n\n Models.add michael\n",
"end": 426,
"score": 0.9998498558998108,
"start": 412,
"tag": "NAME",
"value": "Michael Bolton"
},
{
"context": "ertEval ->\n results = Models.where({name: 'Michael Bolton'})\n results[0].get('job') is \"Singer\"\n ",
"end": 646,
"score": 0.999826967716217,
"start": 632,
"tag": "NAME",
"value": "Michael Bolton"
},
{
"context": "valuate ->\n results = Models.where({name: 'Michael Bolton'})\n\n results[0].destroy()\n Models.r",
"end": 878,
"score": 0.9997866749763489,
"start": 864,
"tag": "NAME",
"value": "Michael Bolton"
},
{
"context": "ssertEval ->\n results = Models.where({name: 'Michael Bolton'})\n results.length is 0\n , \"Backbone adap",
"end": 1046,
"score": 0.9997649192810059,
"start": 1032,
"tag": "NAME",
"value": "Michael Bolton"
}
] | test/test.backbone.coffee | lejenome/localForage | 0 | 'use strict'
casper.test.begin "Testing Backbone data adapter", (test) ->
casper.start "#{casper.TEST_URL}test.backbone.html", ->
test.info "Testing using global scope (no require.js)"
test.assertEval ->
typeof Backbone.localforage is 'function'
, "localforage storage adapter is attached to Backbone.localforage"
casper.then ->
@evaluate ->
michael = new Model
name: 'Michael Bolton'
job: 'Singer'
Models.add michael
michael.save()
casper.reload()
casper.then ->
@waitForSelector '#ready', ->
test.assertEval ->
results = Models.where({name: 'Michael Bolton'})
results[0].get('job') is "Singer"
, "Backbone adapter should persist data after a reload"
casper.then ->
@waitForSelector '#ready', ->
@evaluate ->
results = Models.where({name: 'Michael Bolton'})
results[0].destroy()
Models.reset()
casper.wait 300
casper.then ->
test.assertEval ->
results = Models.where({name: 'Michael Bolton'})
results.length is 0
, "Backbone adapter should delete data after model is removed"
casper.run ->
test.done()
| 117442 | 'use strict'
casper.test.begin "Testing Backbone data adapter", (test) ->
casper.start "#{casper.TEST_URL}test.backbone.html", ->
test.info "Testing using global scope (no require.js)"
test.assertEval ->
typeof Backbone.localforage is 'function'
, "localforage storage adapter is attached to Backbone.localforage"
casper.then ->
@evaluate ->
michael = new Model
name: '<NAME>'
job: 'Singer'
Models.add michael
michael.save()
casper.reload()
casper.then ->
@waitForSelector '#ready', ->
test.assertEval ->
results = Models.where({name: '<NAME>'})
results[0].get('job') is "Singer"
, "Backbone adapter should persist data after a reload"
casper.then ->
@waitForSelector '#ready', ->
@evaluate ->
results = Models.where({name: '<NAME>'})
results[0].destroy()
Models.reset()
casper.wait 300
casper.then ->
test.assertEval ->
results = Models.where({name: '<NAME>'})
results.length is 0
, "Backbone adapter should delete data after model is removed"
casper.run ->
test.done()
| true | 'use strict'
casper.test.begin "Testing Backbone data adapter", (test) ->
casper.start "#{casper.TEST_URL}test.backbone.html", ->
test.info "Testing using global scope (no require.js)"
test.assertEval ->
typeof Backbone.localforage is 'function'
, "localforage storage adapter is attached to Backbone.localforage"
casper.then ->
@evaluate ->
michael = new Model
name: 'PI:NAME:<NAME>END_PI'
job: 'Singer'
Models.add michael
michael.save()
casper.reload()
casper.then ->
@waitForSelector '#ready', ->
test.assertEval ->
results = Models.where({name: 'PI:NAME:<NAME>END_PI'})
results[0].get('job') is "Singer"
, "Backbone adapter should persist data after a reload"
casper.then ->
@waitForSelector '#ready', ->
@evaluate ->
results = Models.where({name: 'PI:NAME:<NAME>END_PI'})
results[0].destroy()
Models.reset()
casper.wait 300
casper.then ->
test.assertEval ->
results = Models.where({name: 'PI:NAME:<NAME>END_PI'})
results.length is 0
, "Backbone adapter should delete data after model is removed"
casper.run ->
test.done()
|
[
{
"context": " = singularize underscore name\n\n lkey = \"#{name}_ids\"\n unless lkey in @attributes\n @attributes",
"end": 4421,
"score": 0.9752335548400879,
"start": 4418,
"tag": "KEY",
"value": "ids"
},
{
"context": "ributes\n @attributes.push lkey\n\n fkey ?= 'id'\n\n association = (record, model) ->\n mode",
"end": 4498,
"score": 0.5985258221626282,
"start": 4496,
"tag": "KEY",
"value": "id"
},
{
"context": " = singularize underscore name\n fkey ?= \"#{name}_id\"\n\n association = (record) ->\n model = loa",
"end": 5453,
"score": 0.5795716643333435,
"start": 5451,
"tag": "KEY",
"value": "id"
}
] | src/relations.coffee | nextorigin/spine-relations | 0 | Spine = require "spine"
{isArray} = Array
class BaseCollection extends Spine.Class
constructor: (options = {}) ->
for key, value of options
@[key] = value
first: ->
@all()[0]
last: ->
@all()[-1..]
create: (record, options) ->
newRecord = @model.create record, options
(@add newRecord) if newRecord
class Collection extends BaseCollection
add: (fItem) ->
fItem[@fkey] = @record.id
fItem.save()
remove: (fItem) ->
fItem = @find(fItem) if typeof fItem is 'string'
fItem.destroy()
all: ->
@model.select (rec) => @associated(rec)
find: (id) ->
records = @select (rec) =>
"#{rec.id}" is "#{id}"
throw('Unknown record') unless records[0]
records[0]
findAllByAttribute: (name, value) ->
@model.select (rec) =>
@associated(rec) and rec[name] is value
findByAttribute: (name, value) ->
@findAllByAttribute(name, value)[0]
select: (cb) ->
@model.select (rec) =>
@associated(rec) and cb(rec)
refresh: (values) ->
for record in @all()
delete @model.irecords[record.id]
for match, i in @model.records when match.id is record.id
@model.records.splice(i, 1)
break
values = [values] unless Array.isArray(values)
record[@fkey] = @record.id for record in values
@model.refresh values
# Private
associated: (record) ->
record[@fkey] is @record.id
class O2MCollection extends BaseCollection
add: (item, save = true) ->
if isArray(item)
@add i, false for i in item
else
item = @model.find item unless item instanceof @model
@record[@lkey].push item[@fkey]
@record.save() if save
remove: (item) ->
item = @model.find item unless item instanceof @model
@record[@lkey].splice (@record[@lkey].indexOf item[@fkey]), 1
@record.save()
all: ->
(@model.find lkey for lkey in @record[@lkey])
find: (id) ->
id in @record[@lkey] and @model.find id or throw 'Unknown record'
class M2MCollection extends BaseCollection
add: (item, save = true) ->
if isArray(item)
@add i, false for i in item
else
item = @model.find item unless item instanceof @model
hub = new @Hub()
if @left_to_right
hub["#{@rev_name}_id"] = @record.id
hub["#{@name}_id"] = item.id
else
hub["#{@rev_name}_id"] = item.id
hub["#{@name}_id"] = @record.id
hub.save() if save
remove: (item) ->
i.destroy() for i in @Hub.select (item) =>
@associated(item)
_link: (items) ->
items.map (item) =>
if @left_to_right then return @model.find item["#{@name}_id"]
else return @model.find item["#{@rev_name}_id"]
all: ->
@_link @Hub.select (item) =>
@associated(item)
find: (id) ->
records = @Hub.select (rec) =>
@associated(rec, id)
throw 'Unknown record' unless records[0]
@_link(records)[0]
associated: (record, id) ->
if @left_to_right
return false unless record["#{@rev_name}_id"] is @record.id
return record["#{@rev_name}_id"] is id if id
else
return false unless record["#{@name}_id"] is @record.id
return record["#{@name}_id"] is id if id
true
class Instance extends Spine.Class
constructor: (options = {}) ->
for key, value of options
@[key] = value
find: ->
@model.find @record[@fkey]
update: (value) ->
unless value instanceof @model
value = new @model(value)
value.save if value.isNew()
@record[@fkey] = value and value.id
@record.save()
class Singleton extends Spine.Class
constructor: (options = {}) ->
for key, value of options
@[key] = value
find: ->
@record.id and @model.findByAttribute(@fkey, @record.id)
update: (value) ->
unless value instanceof @model
value = @model.fromJSON(value)
value[@fkey] = @record.id
value.save()
{singularize, underscore, loadModel} = require "./helpers"
Relations =
__filter: (args, revert=false) ->
(rec) ->
q = !!revert
for key, value of args
return q unless rec[key] is value
!q
filter: (args) -> @select @__filter args
exclude: (args) -> @select @__filter args, true
oneToMany: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
lkey = "#{name}_ids"
unless lkey in @attributes
@attributes.push lkey
fkey ?= 'id'
association = (record, model) ->
model = loadModel model, parent
record[lkey] = [] unless record[lkey]
new O2MCollection {lkey, fkey, record, model}
@::["#{name}s"] = (value) ->
association(@, model)
hasMany: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
fkey ?= "#{underscore(this.className)}_id"
association = (record) ->
model = loadModel model, parent
new Collection(
name: name, model: model,
record: record, fkey: fkey
)
@::["#{name}s"] = (value) ->
association(@).refresh(value) if value?
association(@)
belongsTo: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
fkey ?= "#{name}_id"
association = (record) ->
model = loadModel model, parent
new Instance(
name: name, model: model,
record: record, fkey: fkey
)
@::[name] = (value) ->
if value?
association(@).update(value)
else
association(@).find()
@attributes.push(fkey)
hasOne: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
fkey ?= "#{underscore(@className)}_id"
association = (record) ->
model = loadModel model, parent
new Singleton(
name: name, model: model,
record: record, fkey: fkey
)
@::[name] = (value) ->
association(@).update(value) if value?
association(@).find()
foreignKey: (model, name, rev_name) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
unless rev_name?
rev_name = @className.toLowerCase()
rev_name = singularize underscore rev_name
rev_name = "#{rev_name}s"
@belongsTo name, model
model.hasMany rev_name, @
manyToMany: (model, name, rev_name) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
unless rev_name?
rev_name = @className.toLowerCase()
rev_name = singularize underscore rev_name
rev_name = "#{rev_name}s"
rev_model = @
local = typeof model.loadLocal is 'function' or typeof rev_model.loadLocal is 'function'
tigerDB = typeof model.loadSpineDB is 'function' or typeof rev_model.loadSpineDB is 'function'
class Hub extends Spine.Model
@extend Spine.Model.Local if local
@extend Spine.Model.SpineDB if tigerDB
@configure "_#{rev_name}_to_#{name}", "#{@rev_name}_id", "#{@name}_id"
Hub.fetch() if local or tigerDB
Hub.foreignKey rev_model, "#{rev_name}"
Hub.foreignKey model, "#{name}"
association = (record, model, left_to_right) ->
model = loadModel model, parent
new M2MCollection {name, rev_name, record, model, Hub: Hub, left_to_right}
rev_model::["#{name}s"] = (value) ->
association(@, model, true)
model::["#{rev_name}s"] = (value) ->
association(@, rev_model, false)
Spine.Model.extend Relations
Relations.Classes =
BaseCollection: BaseCollection
Collection: Collection
O2MCollection: O2MCollection
M2MCollection: M2MCollection
Instance: Instance
Singleton: Singleton
Spine.Model.Relations = Relations
module?.exports = Relations
| 135393 | Spine = require "spine"
{isArray} = Array
class BaseCollection extends Spine.Class
constructor: (options = {}) ->
for key, value of options
@[key] = value
first: ->
@all()[0]
last: ->
@all()[-1..]
create: (record, options) ->
newRecord = @model.create record, options
(@add newRecord) if newRecord
class Collection extends BaseCollection
add: (fItem) ->
fItem[@fkey] = @record.id
fItem.save()
remove: (fItem) ->
fItem = @find(fItem) if typeof fItem is 'string'
fItem.destroy()
all: ->
@model.select (rec) => @associated(rec)
find: (id) ->
records = @select (rec) =>
"#{rec.id}" is "#{id}"
throw('Unknown record') unless records[0]
records[0]
findAllByAttribute: (name, value) ->
@model.select (rec) =>
@associated(rec) and rec[name] is value
findByAttribute: (name, value) ->
@findAllByAttribute(name, value)[0]
select: (cb) ->
@model.select (rec) =>
@associated(rec) and cb(rec)
refresh: (values) ->
for record in @all()
delete @model.irecords[record.id]
for match, i in @model.records when match.id is record.id
@model.records.splice(i, 1)
break
values = [values] unless Array.isArray(values)
record[@fkey] = @record.id for record in values
@model.refresh values
# Private
associated: (record) ->
record[@fkey] is @record.id
class O2MCollection extends BaseCollection
add: (item, save = true) ->
if isArray(item)
@add i, false for i in item
else
item = @model.find item unless item instanceof @model
@record[@lkey].push item[@fkey]
@record.save() if save
remove: (item) ->
item = @model.find item unless item instanceof @model
@record[@lkey].splice (@record[@lkey].indexOf item[@fkey]), 1
@record.save()
all: ->
(@model.find lkey for lkey in @record[@lkey])
find: (id) ->
id in @record[@lkey] and @model.find id or throw 'Unknown record'
class M2MCollection extends BaseCollection
add: (item, save = true) ->
if isArray(item)
@add i, false for i in item
else
item = @model.find item unless item instanceof @model
hub = new @Hub()
if @left_to_right
hub["#{@rev_name}_id"] = @record.id
hub["#{@name}_id"] = item.id
else
hub["#{@rev_name}_id"] = item.id
hub["#{@name}_id"] = @record.id
hub.save() if save
remove: (item) ->
i.destroy() for i in @Hub.select (item) =>
@associated(item)
_link: (items) ->
items.map (item) =>
if @left_to_right then return @model.find item["#{@name}_id"]
else return @model.find item["#{@rev_name}_id"]
all: ->
@_link @Hub.select (item) =>
@associated(item)
find: (id) ->
records = @Hub.select (rec) =>
@associated(rec, id)
throw 'Unknown record' unless records[0]
@_link(records)[0]
associated: (record, id) ->
if @left_to_right
return false unless record["#{@rev_name}_id"] is @record.id
return record["#{@rev_name}_id"] is id if id
else
return false unless record["#{@name}_id"] is @record.id
return record["#{@name}_id"] is id if id
true
class Instance extends Spine.Class
constructor: (options = {}) ->
for key, value of options
@[key] = value
find: ->
@model.find @record[@fkey]
update: (value) ->
unless value instanceof @model
value = new @model(value)
value.save if value.isNew()
@record[@fkey] = value and value.id
@record.save()
class Singleton extends Spine.Class
constructor: (options = {}) ->
for key, value of options
@[key] = value
find: ->
@record.id and @model.findByAttribute(@fkey, @record.id)
update: (value) ->
unless value instanceof @model
value = @model.fromJSON(value)
value[@fkey] = @record.id
value.save()
{singularize, underscore, loadModel} = require "./helpers"
Relations =
__filter: (args, revert=false) ->
(rec) ->
q = !!revert
for key, value of args
return q unless rec[key] is value
!q
filter: (args) -> @select @__filter args
exclude: (args) -> @select @__filter args, true
oneToMany: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
lkey = "#{name}_<KEY>"
unless lkey in @attributes
@attributes.push lkey
fkey ?= '<KEY>'
association = (record, model) ->
model = loadModel model, parent
record[lkey] = [] unless record[lkey]
new O2MCollection {lkey, fkey, record, model}
@::["#{name}s"] = (value) ->
association(@, model)
hasMany: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
fkey ?= "#{underscore(this.className)}_id"
association = (record) ->
model = loadModel model, parent
new Collection(
name: name, model: model,
record: record, fkey: fkey
)
@::["#{name}s"] = (value) ->
association(@).refresh(value) if value?
association(@)
belongsTo: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
fkey ?= "#{name}_<KEY>"
association = (record) ->
model = loadModel model, parent
new Instance(
name: name, model: model,
record: record, fkey: fkey
)
@::[name] = (value) ->
if value?
association(@).update(value)
else
association(@).find()
@attributes.push(fkey)
hasOne: (model, name, fkey) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
fkey ?= "#{underscore(@className)}_id"
association = (record) ->
model = loadModel model, parent
new Singleton(
name: name, model: model,
record: record, fkey: fkey
)
@::[name] = (value) ->
association(@).update(value) if value?
association(@).find()
foreignKey: (model, name, rev_name) ->
parent = @
unless name?
model = loadModel model, parent
name = model.className.toLowerCase()
name = singularize underscore name
unless rev_name?
rev_name = @className.toLowerCase()
rev_name = singularize underscore rev_name
rev_name = "#{rev_name}s"
@belongsTo name, model
model.hasMany rev_name, @
# manyToMany(model, name, rev_name): link two models through a generated join
# model (Hub) holding "<rev_name>_id"/"<name>_id" pairs, and install plural
# accessors on both sides.
manyToMany: (model, name, rev_name) ->
  parent = @
  unless name?
    model = loadModel model, parent
    name = model.className.toLowerCase()
    name = singularize underscore name
  unless rev_name?
    rev_name = @className.toLowerCase()
    rev_name = singularize underscore rev_name
    rev_name = "#{rev_name}s"
  rev_model = @
  # Propagate persistence extensions to the join model if either side uses them.
  local = typeof model.loadLocal is 'function' or typeof rev_model.loadLocal is 'function'
  tigerDB = typeof model.loadSpineDB is 'function' or typeof rev_model.loadSpineDB is 'function'
  class Hub extends Spine.Model
    @extend Spine.Model.Local if local
    @extend Spine.Model.SpineDB if tigerDB
    # Fixed: the attribute names used "#{@rev_name}"/"#{@name}", which read
    # (undefined) class properties of Hub instead of the closure variables,
    # producing "undefined_id" columns.
    @configure "_#{rev_name}_to_#{name}", "#{rev_name}_id", "#{name}_id"
  Hub.fetch() if local or tigerDB
  Hub.foreignKey rev_model, "#{rev_name}"
  Hub.foreignKey model, "#{name}"
  association = (record, model, left_to_right) ->
    model = loadModel model, parent
    new M2MCollection {name, rev_name, record, model, Hub: Hub, left_to_right}
  rev_model::["#{name}s"] = (value) ->
    association(@, model, true)
  model::["#{rev_name}s"] = (value) ->
    association(@, rev_model, false)
# Install the relation helpers on every Spine model, expose the association
# classes for external use, and export the module.
Spine.Model.extend Relations
Relations.Classes =
  BaseCollection: BaseCollection
  Collection: Collection
  O2MCollection: O2MCollection
  M2MCollection: M2MCollection
  Instance: Instance
  Singleton: Singleton
Spine.Model.Relations = Relations
module?.exports = Relations
| true | Spine = require "spine"
{isArray} = Array
# Base class for association collections: copies constructor options onto the
# instance and provides first/last/create over the subclass's all().
class BaseCollection extends Spine.Class
  constructor: (options = {}) ->
    for key, value of options
      @[key] = value
  first: ->
    @all()[0]
  # Fixed: returned a one-element array (`@all()[-1..]` is a slice) while
  # first() returns a record; now returns the last record itself.
  last: ->
    values = @all()
    values[values.length - 1]
  # Create a record on the related model, then attach it to the owner.
  create: (record, options) ->
    newRecord = @model.create record, options
    (@add newRecord) if newRecord
# Association proxy for hasMany: a live view over @model records whose
# foreign key (@fkey) points back at the owning @record.
class Collection extends BaseCollection
  # Attach an existing record to the owner by stamping the foreign key.
  add: (fItem) ->
    fItem[@fkey] = @record.id
    fItem.save()
  remove: (fItem) ->
    fItem = @find(fItem) if typeof fItem is 'string'
    fItem.destroy()
  all: ->
    @model.select (rec) => @associated(rec)
  # String-compare ids so ObjectId-like values and plain strings both match.
  find: (id) ->
    records = @select (rec) =>
      "#{rec.id}" is "#{id}"
    throw('Unknown record') unless records[0]
    records[0]
  findAllByAttribute: (name, value) ->
    @model.select (rec) =>
      @associated(rec) and rec[name] is value
  findByAttribute: (name, value) ->
    @findAllByAttribute(name, value)[0]
  select: (cb) ->
    @model.select (rec) =>
      @associated(rec) and cb(rec)
  # Replace the associated records wholesale: drop the current ones from the
  # model's in-memory stores (irecords by id, records by position), stamp the
  # foreign key on the incoming values, then hand them to Model.refresh.
  refresh: (values) ->
    for record in @all()
      delete @model.irecords[record.id]
      for match, i in @model.records when match.id is record.id
        @model.records.splice(i, 1)
        break
    values = [values] unless Array.isArray(values)
    record[@fkey] = @record.id for record in values
    @model.refresh values
  # Private
  associated: (record) ->
    record[@fkey] is @record.id
# Association proxy for oneToMany: the owning record keeps an array of related
# ids under @lkey; @fkey names the id attribute on the related model.
class O2MCollection extends BaseCollection
  # Accepts a record, an id, or an array of either; saves once at the end.
  add: (item, save = true) ->
    if isArray(item)
      @add i, false for i in item
    else
      item = @model.find item unless item instanceof @model
      @record[@lkey].push item[@fkey]
      @record.save() if save
  remove: (item) ->
    item = @model.find item unless item instanceof @model
    @record[@lkey].splice (@record[@lkey].indexOf item[@fkey]), 1
    @record.save()
  all: ->
    (@model.find lkey for lkey in @record[@lkey])
  # Fixed: the original used `... and @model.find id or throw 'Unknown record'`,
  # putting `throw` in expression position (a CoffeeScript syntax error).
  # Model.find itself raises for ids it does not know, so behavior is the same.
  find: (id) ->
    throw 'Unknown record' unless id in @record[@lkey]
    @model.find id
# Association proxy for manyToMany: records are linked through a join model
# (@Hub) carrying "<rev_name>_id"/"<name>_id" pairs. @left_to_right tells
# which column is the owner side for this direction of the association.
class M2MCollection extends BaseCollection
  # Link `item` (record, id, or array of either) to @record via a Hub row.
  add: (item, save = true) ->
    if isArray(item)
      @add i, false for i in item
    else
      item = @model.find item unless item instanceof @model
      hub = new @Hub()
      if @left_to_right
        hub["#{@rev_name}_id"] = @record.id
        hub["#{@name}_id"] = item.id
      else
        hub["#{@rev_name}_id"] = item.id
        hub["#{@name}_id"] = @record.id
      hub.save() if save
  # Destroy only the Hub rows linking @record to `item`.
  # Fixed: the original shadowed `item` in the select callback and deleted
  # every link owned by @record, not just the one being removed.
  remove: (item) ->
    item = @model.find item unless item instanceof @model
    i.destroy() for i in @Hub.select (rec) =>
      @associated(rec, item.id)
  # Map Hub rows to the records on the far side of the association.
  _link: (items) ->
    items.map (item) =>
      if @left_to_right then return @model.find item["#{@name}_id"]
      else return @model.find item["#{@rev_name}_id"]
  all: ->
    @_link @Hub.select (item) =>
      @associated(item)
  find: (id) ->
    records = @Hub.select (rec) =>
      @associated(rec, id)
    throw 'Unknown record' unless records[0]
    @_link(records)[0]
  # A Hub row belongs to this collection when its owner-side column matches
  # @record.id; when `id` is given, the far-side column must match it too.
  # Fixed: the original compared the OWNER column against `id` as well, so
  # find()/remove() could never match a related record.
  associated: (record, id) ->
    if @left_to_right
      return false unless record["#{@rev_name}_id"] is @record.id
      return record["#{@name}_id"] is id if id
    else
      return false unless record["#{@name}_id"] is @record.id
      return record["#{@rev_name}_id"] is id if id
    true
# Association handle for belongsTo: resolves the single related record through
# the foreign key stored on the owning @record.
class Instance extends Spine.Class
  constructor: (options = {}) ->
    for key, value of options
      @[key] = value
  find: ->
    @model.find @record[@fkey]
  # Point the owner at `value`, creating/persisting it first if needed.
  update: (value) ->
    unless value instanceof @model
      value = new @model(value)
    # Fixed: `value.save` was a bare property access and never persisted the
    # new record, leaving value.id unset below.
    value.save() if value.isNew()
    @record[@fkey] = value and value.id
    @record.save()
# Association handle for hasOne: the foreign key lives on the RELATED record,
# so lookups filter the related model by @fkey == @record.id.
class Singleton extends Spine.Class
  constructor: (options = {}) ->
    @[k] = v for k, v of options
  # Returns the related record, or a falsy value when the owner is unsaved.
  find: ->
    @record.id and @model.findByAttribute(@fkey, @record.id)
  # Coerce `value` into a model instance, stamp the back-reference, persist.
  update: (value) ->
    value = @model.fromJSON(value) unless value instanceof @model
    value[@fkey] = @record.id
    value.save()
{singularize, underscore, loadModel} = require "./helpers"
# Query helpers mixed into every Spine model, plus the relation builders below.
Relations =
  # Build a predicate matching records whose attributes equal every key/value
  # in `args`; `revert` inverts the result (used by exclude).
  __filter: (args, revert=false) ->
    (rec) ->
      q = !!revert
      for key, value of args
        return q unless rec[key] is value
      !q
  filter: (args) -> @select @__filter args
  exclude: (args) -> @select @__filter args, true
# oneToMany(model, name, fkey): define a `<name>s` accessor backed by an
# O2MCollection; the owning record stores the related ids in a "<name>_ids"
# array attribute.
oneToMany: (model, name, fkey) ->
  parent = @
  unless name?
    model = loadModel model, parent
    name = model.className.toLowerCase()
    name = singularize underscore name
  # Fixed: both key names were clobbered by anonymization placeholders
  # ("PI:KEY:..."). O2MCollection pushes item[@fkey] into record[lkey], i.e.
  # the related record's id into the owner's id-list attribute.
  lkey = "#{name}_ids"
  unless lkey in @attributes
    @attributes.push lkey
  fkey ?= 'id'
  association = (record, model) ->
    model = loadModel model, parent
    record[lkey] = [] unless record[lkey]
    new O2MCollection {lkey, fkey, record, model}
  # NOTE(review): the accessor ignores `value`; assignment semantics appear
  # unimplemented for oneToMany — confirm against callers.
  @::["#{name}s"] = (value) ->
    association(@, model)
# hasMany(model, name, fkey): define a `<name>s` accessor on this model that
# returns a Collection of related records; assigning a value refreshes the
# collection's contents wholesale.
hasMany: (model, name, fkey) ->
  parent = @
  unless name?
    model = loadModel model, parent
    name = model.className.toLowerCase()
    name = singularize underscore name
  # Foreign key on the related model defaults to "<this_class>_id".
  fkey ?= "#{underscore(this.className)}_id"
  association = (record) ->
    model = loadModel model, parent
    new Collection(
      name: name, model: model,
      record: record, fkey: fkey
    )
  @::["#{name}s"] = (value) ->
    association(@).refresh(value) if value?
    association(@)
# belongsTo(model, name, fkey): define a `<name>` accessor proxying the single
# related record through an Instance association. Reading returns the related
# record; assigning updates the foreign key stored on THIS model.
belongsTo: (model, name, fkey) ->
  parent = @
  unless name?
    model = loadModel model, parent
    name = model.className.toLowerCase()
    name = singularize underscore name
  # Fixed: the default was clobbered by an anonymization placeholder
  # ("#{name}_PI:KEY:..."); the module-wide convention is "<name>_id".
  fkey ?= "#{name}_id"
  association = (record) ->
    model = loadModel model, parent
    new Instance(
      name: name, model: model,
      record: record, fkey: fkey
    )
  @::[name] = (value) ->
    if value?
      association(@).update(value)
    else
      association(@).find()
  # Register the foreign key as a persisted attribute on this model.
  @attributes.push(fkey)
# hasOne(model, name, fkey): define a `<name>` accessor returning the single
# related record via a Singleton association; the foreign key lives on the
# RELATED model and defaults to "<this_class>_id".
hasOne: (model, name, fkey) ->
  parent = @
  unless name?
    model = loadModel model, parent
    name  = singularize underscore model.className.toLowerCase()
  fkey ?= "#{underscore(@className)}_id"
  association = (record) ->
    # Resolves (and caches via the closure assignment) the related model.
    model = loadModel model, parent
    new Singleton(name: name, model: model, record: record, fkey: fkey)
  @::[name] = (value) ->
    # With an argument: push the value onto the related record first.
    association(@).update(value) if value?
    association(@).find()
# foreignKey(model, name, rev_name): wire both directions of a one-to-many —
# this model belongsTo `model` (accessor `name`), and `model` hasMany of this
# model (accessor `rev_name`, already pluralized).
foreignKey: (model, name, rev_name) ->
  parent = @
  # Resolve the model up front: hasMany must be invoked ON the class.
  model = loadModel model, parent
  unless name?
    name = model.className.toLowerCase()
    name = singularize underscore name
  unless rev_name?
    rev_name = @className.toLowerCase()
    rev_name = singularize underscore rev_name
    rev_name = "#{rev_name}s"
  # Fixed: belongsTo/hasMany take (model, name, ...); the original passed the
  # arguments swapped, interpolating the class object into the key names.
  @belongsTo model, name
  model.hasMany @, rev_name
# manyToMany(model, name, rev_name): link two models through a generated join
# model (Hub) holding "<rev_name>_id"/"<name>_id" pairs, and install plural
# accessors on both sides.
manyToMany: (model, name, rev_name) ->
  parent = @
  unless name?
    model = loadModel model, parent
    name = model.className.toLowerCase()
    name = singularize underscore name
  unless rev_name?
    rev_name = @className.toLowerCase()
    rev_name = singularize underscore rev_name
    rev_name = "#{rev_name}s"
  rev_model = @
  # Propagate persistence extensions to the join model if either side uses them.
  local = typeof model.loadLocal is 'function' or typeof rev_model.loadLocal is 'function'
  tigerDB = typeof model.loadSpineDB is 'function' or typeof rev_model.loadSpineDB is 'function'
  class Hub extends Spine.Model
    @extend Spine.Model.Local if local
    @extend Spine.Model.SpineDB if tigerDB
    # Fixed: the attribute names used "#{@rev_name}"/"#{@name}", which read
    # (undefined) class properties of Hub instead of the closure variables,
    # producing "undefined_id" columns.
    @configure "_#{rev_name}_to_#{name}", "#{rev_name}_id", "#{name}_id"
  Hub.fetch() if local or tigerDB
  Hub.foreignKey rev_model, "#{rev_name}"
  Hub.foreignKey model, "#{name}"
  association = (record, model, left_to_right) ->
    model = loadModel model, parent
    new M2MCollection {name, rev_name, record, model, Hub: Hub, left_to_right}
  rev_model::["#{name}s"] = (value) ->
    association(@, model, true)
  model::["#{rev_name}s"] = (value) ->
    association(@, rev_model, false)
# Install the relation helpers on every Spine model, expose the association
# classes for external use, and export the module.
Spine.Model.extend Relations
Relations.Classes =
  BaseCollection: BaseCollection
  Collection: Collection
  O2MCollection: O2MCollection
  M2MCollection: M2MCollection
  Instance: Instance
  Singleton: Singleton
Spine.Model.Relations = Relations
module?.exports = Relations
|
[
{
"context": " ->\n new User({_id: creator2Id, name: 'Dummy Name'}).save (err) ->\n # retrieve last ra",
"end": 18540,
"score": 0.9993100166320801,
"start": 18530,
"tag": "NAME",
"value": "Dummy Name"
},
{
"context": " assert.equal rattles[0].creator.name, 'Dummy Name'\n done()\n\n describe \"document",
"end": 18885,
"score": 0.9993094205856323,
"start": 18875,
"tag": "NAME",
"value": "Dummy Name"
}
] | test/unit/index.coffee | brianjd/mongoose-rattle-plugin | 0 | require '../bootstrap'
async = require 'async'
sinon = require 'sinon'
assert = require 'assert'
moment = require 'moment'
should = require 'should'
mongoose = require 'mongoose'
Thingy = require '../models/thingy'
User = require '../models/user'
ObjectId = mongoose.Types.ObjectId;
# Unit tests for the mongoose-rattle plugin: document timestamps, comment and
# like instance methods, and list statics.
describe "MongooseRattlePlugin", ->
  thingy = {}
  commentorUserId = new ObjectId()
  objectCreatorUserId = new ObjectId()
  # Start every test from an empty collection.
  beforeEach (done) ->
    Thingy.remove done
  describe "document.save(callback)", ->
    it "update dateCreation and dateUpdate when inserting", (done) ->
      clock = sinon.useFakeTimers()
      new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
        assert.deepEqual(new Date(), thingySaved.dateCreation)
        assert.deepEqual(new Date(), thingySaved.dateUpdate)
        clock.restore()
        done()
    it "only update dateUpdate when updating", (done) ->
      # Insert at one fake time, update at another, so the two stamps differ.
      clock = sinon.useFakeTimers(new Date(2011, 0, 1, 1, 1, 36).getTime())
      new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
        clock.restore()
        clock = sinon.useFakeTimers(new Date(2012, 0, 1, 1, 1, 36).getTime())
        thingySaved.save (err, thingySaved) ->
          assert.notDeepEqual(new Date(), thingySaved.dateCreation)
          assert.deepEqual(new Date(), thingySaved.dateUpdate)
          clock.restore()
          done()
  describe "Plugin methods", ->
    # Each test gets a freshly persisted Thingy owned by objectCreatorUserId.
    beforeEach (done) ->
      new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
        thingy = thingySaved
        done()
# --- Comment accessors and mutators ------------------------------------
describe "document.getComment(commentId)", ->
  userOneId = new ObjectId()
  userTwoId = new ObjectId()
  level1UserOneMsg = 'level1 message ' + userOneId
  level1UserTwoMsg = 'level1 message ' + userTwoId
  commentIds = {}
  beforeEach (done) ->
    thingy.comments = [
      message: level1UserOneMsg
      creator: userOneId
    ,
      message: level1UserTwoMsg
      creator: userTwoId
    ]
    commentIds['level 1 ' + userOneId] = thingy.comments[0]._id
    commentIds['level 1 ' + userTwoId] = thingy.comments[1]._id
    thingy.save done
  it "retrieve null if comment doesn't exist", ->
    assert.equal(null, thingy.getComment('n0t3x1t1n9'))
  it "retrieve comment", ->
    assert.equal(level1UserOneMsg, thingy.getComment(commentIds['level 1 ' + userOneId]).message)
    assert.equal(level1UserTwoMsg, thingy.getComment(commentIds['level 1 ' + userTwoId]).message)
  it "retrieve a comment when commentId is a string and not an ObjectId", ->
    assert.equal(level1UserOneMsg, thingy.getComment(String(commentIds['level 1 ' + userOneId])).message)
describe "document.addComment(userId, message, callback)", ->
  it "append a new comment and return comment id", (done) ->
    commentId = thingy.addComment commentorUserId, 'dummy message', (err) ->
      should.not.exists(err)
      should.exists(commentId)
      Thingy.findById thingy._id, (err, updatedThingy) ->
        should.exists(updatedThingy)
        assert.equal(1, updatedThingy.comments.length)
        done()
  it "update dateCreation and dateUpdated", (done) ->
    clock = sinon.useFakeTimers()
    commentId = thingy.addComment commentorUserId, 'dummy message', (err, updatedThingy) ->
      assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateCreation)
      assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateUpdate)
      clock.restore()
      done()
  it "fails if message length is out of min and max", (done) ->
    thingy.addComment commentorUserId, '', (err) ->
      should.exists(err)
      done()
describe "document.editComment(userId, commentId, message, callback)", ->
  commentId = null
  updatedMessage = 'dummy message updated'
  beforeEach (done) ->
    # Create the comment in the "past" so edit timestamps can be compared.
    clock = sinon.useFakeTimers(new Date(2011, 0, 1, 1, 1, 36).getTime())
    commentId = thingy.addComment commentorUserId, 'dummy message', (err) ->
      clock.restore()
      done()
  it "fails if message length is out of min and max", (done) ->
    thingy.editComment commentorUserId, commentId, '', (err) ->
      should.exists(err)
      done()
  describe 'when user is not the creator', ->
    it "always fails", (done) ->
      thingy.editComment 'n0t3x1t1n9', commentId, updatedMessage, (err) ->
        should.exists(err)
        done()
  describe 'when user is the creator', ->
    # Shared assertion: the edit succeeds and the stored message is updated.
    checkEditCommentWhenOwner = (commentorUserId, commentId, updatedMessage, done) ->
      thingy.editComment commentorUserId, commentId, updatedMessage, (err) ->
        should.not.exists(err)
        should.exists(commentId)
        Thingy.findById thingy._id, (err, updatedThingy) ->
          should.exists(updatedThingy)
          assert.equal(1, updatedThingy.comments.length)
          assert.equal(updatedMessage, updatedThingy.comments[0].message)
          done()
    it "edit comment and return comment id if user is the owner", (done) ->
      checkEditCommentWhenOwner(commentorUserId, commentId, updatedMessage, done)
    it "edit comment and return comment id if user is the owner when userId is a string", (done) ->
      checkEditCommentWhenOwner(String(commentorUserId), commentId, updatedMessage, done)
    it "update dateCreation and dateUpdated", (done) ->
      clock = sinon.useFakeTimers(new Date(2012, 0, 1, 1, 1, 36).getTime())
      thingy.editComment commentorUserId, commentId, updatedMessage, (err, updatedThingy) ->
        assert.notDeepEqual(new Date(), updatedThingy.getComment(commentId).dateCreation)
        assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateUpdate)
        clock.restore()
        done()
describe "document.removeComment(userId, commentId, callback)", ->
  level1Msg = 'level1 message'
  commentIds = {}
  beforeEach (done) ->
    thingy.comments = [
      message: level1Msg
      creator: commentorUserId
    ,
      message: 'level1 second message'
      creator: commentorUserId
    ]
    commentIds['level 1'] = thingy.comments[0]._id
    thingy.save done
  it "fails if comment doesn't exist", (done) ->
    thingy.removeComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
      should.exists(err)
      done()
  describe 'when user is not the creator', ->
    it "it's not removing the comment", (done) ->
      thingy.removeComment 'n0t3x1t1n9', commentIds['level 1'], (err, updatedThingy) ->
        should.exists(err)
        should.exists(updatedThingy)
        should.exists(updatedThingy.getComment(commentIds['level 1']))
        done()
  describe 'when user is the creator', ->
    it "can remove comment", (done) ->
      thingy.removeComment commentorUserId, commentIds['level 1'], (err, updatedThingy) ->
        should.exists(updatedThingy)
        should.not.exists(updatedThingy.getComment(commentIds['level 1']))
        done()
    it "remove comment when userId param is a string", (done) ->
      thingy.removeComment String(commentorUserId), commentIds['level 1'], (err, updatedThingy) ->
        should.exists(updatedThingy)
        should.not.exists(updatedThingy.getComment(commentIds['level 1']))
        done()
    it "remove comment when commentId is a string", (done) ->
      thingy.removeComment commentorUserId, String(commentIds['level 1']), (err, updatedThingy) ->
        should.exists(updatedThingy)
        should.not.exists(updatedThingy.getComment(commentIds['level 1']))
        done()
# --- Like mutators (document and per-comment) --------------------------
# Only test-description strings were corrected ("didn'nt" -> "didn't",
# garbled likesCount description); test logic is unchanged.
describe "document.addLike(userId, callback)", ->
  it "add one user like if user doesn't already liked", (done) ->
    thingy.addLike commentorUserId, (err, updatedThingy) ->
      assert.equal(1, updatedThingy.likes.length)
      assert.equal(1, updatedThingy.likesCount)
      done()
  it "not add an other user like if user already liked", (done) ->
    thingy.addLike commentorUserId, (err, updatedThingy) ->
      assert.equal(1, updatedThingy.likes.length)
      assert.equal(1, updatedThingy.likesCount)
      updatedThingy.addLike commentorUserId, (err, updatedThingy) ->
        should.exist(err)
        assert.equal(1, updatedThingy.likes.length)
        assert.equal(1, updatedThingy.likesCount)
        done()
  it "not add an other user like if user already liked and userId param is a string", (done) ->
    thingy.addLike commentorUserId, (err, updatedThingy) ->
      thingy.addLike String(commentorUserId), (err, updatedThingy) ->
        assert.equal(1, updatedThingy.likes.length)
        assert.equal(1, updatedThingy.likesCount)
        done()
describe "document.removeLike(userId, callback)", ->
  userOneId = new ObjectId()
  userTwoId = new ObjectId()
  # Seed two likes (commentorUserId and userOneId) before each test.
  beforeEach (done) ->
    async.series [(callback) ->
      thingy.addLike commentorUserId, (err, updatedThingy) ->
        thingy = updatedThingy
        callback()
    , (callback) ->
      thingy.addLike userOneId, (err, updatedThingy) ->
        thingy = updatedThingy
        callback()
    ], done
  it "not affect current likes list if user didn't already liked", (done) ->
    thingy.removeLike userTwoId, (err, updatedThingy) ->
      should.exists(err)
      assert.equal(2, updatedThingy.likes.length)
      assert.equal(2, updatedThingy.likesCount)
      done()
  it "remove user like from likes list if user already liked", (done) ->
    thingy.removeLike commentorUserId, (err, updatedThingy) ->
      assert.equal(1, updatedThingy.likes.length)
      assert.equal(1, updatedThingy.likesCount)
      done()
  it "remove user like from likes list if user already liked when userId param is a string", (done) ->
    thingy.removeLike String(commentorUserId), (err, updatedThingy) ->
      assert.equal(1, updatedThingy.likes.length)
      assert.equal(1, updatedThingy.likesCount)
      done()
  it "keep likesCount at 0 when there are no more likes", (done) ->
    thingy.removeLike String(commentorUserId), (err, updatedThingy) ->
      thingy.removeLike String(userOneId), (err, updatedThingy) ->
        thingy.removeLike String(userOneId), (err, updatedThingy) ->
          assert.equal(0, updatedThingy.likes.length)
          assert.equal(0, updatedThingy.likesCount)
          done()
describe "document.addLikeToComment(userId, commentId, callback)", ->
  level1Msg = 'level1 message'
  commentId = ''
  beforeEach (done) ->
    thingy.comments = [
      message: 'level1 message'
      creator: commentorUserId
    ]
    commentId = thingy.comments[0]._id
    thingy.save done
  it "fails if comment doesn't exist", (done) ->
    thingy.addLikeToComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
      should.exists(err)
      done()
  it "add one user like if user doesn't already liked and comment exists", (done) ->
    thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
      assert.equal 1, updatedThingy.getComment(commentId).likes.length
      assert.equal 1, updatedThingy.getComment(commentId).likesCount
      done()
  it "not add an other user like if user already liked and comment exists", (done) ->
    thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
      thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
        should.exists(err)
        assert.equal 1, updatedThingy.getComment(commentId).likes.length
        assert.equal 1, updatedThingy.getComment(commentId).likesCount
        done()
  it "not add an other user like if user already liked and comment exists when userId param is a string", (done) ->
    thingy.addLikeToComment String(commentorUserId), commentId, (err, updatedThingy) ->
      thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
        should.exists(err)
        assert.equal 1, updatedThingy.getComment(commentId).likes.length
        assert.equal 1, updatedThingy.getComment(commentId).likesCount
        done()
describe "document.removeLikeFromComment(userId, commentId, callback)", ->
  level1Msg = 'level1 message'
  commentId = ''
  beforeEach (done) ->
    # Comment starts with two likes: commentorUserId and a random user.
    thingy.comments = [
      message: 'level1 message'
      creator: commentorUserId
      likes: [commentorUserId, new ObjectId()]
    ]
    commentId = thingy.comments[0]._id
    thingy.save done
  it "fails if comment doesn't exist", (done) ->
    thingy.removeLikeFromComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
      should.exists(err)
      done()
  it "not affect current likes list if user didn't already liked", (done) ->
    thingy.removeLikeFromComment new ObjectId(), commentId, (err, updatedThingy) ->
      should.exists(err)
      assert.equal 2, updatedThingy.getComment(commentId).likes.length
      assert.equal 2, updatedThingy.getComment(commentId).likesCount
      done()
  it "remove user like from likes list if user already liked", (done) ->
    thingy.removeLikeFromComment commentorUserId, commentId, (err, updatedThingy) ->
      assert.equal 1, updatedThingy.getComment(commentId).likes.length
      assert.equal 1, updatedThingy.getComment(commentId).likesCount
      done()
  it "remove user like from likes list if user already liked when userId param is a string", (done) ->
    thingy.removeLikeFromComment String(commentorUserId), commentId, (err, updatedThingy) ->
      assert.equal 1, updatedThingy.getComment(commentId).likes.length
      assert.equal 1, updatedThingy.getComment(commentId).likesCount
      done()
# --- Static list helpers ------------------------------------------------
# Fixes: garbled test description ("each all comments" -> "get all comments")
# and a double-callback bug in the waterfall (`next(err) if err` without
# `return` fell through and called next() a second time on error).
describe "Plugin statics", ->
  describe "document.getList", ->
    creator1Id = new ObjectId()
    creator2Id = new ObjectId()
    # Seed two rattles, each with two likes and two comments.
    beforeEach (done) ->
      rattles = [
        creator: creator1Id
        likes: [new ObjectId(), new ObjectId()]
        comments: [
          message: '11'
          creator: new ObjectId()
        ,
          message: '12'
          creator: new ObjectId()
        ]
      ,
        creator: creator2Id
        likes: [new ObjectId(), new ObjectId()]
        comments: [
          message: '21'
          creator: new ObjectId()
        ,
          message: '22'
          creator: new ObjectId()
        ]
      ]
      async.eachSeries rattles, (save = (rattleData, next) ->
        new Thingy(rattleData).save next
      ), done
    describe "(num, maxNumLastPostComments, callback)", ->
      it "get list of the number of 'num' last rattles and return likesCount instead of likes array", (done) ->
        Thingy.find {}, (err, rattles) ->
          Thingy.getList 1, 0, (err, rattles) ->
            should.not.exists(err)
            assert.equal rattles.length, 1
            assert.deepEqual rattles[0].creator, creator2Id
            assert !rattles[0].likes
            assert.equal rattles[0].likesCount, 2
            done()
      it "get all rattles if 'num' is greater than the number of rattles", (done) ->
        Thingy.getList 3, 0, (err, rattles) ->
          should.not.exists(err)
          assert.equal rattles.length, 2
          done()
      it "each rattle get the maximum of 'maxLastComments' last comments", (done) ->
        Thingy.getList 1, 1, (err, rattles) ->
          should.not.exists(err)
          assert.equal rattles.length, 1
          assert.deepEqual rattles[0].creator, creator2Id
          should.exists(rattles[0].comments)
          assert.equal rattles[0].comments.length, 1
          assert.equal rattles[0].comments[0].message, '22'
          done()
      it "get all comments when 'maxLastComments' is greater than number of comments", (done) ->
        Thingy.getList 1, 3, (err, rattles) ->
          should.not.exists(err)
          assert.equal rattles.length, 1
          should.exists(rattles[0].comments)
          assert.equal rattles[0].comments.length, 2
          done()
    describe "(num, maxNumLastPostComments, options, callback)", ->
      describe "from a creation date", ->
        it "get list of last rattles created from the 'fromDate'", (done) ->
          # retrieve last rattle
          Thingy.getList 1, 0, (err, rattles) ->
            Thingy.getList 1, 0, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
              should.not.exists(err)
              assert.equal rattles.length, 1
              assert.deepEqual rattles[0].creator, creator1Id
              done()
        it "get all last rattles if 'num' is greater than the number of last rattles", (done) ->
          # retrieve last rattle
          Thingy.getList 1, 0, (err, rattles) ->
            Thingy.getList 2, 0, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
              should.not.exists(err)
              assert.equal rattles.length, 1
              done()
        it "each rattle get the maximum of 'maxLastComments' last comments", (done) ->
          # retrieve last rattle
          Thingy.getList 1, 0, (err, rattles) ->
            Thingy.getList 1, 1, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
              should.not.exists(err)
              assert.equal rattles.length, 1
              assert.deepEqual rattles[0].creator, creator1Id
              should.exists(rattles[0].comments)
              assert.equal rattles[0].comments.length, 1
              assert.equal rattles[0].comments[0].message, '12'
              done()
      describe "populating", ->
        it "build", (done) ->
          new User({_id: creator2Id, name: 'Dummy Name'}).save (err) ->
            # retrieve last rattle
            Thingy.getList 1, 0, {populate: 'creator'}, (err, rattles) ->
              should.not.exists(err)
              assert.equal rattles.length, 1
              should.exists(rattles[0].creator.name)
              assert.equal rattles[0].creator.name, 'Dummy Name'
              done()
  describe "document.getListOfCommentsById(rattleId, num, offsetFromEnd, callback)", ->
    creatorId = new ObjectId()
    rattleId = null
    beforeEach (done) ->
      async.waterfall [
        saveThingy = (next) ->
          new Thingy(
            creator: creatorId
          ).save (err, data) ->
            # Fixed: without `return`, next() was invoked a second time below
            # after an error had already been reported.
            return next(err) if err
            rattleId = data._id
            next(null, data)
        pushComments = (thingy, next) ->
          comments = [
            message: '11'
            creator: new ObjectId()
          ,
            message: '12'
            creator: new ObjectId()
          ,
            message: '13'
            creator: new ObjectId()
          ]
          async.eachSeries comments, (push = (comment, next) ->
            thingy.addComment comment.creator, comment.message, next
          ), next
      ], done
    it "get last 'num' of comments for 'rattleId' when offsetFromEnd is 0", (done) ->
      Thingy.getListOfCommentsById rattleId, 1, 0, (err, comments) ->
        should.not.exists(err)
        assert.equal comments.length, 1
        assert.equal comments[0].message, '13'
        done()
    it "get last num of comments from the offsetFromEnd", (done) ->
      Thingy.getListOfCommentsById rattleId, 1, 1, (err, comments) ->
        should.not.exists(err)
        assert.equal comments.length, 1
        assert.equal comments[0].message, '12'
        done()
    it "get no comments when offsetFromEnd is equal to the number of comments", (done) ->
      Thingy.getListOfCommentsById rattleId, 1, 3, (err, comments) ->
        should.not.exists(err)
        assert.equal comments.length, 0
        done()
    it "limit comments when offsetFromEnd + num is greater that the number of comments", (done) ->
      Thingy.getListOfCommentsById rattleId, 3, 1, (err, comments) ->
        should.not.exists(err)
        assert.equal comments[0].message, '11'
        assert.equal comments[1].message, '12'
        assert.equal comments.length, 2
        done()
    it "keep comments order", (done) ->
      Thingy.getListOfCommentsById rattleId, 3, 0, (err, comments) ->
        should.not.exists(err)
        assert.equal comments[0].message, '11'
        assert.equal comments[1].message, '12'
        assert.equal comments[2].message, '13'
        assert.equal comments.length, 3
        done()
| 79943 | require '../bootstrap'
async = require 'async'
sinon = require 'sinon'
assert = require 'assert'
moment = require 'moment'
should = require 'should'
mongoose = require 'mongoose'
Thingy = require '../models/thingy'
User = require '../models/user'
ObjectId = mongoose.Types.ObjectId;
describe "MongooseRattlePlugin", ->
thingy = {}
commentorUserId = new ObjectId()
objectCreatorUserId = new ObjectId()
beforeEach (done) ->
Thingy.remove done
describe "document.save(callback)", ->
it "update dateCreation and dateUpdate when inserting", (done) ->
clock = sinon.useFakeTimers()
new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
assert.deepEqual(new Date(), thingySaved.dateCreation)
assert.deepEqual(new Date(), thingySaved.dateUpdate)
clock.restore()
done()
it "only update dateUpdate when updating", (done) ->
clock = sinon.useFakeTimers(new Date(2011, 0, 1, 1, 1, 36).getTime())
new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
clock.restore()
clock = sinon.useFakeTimers(new Date(2012, 0, 1, 1, 1, 36).getTime())
thingySaved.save (err, thingySaved) ->
assert.notDeepEqual(new Date(), thingySaved.dateCreation)
assert.deepEqual(new Date(), thingySaved.dateUpdate)
clock.restore()
done()
describe "Plugin methods", ->
beforeEach (done) ->
new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
thingy = thingySaved
done()
describe "document.getComment(commentId)", ->
userOneId = new ObjectId()
userTwoId = new ObjectId()
level1UserOneMsg = 'level1 message ' + userOneId
level1UserTwoMsg = 'level1 message ' + userTwoId
commentIds = {}
beforeEach (done) ->
thingy.comments = [
message: level1UserOneMsg
creator: userOneId
,
message: level1UserTwoMsg
creator: userTwoId
]
commentIds['level 1 ' + userOneId] = thingy.comments[0]._id
commentIds['level 1 ' + userTwoId] = thingy.comments[1]._id
thingy.save done
it "retrieve null if comment doesn't exist", ->
assert.equal(null, thingy.getComment('n0t3x1t1n9'))
it "retrieve comment", ->
assert.equal(level1UserOneMsg, thingy.getComment(commentIds['level 1 ' + userOneId]).message)
assert.equal(level1UserTwoMsg, thingy.getComment(commentIds['level 1 ' + userTwoId]).message)
it "retrieve a comment when commentId is a string and not an ObjectId", ->
assert.equal(level1UserOneMsg, thingy.getComment(String(commentIds['level 1 ' + userOneId])).message)
describe "document.addComment(userId, message, callback)", ->
it "append a new comment and return comment id", (done) ->
commentId = thingy.addComment commentorUserId, 'dummy message', (err) ->
should.not.exists(err)
should.exists(commentId)
Thingy.findById thingy._id, (err, updatedThingy) ->
should.exists(updatedThingy)
assert.equal(1, updatedThingy.comments.length)
done()
it "update dateCreation and dateUpdated", (done) ->
clock = sinon.useFakeTimers()
commentId = thingy.addComment commentorUserId, 'dummy message', (err, updatedThingy) ->
assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateCreation)
assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateUpdate)
clock.restore()
done()
it "fails if message length is out of min and max", (done) ->
thingy.addComment commentorUserId, '', (err) ->
should.exists(err)
done()
describe "document.editComment(userId, commentId, message, callback)", ->
commentId = null
updatedMessage = 'dummy message updated'
beforeEach (done) ->
clock = sinon.useFakeTimers(new Date(2011, 0, 1, 1, 1, 36).getTime())
commentId = thingy.addComment commentorUserId, 'dummy message', (err) ->
clock.restore()
done()
it "fails if message length is out of min and max", (done) ->
thingy.editComment commentorUserId, commentId, '', (err) ->
should.exists(err)
done()
describe 'when user is not the creator', ->
it "always fails", (done) ->
thingy.editComment 'n0t3x1t1n9', commentId, updatedMessage, (err) ->
should.exists(err)
done()
describe 'when user is the creator', ->
checkEditCommentWhenOwner = (commentorUserId, commentId, updatedMessage, done) ->
thingy.editComment commentorUserId, commentId, updatedMessage, (err) ->
should.not.exists(err)
should.exists(commentId)
Thingy.findById thingy._id, (err, updatedThingy) ->
should.exists(updatedThingy)
assert.equal(1, updatedThingy.comments.length)
assert.equal(updatedMessage, updatedThingy.comments[0].message)
done()
it "edit comment and return comment id if user is the owner", (done) ->
checkEditCommentWhenOwner(commentorUserId, commentId, updatedMessage, done)
it "edit comment and return comment id if user is the owner when userId is a string", (done) ->
checkEditCommentWhenOwner(String(commentorUserId), commentId, updatedMessage, done)
it "update dateCreation and dateUpdated", (done) ->
clock = sinon.useFakeTimers(new Date(2012, 0, 1, 1, 1, 36).getTime())
thingy.editComment commentorUserId, commentId, updatedMessage, (err, updatedThingy) ->
assert.notDeepEqual(new Date(), updatedThingy.getComment(commentId).dateCreation)
assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateUpdate)
clock.restore()
done()
describe "document.removeComment(userId, commentId, callback)", ->
level1Msg = 'level1 message'
commentIds = {}
beforeEach (done) ->
thingy.comments = [
message: level1Msg
creator: commentorUserId
,
message: 'level1 second message'
creator: commentorUserId
]
commentIds['level 1'] = thingy.comments[0]._id
thingy.save done
it "fails if comment doesn't exist", (done) ->
thingy.removeComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
should.exists(err)
done()
describe 'when user is not the creator', ->
it "it's not removing the comment", (done) ->
thingy.removeComment 'n0t3x1t1n9', commentIds['level 1'], (err, updatedThingy) ->
should.exists(err)
should.exists(updatedThingy)
should.exists(updatedThingy.getComment(commentIds['level 1']))
done()
describe 'when user is the creator', ->
it "can remove comment", (done) ->
thingy.removeComment commentorUserId, commentIds['level 1'], (err, updatedThingy) ->
should.exists(updatedThingy)
should.not.exists(updatedThingy.getComment(commentIds['level 1']))
done()
it "remove comment when userId param is a string", (done) ->
thingy.removeComment String(commentorUserId), commentIds['level 1'], (err, updatedThingy) ->
should.exists(updatedThingy)
should.not.exists(updatedThingy.getComment(commentIds['level 1']))
done()
it "remove comment when commentId is a string", (done) ->
thingy.removeComment commentorUserId, String(commentIds['level 1']), (err, updatedThingy) ->
should.exists(updatedThingy)
should.not.exists(updatedThingy.getComment(commentIds['level 1']))
done()
describe "document.addLike(userId, callback)", ->
it "add one user like if user doesn't already liked", (done) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "not add an other user like if user already liked", (done) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
updatedThingy.addLike commentorUserId, (err, updatedThingy) ->
should.exist(err)
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "not add an other user like if user already liked and userId param is a string", (done) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
thingy.addLike String(commentorUserId), (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
describe "document.removeLike(userId, callback)", ->
userOneId = new ObjectId()
userTwoId = new ObjectId()
beforeEach (done) ->
async.series [(callback) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
thingy = updatedThingy
callback()
, (callback) ->
thingy.addLike userOneId, (err, updatedThingy) ->
thingy = updatedThingy
callback()
], done
it "not affect current likes list if user didn'nt already liked", (done) ->
thingy.removeLike userTwoId, (err, updatedThingy) ->
should.exists(err)
assert.equal(2, updatedThingy.likes.length)
assert.equal(2, updatedThingy.likesCount)
done()
it "remove user like from likes list if user already liked", (done) ->
thingy.removeLike commentorUserId, (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "remove user like from likes list if user already liked when userId param is a string", (done) ->
thingy.removeLike String(commentorUserId), (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "remove likesCount keep 0 when no there is no more likes", (done) ->
thingy.removeLike String(commentorUserId), (err, updatedThingy) ->
thingy.removeLike String(userOneId), (err, updatedThingy) ->
thingy.removeLike String(userOneId), (err, updatedThingy) ->
assert.equal(0, updatedThingy.likes.length)
assert.equal(0, updatedThingy.likesCount)
done()
describe "document.addLikeToComment(userId, commentId, callback)", ->
level1Msg = 'level1 message'
commentId = ''
beforeEach (done) ->
thingy.comments = [
message: 'level1 message'
creator: commentorUserId
]
commentId = thingy.comments[0]._id
thingy.save done
it "fails if comment doesn't exist", (done) ->
thingy.addLikeToComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
should.exists(err)
done()
it "add one user like if user doesn't already liked and comment exists", (done) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
it "not add an other user like if user already liked and comment exists", (done) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
should.exists(err)
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
it "not add an other user like if user already liked and comment exists when userId param is a string", (done) ->
thingy.addLikeToComment String(commentorUserId), commentId, (err, updatedThingy) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
should.exists(err)
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
describe "document.removeLikeFromComment(userId, commentId, callback)", ->
level1Msg = 'level1 message'
commentId = ''
beforeEach (done) ->
thingy.comments = [
message: 'level1 message'
creator: commentorUserId
likes: [commentorUserId, new ObjectId()]
]
commentId = thingy.comments[0]._id
thingy.save done
it "fails if comment doesn't exist", (done) ->
thingy.removeLikeFromComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
should.exists(err)
done()
it "not affect current likes list if user didn'nt already liked", (done) ->
thingy.removeLikeFromComment new ObjectId(), commentId, (err, updatedThingy) ->
should.exists(err)
assert.equal 2, updatedThingy.getComment(commentId).likes.length
assert.equal 2, updatedThingy.getComment(commentId).likesCount
done()
it "remove user like from likes list if user already liked", (done) ->
thingy.removeLikeFromComment commentorUserId, commentId, (err, updatedThingy) ->
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
it "remove user like from likes list if user already liked when userId param is a string", (done) ->
thingy.removeLikeFromComment String(commentorUserId), commentId, (err, updatedThingy) ->
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
describe "Plugin statics", ->
describe "document.getList", ->
creator1Id = new ObjectId()
creator2Id = new ObjectId()
beforeEach (done) ->
rattles = [
creator: creator1Id
likes: [new ObjectId(), new ObjectId()]
comments: [
message: '11'
creator: new ObjectId()
,
message: '12'
creator: new ObjectId()
]
,
creator: creator2Id
likes: [new ObjectId(), new ObjectId()]
comments: [
message: '21'
creator: new ObjectId()
,
message: '22'
creator: new ObjectId()
]
]
async.eachSeries rattles, (save = (rattleData, next) ->
new Thingy(rattleData).save next
), done
describe "(num, maxNumLastPostComments, callback)", ->
it "get list of the number of 'num' last rattles and return likesCount instead of likes array", (done) ->
Thingy.find {}, (err, rattles) ->
Thingy.getList 1, 0, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator2Id
assert !rattles[0].likes
assert.equal rattles[0].likesCount, 2
done()
it "get all rattles if 'num' is greater than the number of rattles", (done) ->
Thingy.getList 3, 0, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 2
done()
it "each rattle get the maximum of 'maxLastComments' last comments", (done) ->
Thingy.getList 1, 1, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator2Id
should.exists(rattles[0].comments)
assert.equal rattles[0].comments.length, 1
assert.equal rattles[0].comments[0].message, '22'
done()
it "each all comments when 'maxLastComments' is greater than number of comments", (done) ->
Thingy.getList 1, 3, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
should.exists(rattles[0].comments)
assert.equal rattles[0].comments.length, 2
done()
describe "(num, maxNumLastPostComments, options, callback)", ->
describe "from a creation date", ->
it "get list of last rattles created from the 'fromDate'", (done) ->
# retrieve last rattle
Thingy.getList 1, 0, (err, rattles) ->
Thingy.getList 1, 0, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator1Id
done()
it "get all last rattles if 'num' is greater than the number of last rattles", (done) ->
# retrieve last rattle
Thingy.getList 1, 0, (err, rattles) ->
Thingy.getList 2, 0, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
done()
it "each rattle get the maximum of 'maxLastComments' last comments", (done) ->
# retrieve last rattle
Thingy.getList 1, 0, (err, rattles) ->
Thingy.getList 1, 1, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator1Id
should.exists(rattles[0].comments)
assert.equal rattles[0].comments.length, 1
assert.equal rattles[0].comments[0].message, '12'
done()
describe "populating", ->
it "build", (done) ->
new User({_id: creator2Id, name: '<NAME>'}).save (err) ->
# retrieve last rattle
Thingy.getList 1, 0, {populate: 'creator'}, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
should.exists(rattles[0].creator.name)
assert.equal rattles[0].creator.name, '<NAME>'
done()
describe "document.getListOfCommentsById(rattleId, num, offsetFromEnd, callback)", ->
creatorId = new ObjectId()
rattleId = null
beforeEach (done) ->
async.waterfall [
saveThingy = (next) ->
new Thingy(
creator: creatorId
).save (err, data) ->
next(err) if err
rattleId = data._id
next(null, data)
pushComments = (thingy, next) ->
comments = [
message: '11'
creator: new ObjectId()
,
message: '12'
creator: new ObjectId()
,
message: '13'
creator: new ObjectId()
]
async.eachSeries comments, (push = (comment, next) ->
thingy.addComment comment.creator, comment.message, next
), next
], done
it "get last 'num' of comments for 'rattleId' when offsetFromEnd is 0", (done) ->
Thingy.getListOfCommentsById rattleId, 1, 0, (err, comments) ->
should.not.exists(err)
assert.equal comments.length, 1
assert.equal comments[0].message, '13'
done()
it "get last num of comments from the offsetFromEnd", (done) ->
Thingy.getListOfCommentsById rattleId, 1, 1, (err, comments) ->
should.not.exists(err)
assert.equal comments.length, 1
assert.equal comments[0].message, '12'
done()
it "get no comments when offsetFromEnd is equal to the number of comments", (done) ->
Thingy.getListOfCommentsById rattleId, 1, 3, (err, comments) ->
should.not.exists(err)
assert.equal comments.length, 0
done()
it "limit comments when offsetFromEnd + num is greater that the number of comments", (done) ->
Thingy.getListOfCommentsById rattleId, 3, 1, (err, comments) ->
should.not.exists(err)
assert.equal comments[0].message, '11'
assert.equal comments[1].message, '12'
assert.equal comments.length, 2
done()
it "keep comments order", (done) ->
Thingy.getListOfCommentsById rattleId, 3, 0, (err, comments) ->
should.not.exists(err)
assert.equal comments[0].message, '11'
assert.equal comments[1].message, '12'
assert.equal comments[2].message, '13'
assert.equal comments.length, 3
done()
| true | require '../bootstrap'
async = require 'async'
sinon = require 'sinon'
assert = require 'assert'
moment = require 'moment'
should = require 'should'
mongoose = require 'mongoose'
Thingy = require '../models/thingy'
User = require '../models/user'
ObjectId = mongoose.Types.ObjectId;
describe "MongooseRattlePlugin", ->
thingy = {}
commentorUserId = new ObjectId()
objectCreatorUserId = new ObjectId()
beforeEach (done) ->
Thingy.remove done
describe "document.save(callback)", ->
it "update dateCreation and dateUpdate when inserting", (done) ->
clock = sinon.useFakeTimers()
new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
assert.deepEqual(new Date(), thingySaved.dateCreation)
assert.deepEqual(new Date(), thingySaved.dateUpdate)
clock.restore()
done()
it "only update dateUpdate when updating", (done) ->
clock = sinon.useFakeTimers(new Date(2011, 0, 1, 1, 1, 36).getTime())
new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
clock.restore()
clock = sinon.useFakeTimers(new Date(2012, 0, 1, 1, 1, 36).getTime())
thingySaved.save (err, thingySaved) ->
assert.notDeepEqual(new Date(), thingySaved.dateCreation)
assert.deepEqual(new Date(), thingySaved.dateUpdate)
clock.restore()
done()
describe "Plugin methods", ->
beforeEach (done) ->
new Thingy(creator: objectCreatorUserId, owner: objectCreatorUserId).save (err, thingySaved) ->
thingy = thingySaved
done()
describe "document.getComment(commentId)", ->
userOneId = new ObjectId()
userTwoId = new ObjectId()
level1UserOneMsg = 'level1 message ' + userOneId
level1UserTwoMsg = 'level1 message ' + userTwoId
commentIds = {}
beforeEach (done) ->
thingy.comments = [
message: level1UserOneMsg
creator: userOneId
,
message: level1UserTwoMsg
creator: userTwoId
]
commentIds['level 1 ' + userOneId] = thingy.comments[0]._id
commentIds['level 1 ' + userTwoId] = thingy.comments[1]._id
thingy.save done
it "retrieve null if comment doesn't exist", ->
assert.equal(null, thingy.getComment('n0t3x1t1n9'))
it "retrieve comment", ->
assert.equal(level1UserOneMsg, thingy.getComment(commentIds['level 1 ' + userOneId]).message)
assert.equal(level1UserTwoMsg, thingy.getComment(commentIds['level 1 ' + userTwoId]).message)
it "retrieve a comment when commentId is a string and not an ObjectId", ->
assert.equal(level1UserOneMsg, thingy.getComment(String(commentIds['level 1 ' + userOneId])).message)
describe "document.addComment(userId, message, callback)", ->
it "append a new comment and return comment id", (done) ->
commentId = thingy.addComment commentorUserId, 'dummy message', (err) ->
should.not.exists(err)
should.exists(commentId)
Thingy.findById thingy._id, (err, updatedThingy) ->
should.exists(updatedThingy)
assert.equal(1, updatedThingy.comments.length)
done()
it "update dateCreation and dateUpdated", (done) ->
clock = sinon.useFakeTimers()
commentId = thingy.addComment commentorUserId, 'dummy message', (err, updatedThingy) ->
assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateCreation)
assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateUpdate)
clock.restore()
done()
it "fails if message length is out of min and max", (done) ->
thingy.addComment commentorUserId, '', (err) ->
should.exists(err)
done()
describe "document.editComment(userId, commentId, message, callback)", ->
commentId = null
updatedMessage = 'dummy message updated'
beforeEach (done) ->
clock = sinon.useFakeTimers(new Date(2011, 0, 1, 1, 1, 36).getTime())
commentId = thingy.addComment commentorUserId, 'dummy message', (err) ->
clock.restore()
done()
it "fails if message length is out of min and max", (done) ->
thingy.editComment commentorUserId, commentId, '', (err) ->
should.exists(err)
done()
describe 'when user is not the creator', ->
it "always fails", (done) ->
thingy.editComment 'n0t3x1t1n9', commentId, updatedMessage, (err) ->
should.exists(err)
done()
describe 'when user is the creator', ->
checkEditCommentWhenOwner = (commentorUserId, commentId, updatedMessage, done) ->
thingy.editComment commentorUserId, commentId, updatedMessage, (err) ->
should.not.exists(err)
should.exists(commentId)
Thingy.findById thingy._id, (err, updatedThingy) ->
should.exists(updatedThingy)
assert.equal(1, updatedThingy.comments.length)
assert.equal(updatedMessage, updatedThingy.comments[0].message)
done()
it "edit comment and return comment id if user is the owner", (done) ->
checkEditCommentWhenOwner(commentorUserId, commentId, updatedMessage, done)
it "edit comment and return comment id if user is the owner when userId is a string", (done) ->
checkEditCommentWhenOwner(String(commentorUserId), commentId, updatedMessage, done)
it "update dateCreation and dateUpdated", (done) ->
clock = sinon.useFakeTimers(new Date(2012, 0, 1, 1, 1, 36).getTime())
thingy.editComment commentorUserId, commentId, updatedMessage, (err, updatedThingy) ->
assert.notDeepEqual(new Date(), updatedThingy.getComment(commentId).dateCreation)
assert.deepEqual(new Date(), updatedThingy.getComment(commentId).dateUpdate)
clock.restore()
done()
describe "document.removeComment(userId, commentId, callback)", ->
level1Msg = 'level1 message'
commentIds = {}
beforeEach (done) ->
thingy.comments = [
message: level1Msg
creator: commentorUserId
,
message: 'level1 second message'
creator: commentorUserId
]
commentIds['level 1'] = thingy.comments[0]._id
thingy.save done
it "fails if comment doesn't exist", (done) ->
thingy.removeComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
should.exists(err)
done()
describe 'when user is not the creator', ->
it "it's not removing the comment", (done) ->
thingy.removeComment 'n0t3x1t1n9', commentIds['level 1'], (err, updatedThingy) ->
should.exists(err)
should.exists(updatedThingy)
should.exists(updatedThingy.getComment(commentIds['level 1']))
done()
describe 'when user is the creator', ->
it "can remove comment", (done) ->
thingy.removeComment commentorUserId, commentIds['level 1'], (err, updatedThingy) ->
should.exists(updatedThingy)
should.not.exists(updatedThingy.getComment(commentIds['level 1']))
done()
it "remove comment when userId param is a string", (done) ->
thingy.removeComment String(commentorUserId), commentIds['level 1'], (err, updatedThingy) ->
should.exists(updatedThingy)
should.not.exists(updatedThingy.getComment(commentIds['level 1']))
done()
it "remove comment when commentId is a string", (done) ->
thingy.removeComment commentorUserId, String(commentIds['level 1']), (err, updatedThingy) ->
should.exists(updatedThingy)
should.not.exists(updatedThingy.getComment(commentIds['level 1']))
done()
describe "document.addLike(userId, callback)", ->
it "add one user like if user doesn't already liked", (done) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "not add an other user like if user already liked", (done) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
updatedThingy.addLike commentorUserId, (err, updatedThingy) ->
should.exist(err)
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "not add an other user like if user already liked and userId param is a string", (done) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
thingy.addLike String(commentorUserId), (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
describe "document.removeLike(userId, callback)", ->
userOneId = new ObjectId()
userTwoId = new ObjectId()
beforeEach (done) ->
async.series [(callback) ->
thingy.addLike commentorUserId, (err, updatedThingy) ->
thingy = updatedThingy
callback()
, (callback) ->
thingy.addLike userOneId, (err, updatedThingy) ->
thingy = updatedThingy
callback()
], done
it "not affect current likes list if user didn'nt already liked", (done) ->
thingy.removeLike userTwoId, (err, updatedThingy) ->
should.exists(err)
assert.equal(2, updatedThingy.likes.length)
assert.equal(2, updatedThingy.likesCount)
done()
it "remove user like from likes list if user already liked", (done) ->
thingy.removeLike commentorUserId, (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "remove user like from likes list if user already liked when userId param is a string", (done) ->
thingy.removeLike String(commentorUserId), (err, updatedThingy) ->
assert.equal(1, updatedThingy.likes.length)
assert.equal(1, updatedThingy.likesCount)
done()
it "remove likesCount keep 0 when no there is no more likes", (done) ->
thingy.removeLike String(commentorUserId), (err, updatedThingy) ->
thingy.removeLike String(userOneId), (err, updatedThingy) ->
thingy.removeLike String(userOneId), (err, updatedThingy) ->
assert.equal(0, updatedThingy.likes.length)
assert.equal(0, updatedThingy.likesCount)
done()
describe "document.addLikeToComment(userId, commentId, callback)", ->
level1Msg = 'level1 message'
commentId = ''
beforeEach (done) ->
thingy.comments = [
message: 'level1 message'
creator: commentorUserId
]
commentId = thingy.comments[0]._id
thingy.save done
it "fails if comment doesn't exist", (done) ->
thingy.addLikeToComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
should.exists(err)
done()
it "add one user like if user doesn't already liked and comment exists", (done) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
it "not add an other user like if user already liked and comment exists", (done) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
should.exists(err)
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
it "not add an other user like if user already liked and comment exists when userId param is a string", (done) ->
thingy.addLikeToComment String(commentorUserId), commentId, (err, updatedThingy) ->
thingy.addLikeToComment commentorUserId, commentId, (err, updatedThingy) ->
should.exists(err)
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
describe "document.removeLikeFromComment(userId, commentId, callback)", ->
level1Msg = 'level1 message'
commentId = ''
beforeEach (done) ->
thingy.comments = [
message: 'level1 message'
creator: commentorUserId
likes: [commentorUserId, new ObjectId()]
]
commentId = thingy.comments[0]._id
thingy.save done
it "fails if comment doesn't exist", (done) ->
thingy.removeLikeFromComment commentorUserId, 'n0t3x1t1n9', (err, updatedThingy) ->
should.exists(err)
done()
it "not affect current likes list if user didn'nt already liked", (done) ->
thingy.removeLikeFromComment new ObjectId(), commentId, (err, updatedThingy) ->
should.exists(err)
assert.equal 2, updatedThingy.getComment(commentId).likes.length
assert.equal 2, updatedThingy.getComment(commentId).likesCount
done()
it "remove user like from likes list if user already liked", (done) ->
thingy.removeLikeFromComment commentorUserId, commentId, (err, updatedThingy) ->
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
it "remove user like from likes list if user already liked when userId param is a string", (done) ->
thingy.removeLikeFromComment String(commentorUserId), commentId, (err, updatedThingy) ->
assert.equal 1, updatedThingy.getComment(commentId).likes.length
assert.equal 1, updatedThingy.getComment(commentId).likesCount
done()
describe "Plugin statics", ->
describe "document.getList", ->
creator1Id = new ObjectId()
creator2Id = new ObjectId()
beforeEach (done) ->
rattles = [
creator: creator1Id
likes: [new ObjectId(), new ObjectId()]
comments: [
message: '11'
creator: new ObjectId()
,
message: '12'
creator: new ObjectId()
]
,
creator: creator2Id
likes: [new ObjectId(), new ObjectId()]
comments: [
message: '21'
creator: new ObjectId()
,
message: '22'
creator: new ObjectId()
]
]
async.eachSeries rattles, (save = (rattleData, next) ->
new Thingy(rattleData).save next
), done
describe "(num, maxNumLastPostComments, callback)", ->
it "get list of the number of 'num' last rattles and return likesCount instead of likes array", (done) ->
Thingy.find {}, (err, rattles) ->
Thingy.getList 1, 0, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator2Id
assert !rattles[0].likes
assert.equal rattles[0].likesCount, 2
done()
it "get all rattles if 'num' is greater than the number of rattles", (done) ->
Thingy.getList 3, 0, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 2
done()
it "each rattle get the maximum of 'maxLastComments' last comments", (done) ->
Thingy.getList 1, 1, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator2Id
should.exists(rattles[0].comments)
assert.equal rattles[0].comments.length, 1
assert.equal rattles[0].comments[0].message, '22'
done()
it "each all comments when 'maxLastComments' is greater than number of comments", (done) ->
Thingy.getList 1, 3, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
should.exists(rattles[0].comments)
assert.equal rattles[0].comments.length, 2
done()
describe "(num, maxNumLastPostComments, options, callback)", ->
describe "from a creation date", ->
it "get list of last rattles created from the 'fromDate'", (done) ->
# retrieve last rattle
Thingy.getList 1, 0, (err, rattles) ->
Thingy.getList 1, 0, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator1Id
done()
it "get all last rattles if 'num' is greater than the number of last rattles", (done) ->
# retrieve last rattle
Thingy.getList 1, 0, (err, rattles) ->
Thingy.getList 2, 0, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
done()
it "each rattle get the maximum of 'maxLastComments' last comments", (done) ->
# retrieve last rattle
Thingy.getList 1, 0, (err, rattles) ->
Thingy.getList 1, 1, fromCreationDate: rattles[0].dateCreation, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
assert.deepEqual rattles[0].creator, creator1Id
should.exists(rattles[0].comments)
assert.equal rattles[0].comments.length, 1
assert.equal rattles[0].comments[0].message, '12'
done()
describe "populating", ->
it "build", (done) ->
new User({_id: creator2Id, name: 'PI:NAME:<NAME>END_PI'}).save (err) ->
# retrieve last rattle
Thingy.getList 1, 0, {populate: 'creator'}, (err, rattles) ->
should.not.exists(err)
assert.equal rattles.length, 1
should.exists(rattles[0].creator.name)
assert.equal rattles[0].creator.name, 'PI:NAME:<NAME>END_PI'
done()
describe "document.getListOfCommentsById(rattleId, num, offsetFromEnd, callback)", ->
creatorId = new ObjectId()
rattleId = null
beforeEach (done) ->
async.waterfall [
saveThingy = (next) ->
new Thingy(
creator: creatorId
).save (err, data) ->
next(err) if err
rattleId = data._id
next(null, data)
pushComments = (thingy, next) ->
comments = [
message: '11'
creator: new ObjectId()
,
message: '12'
creator: new ObjectId()
,
message: '13'
creator: new ObjectId()
]
async.eachSeries comments, (push = (comment, next) ->
thingy.addComment comment.creator, comment.message, next
), next
], done
it "get last 'num' of comments for 'rattleId' when offsetFromEnd is 0", (done) ->
Thingy.getListOfCommentsById rattleId, 1, 0, (err, comments) ->
should.not.exists(err)
assert.equal comments.length, 1
assert.equal comments[0].message, '13'
done()
it "get last num of comments from the offsetFromEnd", (done) ->
Thingy.getListOfCommentsById rattleId, 1, 1, (err, comments) ->
should.not.exists(err)
assert.equal comments.length, 1
assert.equal comments[0].message, '12'
done()
it "get no comments when offsetFromEnd is equal to the number of comments", (done) ->
Thingy.getListOfCommentsById rattleId, 1, 3, (err, comments) ->
should.not.exists(err)
assert.equal comments.length, 0
done()
it "limit comments when offsetFromEnd + num is greater that the number of comments", (done) ->
Thingy.getListOfCommentsById rattleId, 3, 1, (err, comments) ->
should.not.exists(err)
assert.equal comments[0].message, '11'
assert.equal comments[1].message, '12'
assert.equal comments.length, 2
done()
it "keep comments order", (done) ->
Thingy.getListOfCommentsById rattleId, 3, 0, (err, comments) ->
should.not.exists(err)
assert.equal comments[0].message, '11'
assert.equal comments[1].message, '12'
assert.equal comments[2].message, '13'
assert.equal comments.length, 3
done()
|
[
{
"context": "s: { type: 'string' }\n }\n }\n data = { name: 'Bob', numbers: ['401-401-1337', '123-456-7890'], 'add",
"end": 378,
"score": 0.9997455477714539,
"start": 375,
"tag": "NAME",
"value": "Bob"
}
] | test/keyboard/tab-key.coffee | lgr7/codecombattreema | 66 | # almost exactly the same as enter, except it keeps focus on a row that is invalid
describe 'Tab key press', ->
tabKeyPress = ($el) -> keyDown($el, 9)
schema = {
type: 'object',
properties: {
name: { type: 'string' }
numbers: { type: 'array', items: { type: 'string', minLength: 4 } }
address: { type: 'string' }
}
}
data = { name: 'Bob', numbers: ['401-401-1337', '123-456-7890'], 'address': 'Mars' }
treema = TreemaNode.make(null, {data: data, schema: schema})
treema.build()
nameTreema = treema.childrenTreemas.name
phoneTreema = treema.childrenTreemas.numbers
addressTreema = treema.childrenTreemas.address
afterEach ->
treema.endExistingEdits()
phoneTreema.close()
it 'edits the last selected row', ->
nameTreema.select()
tabKeyPress(treema.$el)
expect(nameTreema.isEditing()).toBeTruthy()
it 'saves the current row and goes on to the next value in the collection if there is one', ->
phoneTreema.open()
phoneTreema.childrenTreemas[0].edit()
phoneTreema.childrenTreemas[0].$el.find('input').val('4321')
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[0].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[1].isEditing()).toBeTruthy()
expect(treema.data.numbers[0]).toBe('4321')
it 'traverses into and out of open collections', ->
phoneTreema.open()
nameTreema.edit()
tabKeyPress(nameTreema.$el)
expect(phoneTreema.isSelected()).toBeTruthy()
tabKeyPress(treema.$el)
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[1].isEditing()).toBeTruthy()
it 'skips over closed collections', ->
nameTreema.edit()
tabKeyPress(nameTreema.$el)
expect(phoneTreema.isSelected()).toBeTruthy()
tabKeyPress(treema.$el)
expect(addressTreema.isEditing()).toBeTruthy()
it 'shows errors and stays put when saving an invalid row', ->
phoneTreema.open()
phoneTreema.childrenTreemas[0].edit()
phoneTreema.childrenTreemas[0].$el.find('input').val('1')
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[1].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
expect(treema.data.numbers[0]).toBe('1')
expect(treema.isValid()).toBeFalsy()
it 'goes backwards if shift is pressed', ->
phoneTreema.open()
phoneTreema.childrenTreemas[1].edit()
event = jQuery.Event("keydown")
event.which = 9
event.shiftKey = true
phoneTreema.childrenTreemas[1].$el.trigger(event)
expect(phoneTreema.childrenTreemas[1].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
it 'edits the first child in a collection if a collection is selected', ->
phoneTreema.open()
phoneTreema.select()
tabKeyPress(phoneTreema.$el)
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
xit 'goes around in a loop', ->
# this test doesn't work because focus on the add button doesn't work in a testing condition...
$('body').append(treema.$el)
addressTreema.select()
tabKeyPress(treema.$el) # editing address
tabKeyPress($(document.activeElement)) # selecting add button
expect(nameTreema.isEditing()).toBeTruthy()
treema.$el.remove()
| 134838 | # almost exactly the same as enter, except it keeps focus on a row that is invalid
describe 'Tab key press', ->
tabKeyPress = ($el) -> keyDown($el, 9)
schema = {
type: 'object',
properties: {
name: { type: 'string' }
numbers: { type: 'array', items: { type: 'string', minLength: 4 } }
address: { type: 'string' }
}
}
data = { name: '<NAME>', numbers: ['401-401-1337', '123-456-7890'], 'address': 'Mars' }
treema = TreemaNode.make(null, {data: data, schema: schema})
treema.build()
nameTreema = treema.childrenTreemas.name
phoneTreema = treema.childrenTreemas.numbers
addressTreema = treema.childrenTreemas.address
afterEach ->
treema.endExistingEdits()
phoneTreema.close()
it 'edits the last selected row', ->
nameTreema.select()
tabKeyPress(treema.$el)
expect(nameTreema.isEditing()).toBeTruthy()
it 'saves the current row and goes on to the next value in the collection if there is one', ->
phoneTreema.open()
phoneTreema.childrenTreemas[0].edit()
phoneTreema.childrenTreemas[0].$el.find('input').val('4321')
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[0].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[1].isEditing()).toBeTruthy()
expect(treema.data.numbers[0]).toBe('4321')
it 'traverses into and out of open collections', ->
phoneTreema.open()
nameTreema.edit()
tabKeyPress(nameTreema.$el)
expect(phoneTreema.isSelected()).toBeTruthy()
tabKeyPress(treema.$el)
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[1].isEditing()).toBeTruthy()
it 'skips over closed collections', ->
nameTreema.edit()
tabKeyPress(nameTreema.$el)
expect(phoneTreema.isSelected()).toBeTruthy()
tabKeyPress(treema.$el)
expect(addressTreema.isEditing()).toBeTruthy()
it 'shows errors and stays put when saving an invalid row', ->
phoneTreema.open()
phoneTreema.childrenTreemas[0].edit()
phoneTreema.childrenTreemas[0].$el.find('input').val('1')
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[1].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
expect(treema.data.numbers[0]).toBe('1')
expect(treema.isValid()).toBeFalsy()
it 'goes backwards if shift is pressed', ->
phoneTreema.open()
phoneTreema.childrenTreemas[1].edit()
event = jQuery.Event("keydown")
event.which = 9
event.shiftKey = true
phoneTreema.childrenTreemas[1].$el.trigger(event)
expect(phoneTreema.childrenTreemas[1].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
it 'edits the first child in a collection if a collection is selected', ->
phoneTreema.open()
phoneTreema.select()
tabKeyPress(phoneTreema.$el)
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
xit 'goes around in a loop', ->
# this test doesn't work because focus on the add button doesn't work in a testing condition...
$('body').append(treema.$el)
addressTreema.select()
tabKeyPress(treema.$el) # editing address
tabKeyPress($(document.activeElement)) # selecting add button
expect(nameTreema.isEditing()).toBeTruthy()
treema.$el.remove()
| true | # almost exactly the same as enter, except it keeps focus on a row that is invalid
describe 'Tab key press', ->
tabKeyPress = ($el) -> keyDown($el, 9)
schema = {
type: 'object',
properties: {
name: { type: 'string' }
numbers: { type: 'array', items: { type: 'string', minLength: 4 } }
address: { type: 'string' }
}
}
data = { name: 'PI:NAME:<NAME>END_PI', numbers: ['401-401-1337', '123-456-7890'], 'address': 'Mars' }
treema = TreemaNode.make(null, {data: data, schema: schema})
treema.build()
nameTreema = treema.childrenTreemas.name
phoneTreema = treema.childrenTreemas.numbers
addressTreema = treema.childrenTreemas.address
afterEach ->
treema.endExistingEdits()
phoneTreema.close()
it 'edits the last selected row', ->
nameTreema.select()
tabKeyPress(treema.$el)
expect(nameTreema.isEditing()).toBeTruthy()
it 'saves the current row and goes on to the next value in the collection if there is one', ->
phoneTreema.open()
phoneTreema.childrenTreemas[0].edit()
phoneTreema.childrenTreemas[0].$el.find('input').val('4321')
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[0].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[1].isEditing()).toBeTruthy()
expect(treema.data.numbers[0]).toBe('4321')
it 'traverses into and out of open collections', ->
phoneTreema.open()
nameTreema.edit()
tabKeyPress(nameTreema.$el)
expect(phoneTreema.isSelected()).toBeTruthy()
tabKeyPress(treema.$el)
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[1].isEditing()).toBeTruthy()
it 'skips over closed collections', ->
nameTreema.edit()
tabKeyPress(nameTreema.$el)
expect(phoneTreema.isSelected()).toBeTruthy()
tabKeyPress(treema.$el)
expect(addressTreema.isEditing()).toBeTruthy()
it 'shows errors and stays put when saving an invalid row', ->
phoneTreema.open()
phoneTreema.childrenTreemas[0].edit()
phoneTreema.childrenTreemas[0].$el.find('input').val('1')
tabKeyPress(phoneTreema.childrenTreemas[0].$el)
expect(phoneTreema.childrenTreemas[1].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
expect(treema.data.numbers[0]).toBe('1')
expect(treema.isValid()).toBeFalsy()
it 'goes backwards if shift is pressed', ->
phoneTreema.open()
phoneTreema.childrenTreemas[1].edit()
event = jQuery.Event("keydown")
event.which = 9
event.shiftKey = true
phoneTreema.childrenTreemas[1].$el.trigger(event)
expect(phoneTreema.childrenTreemas[1].isDisplaying()).toBeTruthy()
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
it 'edits the first child in a collection if a collection is selected', ->
phoneTreema.open()
phoneTreema.select()
tabKeyPress(phoneTreema.$el)
expect(phoneTreema.childrenTreemas[0].isEditing()).toBeTruthy()
xit 'goes around in a loop', ->
# this test doesn't work because focus on the add button doesn't work in a testing condition...
$('body').append(treema.$el)
addressTreema.select()
tabKeyPress(treema.$el) # editing address
tabKeyPress($(document.activeElement)) # selecting add button
expect(nameTreema.isEditing()).toBeTruthy()
treema.$el.remove()
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999164342880249,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/lib/store-paypal.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
export class StorePaypal
@fetchApprovalLink: (orderId) ->
new Promise (resolve, reject) ->
$.post laroute.route('payments.paypal.create'), order_id: orderId
.done resolve
.fail (xhr) ->
reject(xhr: xhr)
| 94236 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
export class StorePaypal
@fetchApprovalLink: (orderId) ->
new Promise (resolve, reject) ->
$.post laroute.route('payments.paypal.create'), order_id: orderId
.done resolve
.fail (xhr) ->
reject(xhr: xhr)
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
export class StorePaypal
@fetchApprovalLink: (orderId) ->
new Promise (resolve, reject) ->
$.post laroute.route('payments.paypal.create'), order_id: orderId
.done resolve
.fail (xhr) ->
reject(xhr: xhr)
|
[
{
"context": "@InitFiles = new FS.Collection(\"InitFiles\",\n\tstores: [\n\t\tnew",
"end": 10,
"score": 0.5587348937988281,
"start": 1,
"tag": "USERNAME",
"value": "InitFiles"
},
{
"context": "\temail: email\n\t\t\t\tusername: mobile\n\t\t\t\tpassword: '12345678'\n\t\t\t\ttype: 'teacher'\n\t\t\t\troles: ''\n\t\t\t\tprofile:\n\t",
"end": 1244,
"score": 0.9991213083267212,
"start": 1236,
"tag": "PASSWORD",
"value": "12345678"
},
{
"context": "udents\n\t\ttry\n\t\t\tAccounts.createUser(\n\t\t\t\tusername: id\n\t\t\t\tpassword: '12345678'\n\t\t\t\ttype: 'student'\n\t\t\t\t",
"end": 1996,
"score": 0.9569597840309143,
"start": 1994,
"tag": "USERNAME",
"value": "id"
},
{
"context": "ounts.createUser(\n\t\t\t\tusername: id\n\t\t\t\tpassword: '12345678'\n\t\t\t\ttype: 'student'\n\t\t\t\troles: ''\n\t\t\t\tprofile:\n\t",
"end": 2020,
"score": 0.9991621375083923,
"start": 2012,
"tag": "PASSWORD",
"value": "12345678"
},
{
"context": "e: 'student'\n\t\t\t\troles: ''\n\t\t\t\tprofile:\n\t\t\t\t\tname: name\n\t\t\t\t\tmobile: mobile\n\t\t\t\t\tgender: gender\n\t\t\t\t\tbirt",
"end": 2084,
"score": 0.9585438966751099,
"start": 2080,
"tag": "NAME",
"value": "name"
},
{
"context": "arents\n\t\ttry\n\t\t\tAccounts.createUser(\n\t\t\t\tusername: id\n\t\t\t\tpassword: '12345678'\n\t\t\t\ttype: 'parent'\n\t\t\t\tr",
"end": 2317,
"score": 0.9669724106788635,
"start": 2315,
"tag": "USERNAME",
"value": "id"
},
{
"context": "ounts.createUser(\n\t\t\t\tusername: id\n\t\t\t\tpassword: '12345678'\n\t\t\t\ttype: 'parent'\n\t\t\t\troles: ''\n\t\t\t\tprofile:\n\t\t",
"end": 2341,
"score": 0.9991694688796997,
"start": 2333,
"tag": "PASSWORD",
"value": "12345678"
},
{
"context": "pe: 'parent'\n\t\t\t\troles: ''\n\t\t\t\tprofile:\n\t\t\t\t\tname: name\n\t\t\t)\n\t\t\tconsole.log(id + \" parent created succe",
"end": 2404,
"score": 0.9577697515487671,
"start": 2400,
"tag": "NAME",
"value": "name"
}
] | collections/collection_fs/initFiles.coffee | cuijiemmx/alpha | 0 | @InitFiles = new FS.Collection("InitFiles",
stores: [
new FS.Store.GridFS 'files', transformWrite: (fileObj, readStream, writeStream) =>
# console.log readStream
content = ''
readStream.on 'data', (chunk) =>
content += chunk.toString()
readStream.on 'end', Meteor.bindEnvironment(() =>
console.log '[ImportUsers] Begin'
content = content.trim()
console.log content
if areTeachers content
importTeachers content
else
importStudentsAndParents content
console.log '[ImportUsers] End'
)
readStream.pipe writeStream
],
filter:
allow:
contentTypes: ['text/csv']
extensions: ['csv']
)
areTeachers = (content) =>
(content.indexOf '学号') == -1
importTeachers = (content) =>
console.log 'Importing teachers'
start = content.indexOf('\n') + 1
content = content.substr(start)
content.split('\n').forEach (line) =>
arr = line.split(',')
name = arr[0].trim()
email = arr[1].trim()
mobile = arr[2].trim()
gender = arr[3].trim()
if gender == '男'
gender = 'male'
else
gender = 'female'
birthday = arr[4].trim()
# console.log name, email, mobile, gender, birthday
try
Accounts.createUser(
email: email
username: mobile
password: '12345678'
type: 'teacher'
roles: ''
profile:
name: name
mobile: mobile
gender: gender
birthday: birthday
)
console.log(email + " created successfully.")
catch e
console.log(email + " " + e)
importStudentsAndParents = (content) =>
console.log 'Importing students and parents'
start = content.indexOf('\n') + 1
content = content.substr(start)
content.split('\n').forEach (line) =>
arr = line.split(',')
name = arr[0].trim()
id = arr[1].trim()
mobile = arr[2].trim()
gender = arr[3].trim()
if gender == '男'
gender = 'male'
else
gender = 'female'
birthday = arr[4].trim()
# console.log name, id, gender, birthday, mobile
# import students
try
Accounts.createUser(
username: id
password: '12345678'
type: 'student'
roles: ''
profile:
name: name
mobile: mobile
gender: gender
birthday: birthday
)
console.log(id + " student created successfully.")
catch e
console.log(id + " " + e)
# import parents
try
Accounts.createUser(
username: id
password: '12345678'
type: 'parent'
roles: ''
profile:
name: name
)
console.log(id + " parent created successfully.")
catch e
console.log(id + " " + e) | 159120 | @InitFiles = new FS.Collection("InitFiles",
stores: [
new FS.Store.GridFS 'files', transformWrite: (fileObj, readStream, writeStream) =>
# console.log readStream
content = ''
readStream.on 'data', (chunk) =>
content += chunk.toString()
readStream.on 'end', Meteor.bindEnvironment(() =>
console.log '[ImportUsers] Begin'
content = content.trim()
console.log content
if areTeachers content
importTeachers content
else
importStudentsAndParents content
console.log '[ImportUsers] End'
)
readStream.pipe writeStream
],
filter:
allow:
contentTypes: ['text/csv']
extensions: ['csv']
)
areTeachers = (content) =>
(content.indexOf '学号') == -1
importTeachers = (content) =>
console.log 'Importing teachers'
start = content.indexOf('\n') + 1
content = content.substr(start)
content.split('\n').forEach (line) =>
arr = line.split(',')
name = arr[0].trim()
email = arr[1].trim()
mobile = arr[2].trim()
gender = arr[3].trim()
if gender == '男'
gender = 'male'
else
gender = 'female'
birthday = arr[4].trim()
# console.log name, email, mobile, gender, birthday
try
Accounts.createUser(
email: email
username: mobile
password: '<PASSWORD>'
type: 'teacher'
roles: ''
profile:
name: name
mobile: mobile
gender: gender
birthday: birthday
)
console.log(email + " created successfully.")
catch e
console.log(email + " " + e)
importStudentsAndParents = (content) =>
console.log 'Importing students and parents'
start = content.indexOf('\n') + 1
content = content.substr(start)
content.split('\n').forEach (line) =>
arr = line.split(',')
name = arr[0].trim()
id = arr[1].trim()
mobile = arr[2].trim()
gender = arr[3].trim()
if gender == '男'
gender = 'male'
else
gender = 'female'
birthday = arr[4].trim()
# console.log name, id, gender, birthday, mobile
# import students
try
Accounts.createUser(
username: id
password: '<PASSWORD>'
type: 'student'
roles: ''
profile:
name: <NAME>
mobile: mobile
gender: gender
birthday: birthday
)
console.log(id + " student created successfully.")
catch e
console.log(id + " " + e)
# import parents
try
Accounts.createUser(
username: id
password: '<PASSWORD>'
type: 'parent'
roles: ''
profile:
name: <NAME>
)
console.log(id + " parent created successfully.")
catch e
console.log(id + " " + e) | true | @InitFiles = new FS.Collection("InitFiles",
stores: [
new FS.Store.GridFS 'files', transformWrite: (fileObj, readStream, writeStream) =>
# console.log readStream
content = ''
readStream.on 'data', (chunk) =>
content += chunk.toString()
readStream.on 'end', Meteor.bindEnvironment(() =>
console.log '[ImportUsers] Begin'
content = content.trim()
console.log content
if areTeachers content
importTeachers content
else
importStudentsAndParents content
console.log '[ImportUsers] End'
)
readStream.pipe writeStream
],
filter:
allow:
contentTypes: ['text/csv']
extensions: ['csv']
)
areTeachers = (content) =>
(content.indexOf '学号') == -1
importTeachers = (content) =>
console.log 'Importing teachers'
start = content.indexOf('\n') + 1
content = content.substr(start)
content.split('\n').forEach (line) =>
arr = line.split(',')
name = arr[0].trim()
email = arr[1].trim()
mobile = arr[2].trim()
gender = arr[3].trim()
if gender == '男'
gender = 'male'
else
gender = 'female'
birthday = arr[4].trim()
# console.log name, email, mobile, gender, birthday
try
Accounts.createUser(
email: email
username: mobile
password: 'PI:PASSWORD:<PASSWORD>END_PI'
type: 'teacher'
roles: ''
profile:
name: name
mobile: mobile
gender: gender
birthday: birthday
)
console.log(email + " created successfully.")
catch e
console.log(email + " " + e)
importStudentsAndParents = (content) =>
console.log 'Importing students and parents'
start = content.indexOf('\n') + 1
content = content.substr(start)
content.split('\n').forEach (line) =>
arr = line.split(',')
name = arr[0].trim()
id = arr[1].trim()
mobile = arr[2].trim()
gender = arr[3].trim()
if gender == '男'
gender = 'male'
else
gender = 'female'
birthday = arr[4].trim()
# console.log name, id, gender, birthday, mobile
# import students
try
Accounts.createUser(
username: id
password: 'PI:PASSWORD:<PASSWORD>END_PI'
type: 'student'
roles: ''
profile:
name: PI:NAME:<NAME>END_PI
mobile: mobile
gender: gender
birthday: birthday
)
console.log(id + " student created successfully.")
catch e
console.log(id + " " + e)
# import parents
try
Accounts.createUser(
username: id
password: 'PI:PASSWORD:<PASSWORD>END_PI'
type: 'parent'
roles: ''
profile:
name: PI:NAME:<NAME>END_PI
)
console.log(id + " parent created successfully.")
catch e
console.log(id + " " + e) |
[
{
"context": " JSLocation - location coordinate class\n# Coded by kouichi.sakazaki 2013.10.03\n#*************************************",
"end": 111,
"score": 0.9945704340934753,
"start": 95,
"tag": "NAME",
"value": "kouichi.sakazaki"
}
] | JSKit/frameworks/JSCoreLocation.framework/JSLocation.coffee | digitarhythm/codeJS | 0 | #*****************************************
# JSLocation - location coordinate class
# Coded by kouichi.sakazaki 2013.10.03
#*****************************************
class JSLocation extends JSObject
constructor:(@_latitude = 0.0, @_longitude = 0.0)->
super()
| 192573 | #*****************************************
# JSLocation - location coordinate class
# Coded by <NAME> 2013.10.03
#*****************************************
class JSLocation extends JSObject
constructor:(@_latitude = 0.0, @_longitude = 0.0)->
super()
| true | #*****************************************
# JSLocation - location coordinate class
# Coded by PI:NAME:<NAME>END_PI 2013.10.03
#*****************************************
class JSLocation extends JSObject
constructor:(@_latitude = 0.0, @_longitude = 0.0)->
super()
|
[
{
"context": "/z5AZK3t4zjOpJQ6BPECfiKAcqRUzkFmASQEhHzJOUgQ8BWyviwFsL4sBnC+LAE84YMWQnSAVCixdkvMAiB6Q7TCfJtrLq4PHkmSnHHbi0LHvOYa6w/g3kitjSgOYFyUUoWvlCPA9C1gvQfgDmiHNLZBgO8A3geZt+G6chQBA7hi/0QVQBrZ9EwQ0LbtbhgGghQAVFPAB25HmRH8b2/nAAAAAElFTkSuQmCC'\n\n initialize: (attrs, options) ->\n super",
"end": 5313,
"score": 0.9976466298103333,
"start": 5135,
"tag": "KEY",
"value": "wFsL4sBnC+LAE84YMWQnSAVCixdkvMAiB6Q7TCfJtrLq4PHkmSnHHbi0LHvOYa6w/g3kitjSgOYFyUUoWvlCPA9C1gvQfgDmiHNLZBgO8A3geZt+G6chQBA7hi/0QVQBrZ9EwQ0LbtbhgGghQAVFPAB25HmRH8b2/nAAAAAElFTkSuQmCC"
}
] | bokehjs/src/coffee/tool/inspectors/hover_tool.coffee | tswicegood/bokeh | 0 |
define [
"underscore"
"sprintf"
"common/collection"
"renderer/annotation/tooltip"
"./inspect_tool"
], (_, sprintf, Collection, Tooltip, InspectTool) ->
_color_to_hex = (color) ->
if (color.substr(0, 1) == '#')
return color
digits = /(.*?)rgb\((\d+), (\d+), (\d+)\)/.exec(color)
red = parseInt(digits[2])
green = parseInt(digits[3])
blue = parseInt(digits[4])
rgb = blue | (green << 8) | (red << 16)
return digits[1] + '#' + rgb.toString(16)
_format_number = (number) ->
# will get strings for categorical types, just pass back
if typeof(number) == "string"
return number
if Math.floor(number) == number
return sprintf("%d", number)
if Math.abs(number) > 0.1 and Math.abs(number) < 1000
return sprintf("%0.3f", number)
return sprintf("%0.3e", number)
class HoverToolView extends InspectTool.View
bind_bokeh_events: () ->
for r in @mget('renderers')
@listenTo(r.get('data_source'), 'inspect', @_update)
@plot_view.canvas_view.canvas_wrapper.css('cursor', 'crosshair')
_move: (e) ->
if not @mget('active')
return
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
if not @plot_view.frame.contains(vx, vy)
@mget('tooltip').clear()
return
@_inspect(vx, vy)
_move_exit: ()->
@mget('tooltip').clear()
_inspect: (vx, vy, e) ->
geometry = {
type: 'point'
vx: vx
vy: vy
}
for r in @mget('renderers')
sm = r.get('data_source').get('selection_manager')
sm.inspect(@, @plot_view.renderers[r.id], geometry, {"geometry": geometry})
_update: (indices, tool, renderer, ds, {geometry}) ->
@mget('tooltip').clear()
if indices.length == 0
return
vx = geometry.vx
vy = geometry.vy
canvas = @plot_model.get('canvas')
frame = @plot_model.get('frame')
sx = canvas.vx_to_sx(vx)
sy = canvas.vy_to_sy(vy)
xmapper = frame.get('x_mappers')[renderer.mget('x_range_name')]
ymapper = frame.get('y_mappers')[renderer.mget('y_range_name')]
x = xmapper.map_from_target(vx)
y = ymapper.map_from_target(vy)
for i in indices
if @mget('snap_to_marker')
rx = canvas.sx_to_vx(renderer.sx[i])
ry = canvas.sy_to_vy(renderer.sy[i])
else
[rx, ry] = [vx, vy]
table = $('<table></table>')
for label, value of @mget("tooltips")
row = $("<tr></tr>")
row.append($("<td class='bk-tooltip-row-label'>#{ label }: </td>"))
td = $("<td class='bk-tooltip-row-value'></td>")
if value.indexOf("$color") >= 0
[match, opts, colname] = value.match(/\$color(\[.*\])?:(\w*)/)
column = ds.get_column(colname)
if not column?
span = $("<span>#{ colname } unknown</span>")
td.append(span)
continue
hex = opts?.indexOf("hex") >= 0
swatch = opts?.indexOf("swatch") >= 0
color = column[i]
if not color?
span = $("<span>(null)</span>")
td.append(span)
continue
if hex
color = _color_to_hex(color)
span = $("<span>#{ color }</span>")
td.append(span)
if swatch
span = $("<span class='bk-tooltip-color-block'> </span>")
span.css({ backgroundColor: color})
td.append(span)
else
value = value.replace("$index", "#{ i }")
value = value.replace("$x", "#{ _format_number(x) }")
value = value.replace("$y", "#{ _format_number(y) }")
value = value.replace("$vx", "#{ vx }")
value = value.replace("$vy", "#{ vy }")
value = value.replace("$sx", "#{ sx }")
value = value.replace("$sy", "#{ sy }")
while value.indexOf("@") >= 0
[match, unused, column_name] = value.match(/(@)(\w*)/)
column = ds.get_column(column_name)
if not column?
value = value.replace(column_name, "#{ column_name } unknown")
break
column = ds.get_column(column_name)
dsvalue = column[i]
if typeof(dsvalue) == "number"
value = value.replace(match, "#{ _format_number(dsvalue) }")
else
value = value.replace(match, "#{ dsvalue }")
span = $("<span>#{ value }</span>")
td.append(span)
row.append(td)
table.append(row)
@mget('tooltip').add(rx, ry, table)
return null
class HoverTool extends InspectTool.Model
default_view: HoverToolView
type: "HoverTool"
tool_name: "Hover Tool"
icon = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAA8ElEQVQ4T42T0Q2CMBCGaQjPxgmMG/jelIQN3ECZQEfADRwBJzBuQCC81wlkBHxvqP8lmhTsUfpSWvp/vfvvKiJn1HVdpml6dPdC38I90DSNxVobYzKMPiSm/z5AZK3t4zjOpJQ6BPECfiKAcqRUzkFmASQEhHzJOUgQ8BWyviwFsL4sBnC+LAE84YMWQnSAVCixdkvMAiB6Q7TCfJtrLq4PHkmSnHHbi0LHvOYa6w/g3kitjSgOYFyUUoWvlCPA9C1gvQfgDmiHNLZBgO8A3geZt+G6chQBA7hi/0QVQBrZ9EwQ0LbtbhgGghQAVFPAB25HmRH8b2/nAAAAAElFTkSuQmCC'
initialize: (attrs, options) ->
super(attrs, options)
@set('tooltip', new Tooltip.Model())
renderers = @get('plot').get('renderers')
renderers.push(@get('tooltip'))
@get('plot').set('renderers', renderers)
defaults: () ->
return _.extend({}, super(), {
snap_to_marker: true
tooltips: {
"index": "$index"
"data (x, y)": "($x, $y)"
"canvas (x, y)": "($sx, $sy)"
}
})
class HoverTools extends Collection
model: HoverTool
return {
"Model": HoverTool,
"Collection": new HoverTools(),
"View": HoverToolView,
}
| 184695 |
define [
"underscore"
"sprintf"
"common/collection"
"renderer/annotation/tooltip"
"./inspect_tool"
], (_, sprintf, Collection, Tooltip, InspectTool) ->
_color_to_hex = (color) ->
if (color.substr(0, 1) == '#')
return color
digits = /(.*?)rgb\((\d+), (\d+), (\d+)\)/.exec(color)
red = parseInt(digits[2])
green = parseInt(digits[3])
blue = parseInt(digits[4])
rgb = blue | (green << 8) | (red << 16)
return digits[1] + '#' + rgb.toString(16)
_format_number = (number) ->
# will get strings for categorical types, just pass back
if typeof(number) == "string"
return number
if Math.floor(number) == number
return sprintf("%d", number)
if Math.abs(number) > 0.1 and Math.abs(number) < 1000
return sprintf("%0.3f", number)
return sprintf("%0.3e", number)
class HoverToolView extends InspectTool.View
bind_bokeh_events: () ->
for r in @mget('renderers')
@listenTo(r.get('data_source'), 'inspect', @_update)
@plot_view.canvas_view.canvas_wrapper.css('cursor', 'crosshair')
_move: (e) ->
if not @mget('active')
return
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
if not @plot_view.frame.contains(vx, vy)
@mget('tooltip').clear()
return
@_inspect(vx, vy)
_move_exit: ()->
@mget('tooltip').clear()
_inspect: (vx, vy, e) ->
geometry = {
type: 'point'
vx: vx
vy: vy
}
for r in @mget('renderers')
sm = r.get('data_source').get('selection_manager')
sm.inspect(@, @plot_view.renderers[r.id], geometry, {"geometry": geometry})
_update: (indices, tool, renderer, ds, {geometry}) ->
@mget('tooltip').clear()
if indices.length == 0
return
vx = geometry.vx
vy = geometry.vy
canvas = @plot_model.get('canvas')
frame = @plot_model.get('frame')
sx = canvas.vx_to_sx(vx)
sy = canvas.vy_to_sy(vy)
xmapper = frame.get('x_mappers')[renderer.mget('x_range_name')]
ymapper = frame.get('y_mappers')[renderer.mget('y_range_name')]
x = xmapper.map_from_target(vx)
y = ymapper.map_from_target(vy)
for i in indices
if @mget('snap_to_marker')
rx = canvas.sx_to_vx(renderer.sx[i])
ry = canvas.sy_to_vy(renderer.sy[i])
else
[rx, ry] = [vx, vy]
table = $('<table></table>')
for label, value of @mget("tooltips")
row = $("<tr></tr>")
row.append($("<td class='bk-tooltip-row-label'>#{ label }: </td>"))
td = $("<td class='bk-tooltip-row-value'></td>")
if value.indexOf("$color") >= 0
[match, opts, colname] = value.match(/\$color(\[.*\])?:(\w*)/)
column = ds.get_column(colname)
if not column?
span = $("<span>#{ colname } unknown</span>")
td.append(span)
continue
hex = opts?.indexOf("hex") >= 0
swatch = opts?.indexOf("swatch") >= 0
color = column[i]
if not color?
span = $("<span>(null)</span>")
td.append(span)
continue
if hex
color = _color_to_hex(color)
span = $("<span>#{ color }</span>")
td.append(span)
if swatch
span = $("<span class='bk-tooltip-color-block'> </span>")
span.css({ backgroundColor: color})
td.append(span)
else
value = value.replace("$index", "#{ i }")
value = value.replace("$x", "#{ _format_number(x) }")
value = value.replace("$y", "#{ _format_number(y) }")
value = value.replace("$vx", "#{ vx }")
value = value.replace("$vy", "#{ vy }")
value = value.replace("$sx", "#{ sx }")
value = value.replace("$sy", "#{ sy }")
while value.indexOf("@") >= 0
[match, unused, column_name] = value.match(/(@)(\w*)/)
column = ds.get_column(column_name)
if not column?
value = value.replace(column_name, "#{ column_name } unknown")
break
column = ds.get_column(column_name)
dsvalue = column[i]
if typeof(dsvalue) == "number"
value = value.replace(match, "#{ _format_number(dsvalue) }")
else
value = value.replace(match, "#{ dsvalue }")
span = $("<span>#{ value }</span>")
td.append(span)
row.append(td)
table.append(row)
@mget('tooltip').add(rx, ry, table)
return null
class HoverTool extends InspectTool.Model
default_view: HoverToolView
type: "HoverTool"
tool_name: "Hover Tool"
icon = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAA8ElEQVQ4T42T0Q2CMBCGaQjPxgmMG/jelIQN3ECZQEfADRwBJzBuQCC81wlkBHxvqP8lmhTsUfpSWvp/vfvvKiJn1HVdpml6dPdC38I90DSNxVobYzKMPiSm/z5AZK3t4zjOpJQ6BPECfiKAcqRUzkFmASQEhHzJOUgQ8BWyvi<KEY>'
initialize: (attrs, options) ->
super(attrs, options)
@set('tooltip', new Tooltip.Model())
renderers = @get('plot').get('renderers')
renderers.push(@get('tooltip'))
@get('plot').set('renderers', renderers)
defaults: () ->
return _.extend({}, super(), {
snap_to_marker: true
tooltips: {
"index": "$index"
"data (x, y)": "($x, $y)"
"canvas (x, y)": "($sx, $sy)"
}
})
class HoverTools extends Collection
model: HoverTool
return {
"Model": HoverTool,
"Collection": new HoverTools(),
"View": HoverToolView,
}
| true |
define [
"underscore"
"sprintf"
"common/collection"
"renderer/annotation/tooltip"
"./inspect_tool"
], (_, sprintf, Collection, Tooltip, InspectTool) ->
_color_to_hex = (color) ->
if (color.substr(0, 1) == '#')
return color
digits = /(.*?)rgb\((\d+), (\d+), (\d+)\)/.exec(color)
red = parseInt(digits[2])
green = parseInt(digits[3])
blue = parseInt(digits[4])
rgb = blue | (green << 8) | (red << 16)
return digits[1] + '#' + rgb.toString(16)
_format_number = (number) ->
# will get strings for categorical types, just pass back
if typeof(number) == "string"
return number
if Math.floor(number) == number
return sprintf("%d", number)
if Math.abs(number) > 0.1 and Math.abs(number) < 1000
return sprintf("%0.3f", number)
return sprintf("%0.3e", number)
class HoverToolView extends InspectTool.View
bind_bokeh_events: () ->
for r in @mget('renderers')
@listenTo(r.get('data_source'), 'inspect', @_update)
@plot_view.canvas_view.canvas_wrapper.css('cursor', 'crosshair')
_move: (e) ->
if not @mget('active')
return
canvas = @plot_view.canvas
vx = canvas.sx_to_vx(e.bokeh.sx)
vy = canvas.sy_to_vy(e.bokeh.sy)
if not @plot_view.frame.contains(vx, vy)
@mget('tooltip').clear()
return
@_inspect(vx, vy)
_move_exit: ()->
@mget('tooltip').clear()
_inspect: (vx, vy, e) ->
geometry = {
type: 'point'
vx: vx
vy: vy
}
for r in @mget('renderers')
sm = r.get('data_source').get('selection_manager')
sm.inspect(@, @plot_view.renderers[r.id], geometry, {"geometry": geometry})
_update: (indices, tool, renderer, ds, {geometry}) ->
@mget('tooltip').clear()
if indices.length == 0
return
vx = geometry.vx
vy = geometry.vy
canvas = @plot_model.get('canvas')
frame = @plot_model.get('frame')
sx = canvas.vx_to_sx(vx)
sy = canvas.vy_to_sy(vy)
xmapper = frame.get('x_mappers')[renderer.mget('x_range_name')]
ymapper = frame.get('y_mappers')[renderer.mget('y_range_name')]
x = xmapper.map_from_target(vx)
y = ymapper.map_from_target(vy)
for i in indices
if @mget('snap_to_marker')
rx = canvas.sx_to_vx(renderer.sx[i])
ry = canvas.sy_to_vy(renderer.sy[i])
else
[rx, ry] = [vx, vy]
table = $('<table></table>')
for label, value of @mget("tooltips")
row = $("<tr></tr>")
row.append($("<td class='bk-tooltip-row-label'>#{ label }: </td>"))
td = $("<td class='bk-tooltip-row-value'></td>")
if value.indexOf("$color") >= 0
[match, opts, colname] = value.match(/\$color(\[.*\])?:(\w*)/)
column = ds.get_column(colname)
if not column?
span = $("<span>#{ colname } unknown</span>")
td.append(span)
continue
hex = opts?.indexOf("hex") >= 0
swatch = opts?.indexOf("swatch") >= 0
color = column[i]
if not color?
span = $("<span>(null)</span>")
td.append(span)
continue
if hex
color = _color_to_hex(color)
span = $("<span>#{ color }</span>")
td.append(span)
if swatch
span = $("<span class='bk-tooltip-color-block'> </span>")
span.css({ backgroundColor: color})
td.append(span)
else
value = value.replace("$index", "#{ i }")
value = value.replace("$x", "#{ _format_number(x) }")
value = value.replace("$y", "#{ _format_number(y) }")
value = value.replace("$vx", "#{ vx }")
value = value.replace("$vy", "#{ vy }")
value = value.replace("$sx", "#{ sx }")
value = value.replace("$sy", "#{ sy }")
while value.indexOf("@") >= 0
[match, unused, column_name] = value.match(/(@)(\w*)/)
column = ds.get_column(column_name)
if not column?
value = value.replace(column_name, "#{ column_name } unknown")
break
column = ds.get_column(column_name)
dsvalue = column[i]
if typeof(dsvalue) == "number"
value = value.replace(match, "#{ _format_number(dsvalue) }")
else
value = value.replace(match, "#{ dsvalue }")
span = $("<span>#{ value }</span>")
td.append(span)
row.append(td)
table.append(row)
@mget('tooltip').add(rx, ry, table)
return null
class HoverTool extends InspectTool.Model
default_view: HoverToolView
type: "HoverTool"
tool_name: "Hover Tool"
icon = 'data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAA8ElEQVQ4T42T0Q2CMBCGaQjPxgmMG/jelIQN3ECZQEfADRwBJzBuQCC81wlkBHxvqP8lmhTsUfpSWvp/vfvvKiJn1HVdpml6dPdC38I90DSNxVobYzKMPiSm/z5AZK3t4zjOpJQ6BPECfiKAcqRUzkFmASQEhHzJOUgQ8BWyviPI:KEY:<KEY>END_PI'
initialize: (attrs, options) ->
super(attrs, options)
@set('tooltip', new Tooltip.Model())
renderers = @get('plot').get('renderers')
renderers.push(@get('tooltip'))
@get('plot').set('renderers', renderers)
defaults: () ->
return _.extend({}, super(), {
snap_to_marker: true
tooltips: {
"index": "$index"
"data (x, y)": "($x, $y)"
"canvas (x, y)": "($sx, $sy)"
}
})
class HoverTools extends Collection
model: HoverTool
return {
"Model": HoverTool,
"Collection": new HoverTools(),
"View": HoverToolView,
}
|
[
{
"context": " @id = 3\n @association 'user', first_name: 'Kamil'\n\n Factory.define 'facebook', class: Facebook,",
"end": 695,
"score": 0.9995479583740234,
"start": 690,
"tag": "NAME",
"value": "Kamil"
}
] | test/association_test.coffee | JackDanger/factory-boy | 2 | require('./test_helper')
class User extends Object
@create: (attrs = {}, callback) ->
callback(null, new User(attrs))
class Profile extends Object
@create: (attrs = {}, callback) ->
callback(null, new Profile(attrs))
class Facebook extends Object
@create: (attrs = {}, callback) ->
callback(null, new Profile(attrs))
describe "Factory#association", ->
beforeEach ->
Factory.define 'admin', class: User, ->
@id = 1
Factory.define 'user', class: User, ->
@id = 2
@association 'external_facebook_id', 'facebook', factory: {field: 'uuid'}
Factory.define 'profile', class: Profile, ->
@id = 3
@association 'user', first_name: 'Kamil'
Factory.define 'facebook', class: Facebook, ->
@uuid = 10000001
Factory.define 'adminProfile', class: Profile, ->
@association 'user', factory: {name: 'admin'}
it 'should create association when building factory', ->
Factory.build 'profile', (err, profile) ->
profile.should.have.property('user_id', 2)
it 'should create association from given factory name', ->
Factory.build 'adminProfile', (err, adminProfile) ->
adminProfile.should.have.property('user_id', 1)
it 'should assign association to given field', ->
Factory.build 'user', (err, user) ->
user.should.have.property('external_facebook_id', 10000001)
it 'should create association when creating factory', ->
Factory.create 'profile', (err, profile) ->
profile.should.have.property('user_id', 2) | 183522 | require('./test_helper')
class User extends Object
@create: (attrs = {}, callback) ->
callback(null, new User(attrs))
class Profile extends Object
@create: (attrs = {}, callback) ->
callback(null, new Profile(attrs))
class Facebook extends Object
@create: (attrs = {}, callback) ->
callback(null, new Profile(attrs))
describe "Factory#association", ->
beforeEach ->
Factory.define 'admin', class: User, ->
@id = 1
Factory.define 'user', class: User, ->
@id = 2
@association 'external_facebook_id', 'facebook', factory: {field: 'uuid'}
Factory.define 'profile', class: Profile, ->
@id = 3
@association 'user', first_name: '<NAME>'
Factory.define 'facebook', class: Facebook, ->
@uuid = 10000001
Factory.define 'adminProfile', class: Profile, ->
@association 'user', factory: {name: 'admin'}
it 'should create association when building factory', ->
Factory.build 'profile', (err, profile) ->
profile.should.have.property('user_id', 2)
it 'should create association from given factory name', ->
Factory.build 'adminProfile', (err, adminProfile) ->
adminProfile.should.have.property('user_id', 1)
it 'should assign association to given field', ->
Factory.build 'user', (err, user) ->
user.should.have.property('external_facebook_id', 10000001)
it 'should create association when creating factory', ->
Factory.create 'profile', (err, profile) ->
profile.should.have.property('user_id', 2) | true | require('./test_helper')
class User extends Object
@create: (attrs = {}, callback) ->
callback(null, new User(attrs))
class Profile extends Object
@create: (attrs = {}, callback) ->
callback(null, new Profile(attrs))
class Facebook extends Object
@create: (attrs = {}, callback) ->
callback(null, new Profile(attrs))
describe "Factory#association", ->
beforeEach ->
Factory.define 'admin', class: User, ->
@id = 1
Factory.define 'user', class: User, ->
@id = 2
@association 'external_facebook_id', 'facebook', factory: {field: 'uuid'}
Factory.define 'profile', class: Profile, ->
@id = 3
@association 'user', first_name: 'PI:NAME:<NAME>END_PI'
Factory.define 'facebook', class: Facebook, ->
@uuid = 10000001
Factory.define 'adminProfile', class: Profile, ->
@association 'user', factory: {name: 'admin'}
it 'should create association when building factory', ->
Factory.build 'profile', (err, profile) ->
profile.should.have.property('user_id', 2)
it 'should create association from given factory name', ->
Factory.build 'adminProfile', (err, adminProfile) ->
adminProfile.should.have.property('user_id', 1)
it 'should assign association to given field', ->
Factory.build 'user', (err, user) ->
user.should.have.property('external_facebook_id', 10000001)
it 'should create association when creating factory', ->
Factory.create 'profile', (err, profile) ->
profile.should.have.property('user_id', 2) |
[
{
"context": " available commands in this script\n#\n# Author:\n# Guglielmo Iozzia (@virtualramblas)\n\nsdcInstance = \"\"\n\nmodule.expor",
"end": 681,
"score": 0.9998672008514404,
"start": 665,
"tag": "NAME",
"value": "Guglielmo Iozzia"
},
{
"context": "ds in this script\n#\n# Author:\n# Guglielmo Iozzia (@virtualramblas)\n\nsdcInstance = \"\"\n\nmodule.exports = (robot) ->\n\t",
"end": 698,
"score": 0.999293863773346,
"start": 682,
"tag": "USERNAME",
"value": "(@virtualramblas"
},
{
"context": "end \"Max memory: #{value.value}\"\n\t\t\t\t\t\tif key == \"jvm.memory.total.committed\"\n\t\t\t\t\t\t\tres.send \"Committed memory",
"end": 5158,
"score": 0.570932149887085,
"start": 5147,
"tag": "KEY",
"value": "jvm.memory."
},
{
"context": " #{value.value}\"\n\t\t\t\t\t\tif key == \"jvm.memory.total.committed\"\n\t\t\t\t\t\t\tres.send \"Committed memory: #{va",
"end": 5163,
"score": 0.5762554407119751,
"start": 5163,
"tag": "KEY",
"value": ""
},
{
"context": "ommitted memory: #{value.value}\"\n\t\t\t\t\t\tif key == \"jvm.memory.total.used\"\n\t\t\t\t\t\t\tres.send \"Memory used: #",
"end": 5247,
"score": 0.6190860867500305,
"start": 5243,
"tag": "KEY",
"value": "jvm."
},
{
"context": "emory: #{value.value}\"\n\t\t\t\t\t\tif key == \"jvm.memory.total.used\"\n\t\t\t\t\t\t\tres.send \"Memory used: #{value.",
"end": 5253,
"score": 0.576179563999176,
"start": 5253,
"tag": "KEY",
"value": ""
},
{
"context": " #{value.value}\"\n\t\t\t\t\t\tif key == \"jvm.memory.total.used\"\n\t\t\t\t\t\t\tres.send \"Memory used: #{value.value}",
"end": 5259,
"score": 0.561119019985199,
"start": 5259,
"tag": "KEY",
"value": ""
},
{
"context": "nd \"Memory used: #{value.value}\"\n\t\t\t\t\t\tif key == \"jvm.memory.heap.max\"\n\t\t\t\t\t\t\tres.send \"Max heap memory:#{value",
"end": 5340,
"score": 0.6797857880592346,
"start": 5329,
"tag": "KEY",
"value": "jvm.memory."
}
] | sdc-helper/sdc-helper.coffee | virtualramblas/sdc-hubot | 0 | # Description:
# Streamsets Data Collector monitoring script
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot sdc check <sdc_url> - Checks if the given SDC server is alive
# hubot sdc pipeline <pipeline_name> status - Checks the current status of a given pipeline
# hubot sdc get uuid <pipeline_name> - Returns the uuid of a given pipeline
# hubot sdc pipeline counts <pipeline_uuid> - Returns the current record counts for a given pipeline
# hubot sdc pipeline jvm metrics <pipeline_uuid> - Returns the JVM metrics for a given pipeline
# hubot sdc help - Displays a list of all the available commands in this script
#
# Author:
# Guglielmo Iozzia (@virtualramblas)
sdcInstance = ""
module.exports = (robot) ->
#----------------------------------------
# Checks if the given SDC server is alive
#----------------------------------------
robot.hear /sdc check (.*)/i, (res) ->
sdcInstance = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/system/info"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
res.send "#{sdcInstance} seems alive."
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#----------------------------------------------
# Checks the current status of a given pipeline
#----------------------------------------------
robot.hear /sdc pipeline (.*) status/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipelines?filterText=" + pipelineName + "&includeStatus=true"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
for item in data
for jsonItem in item
if jsonItem.status?
res.send "The current status of the #{pipelineName} pipeline is #{jsonItem.status}."
break
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------
# Returns the uuid of a given pipeline
#-------------------------------------
robot.hear /sdc get uuid (.*)/i, (res) ->
pipelineName = res.match[1]
getPipelineUuidUrl = sdcInstance + "/rest/v1/pipelines?filterText=" + pipelineName
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(getPipelineUuidUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
for item in data
res.send "The #{pipelineName} pipeline uuid is #{item.name}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------------------------
# Returns the current record counts for a given pipeline
#-------------------------------------------------------
robot.hear /sdc pipeline counts (.*)/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipeline/" + pipelineName + "/status"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
metrics = JSON.parse data.metrics
for key, value of metrics.counters
if key == "pipeline.batchCount.counter"
batchCountCounter = value.count
if key == "pipeline.batchInputRecords.counter"
batchInputRecordsCounter = value.count
if key == "pipeline.batchOutputRecords.counter"
batchOutputRecordsCounter = value.count
if key == "pipeline.batchErrorRecords.counter"
batchErrorRecordsCounter = value.count
res.send "Batch Count = #{batchCountCounter}. Input Records = #{batchInputRecordsCounter}. Output Records = #{batchOutputRecordsCounter}. Error Records: #{batchErrorRecordsCounter}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#---------------------------------------------
# Returns the JVM metrics for a given pipeline
#---------------------------------------------
robot.hear /sdc pipeline jvm metrics (.*)/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipeline/" + pipelineName + "/status"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
metrics = JSON.parse data.metrics
for key, value of metrics.gauges
#res.send "#{key} : #{value}"
if key == "jvm.memory.total.max"
res.send "Max memory: #{value.value}"
if key == "jvm.memory.total.committed"
res.send "Committed memory: #{value.value}"
if key == "jvm.memory.total.used"
res.send "Memory used: #{value.value}"
if key == "jvm.memory.heap.max"
res.send "Max heap memory:#{value.value}"
if key == "jvm.memory.heap.committed"
res.send "Committed heap memory:#{value.value}"
if key == "jvm.memory.heap.used"
res.send "Heap memory used: #{value.value}"
if key == "jvm.memory.non-heap.max"
res.send "Max non-heap memory: #{value.value}"
if key == "jvm.memory.non-heap.committed"
res.send "Committed non-heap memory: #{value.value}"
if key == "jvm.memory.non-heap.used"
res.send "Non-heap memory used: #{value.value}"
if key == "jvm.threads.count"
res.send "Threads count: #{value.value}"
if key == "jvm.threads.blocked.count"
res.send "Blocked threads count:#{value.value}"
if key == "jvm.threads.daemon.count"
res.send "Daemon threads count:#{value.value}"
if key == "jvm.threads.deadlock.count"
res.send "Deadlock threads count:#{value.value}"
if key == "jvm.threads.new.count"
res.send "New threads count:#{value.value}"
if key == "jvm.threads.runnable.count"
res.send "Runnable threads count:#{value.value}"
if key == "jvm.threads.terminated.count"
res.send "Terminated threads count:#{value.value}"
if key == "jvm.threads.timed_waiting.count"
res.send "Time waiting threads count:#{value.value}"
if key == "jvm.threads.waiting.count"
res.send "Waiting threads count:#{value.value}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------------------------------
# Displays a list of all the available commands in this script
#-------------------------------------------------------------
robot.hear /sdc help/i, (res) ->
res.send "Available commands: \n\
sdc check <sdc_url>\n\
sdc pipeline <pipeline_name> status\n\
sdc get uuid <pipeline_name>\n\
sdc pipeline counts <pipeline_uid>\n\
sdc pipeline jvm metrics <pipeline_uid>\n\
sdc help"
| 42343 | # Description:
# Streamsets Data Collector monitoring script
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot sdc check <sdc_url> - Checks if the given SDC server is alive
# hubot sdc pipeline <pipeline_name> status - Checks the current status of a given pipeline
# hubot sdc get uuid <pipeline_name> - Returns the uuid of a given pipeline
# hubot sdc pipeline counts <pipeline_uuid> - Returns the current record counts for a given pipeline
# hubot sdc pipeline jvm metrics <pipeline_uuid> - Returns the JVM metrics for a given pipeline
# hubot sdc help - Displays a list of all the available commands in this script
#
# Author:
# <NAME> (@virtualramblas)
sdcInstance = ""
module.exports = (robot) ->
#----------------------------------------
# Checks if the given SDC server is alive
#----------------------------------------
robot.hear /sdc check (.*)/i, (res) ->
sdcInstance = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/system/info"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
res.send "#{sdcInstance} seems alive."
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#----------------------------------------------
# Checks the current status of a given pipeline
#----------------------------------------------
robot.hear /sdc pipeline (.*) status/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipelines?filterText=" + pipelineName + "&includeStatus=true"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
for item in data
for jsonItem in item
if jsonItem.status?
res.send "The current status of the #{pipelineName} pipeline is #{jsonItem.status}."
break
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------
# Returns the uuid of a given pipeline
#-------------------------------------
robot.hear /sdc get uuid (.*)/i, (res) ->
pipelineName = res.match[1]
getPipelineUuidUrl = sdcInstance + "/rest/v1/pipelines?filterText=" + pipelineName
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(getPipelineUuidUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
for item in data
res.send "The #{pipelineName} pipeline uuid is #{item.name}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------------------------
# Returns the current record counts for a given pipeline
#-------------------------------------------------------
robot.hear /sdc pipeline counts (.*)/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipeline/" + pipelineName + "/status"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
metrics = JSON.parse data.metrics
for key, value of metrics.counters
if key == "pipeline.batchCount.counter"
batchCountCounter = value.count
if key == "pipeline.batchInputRecords.counter"
batchInputRecordsCounter = value.count
if key == "pipeline.batchOutputRecords.counter"
batchOutputRecordsCounter = value.count
if key == "pipeline.batchErrorRecords.counter"
batchErrorRecordsCounter = value.count
res.send "Batch Count = #{batchCountCounter}. Input Records = #{batchInputRecordsCounter}. Output Records = #{batchOutputRecordsCounter}. Error Records: #{batchErrorRecordsCounter}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#---------------------------------------------
# Returns the JVM metrics for a given pipeline
#---------------------------------------------
robot.hear /sdc pipeline jvm metrics (.*)/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipeline/" + pipelineName + "/status"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
metrics = JSON.parse data.metrics
for key, value of metrics.gauges
#res.send "#{key} : #{value}"
if key == "jvm.memory.total.max"
res.send "Max memory: #{value.value}"
if key == "<KEY>total<KEY>.committed"
res.send "Committed memory: #{value.value}"
if key == "<KEY>memory<KEY>.total<KEY>.used"
res.send "Memory used: #{value.value}"
if key == "<KEY>heap.max"
res.send "Max heap memory:#{value.value}"
if key == "jvm.memory.heap.committed"
res.send "Committed heap memory:#{value.value}"
if key == "jvm.memory.heap.used"
res.send "Heap memory used: #{value.value}"
if key == "jvm.memory.non-heap.max"
res.send "Max non-heap memory: #{value.value}"
if key == "jvm.memory.non-heap.committed"
res.send "Committed non-heap memory: #{value.value}"
if key == "jvm.memory.non-heap.used"
res.send "Non-heap memory used: #{value.value}"
if key == "jvm.threads.count"
res.send "Threads count: #{value.value}"
if key == "jvm.threads.blocked.count"
res.send "Blocked threads count:#{value.value}"
if key == "jvm.threads.daemon.count"
res.send "Daemon threads count:#{value.value}"
if key == "jvm.threads.deadlock.count"
res.send "Deadlock threads count:#{value.value}"
if key == "jvm.threads.new.count"
res.send "New threads count:#{value.value}"
if key == "jvm.threads.runnable.count"
res.send "Runnable threads count:#{value.value}"
if key == "jvm.threads.terminated.count"
res.send "Terminated threads count:#{value.value}"
if key == "jvm.threads.timed_waiting.count"
res.send "Time waiting threads count:#{value.value}"
if key == "jvm.threads.waiting.count"
res.send "Waiting threads count:#{value.value}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------------------------------
# Displays a list of all the available commands in this script
#-------------------------------------------------------------
robot.hear /sdc help/i, (res) ->
res.send "Available commands: \n\
sdc check <sdc_url>\n\
sdc pipeline <pipeline_name> status\n\
sdc get uuid <pipeline_name>\n\
sdc pipeline counts <pipeline_uid>\n\
sdc pipeline jvm metrics <pipeline_uid>\n\
sdc help"
| true | # Description:
# Streamsets Data Collector monitoring script
#
# Dependencies:
# None
#
# Configuration:
# None
#
# Commands:
# hubot sdc check <sdc_url> - Checks if the given SDC server is alive
# hubot sdc pipeline <pipeline_name> status - Checks the current status of a given pipeline
# hubot sdc get uuid <pipeline_name> - Returns the uuid of a given pipeline
# hubot sdc pipeline counts <pipeline_uuid> - Returns the current record counts for a given pipeline
# hubot sdc pipeline jvm metrics <pipeline_uuid> - Returns the JVM metrics for a given pipeline
# hubot sdc help - Displays a list of all the available commands in this script
#
# Author:
# PI:NAME:<NAME>END_PI (@virtualramblas)
sdcInstance = ""
module.exports = (robot) ->
#----------------------------------------
# Checks if the given SDC server is alive
#----------------------------------------
robot.hear /sdc check (.*)/i, (res) ->
sdcInstance = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/system/info"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
res.send "#{sdcInstance} seems alive."
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#----------------------------------------------
# Checks the current status of a given pipeline
#----------------------------------------------
robot.hear /sdc pipeline (.*) status/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipelines?filterText=" + pipelineName + "&includeStatus=true"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
for item in data
for jsonItem in item
if jsonItem.status?
res.send "The current status of the #{pipelineName} pipeline is #{jsonItem.status}."
break
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------
# Returns the uuid of a given pipeline
#-------------------------------------
robot.hear /sdc get uuid (.*)/i, (res) ->
pipelineName = res.match[1]
getPipelineUuidUrl = sdcInstance + "/rest/v1/pipelines?filterText=" + pipelineName
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(getPipelineUuidUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
for item in data
res.send "The #{pipelineName} pipeline uuid is #{item.name}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------------------------
# Returns the current record counts for a given pipeline
#-------------------------------------------------------
robot.hear /sdc pipeline counts (.*)/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipeline/" + pipelineName + "/status"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
metrics = JSON.parse data.metrics
for key, value of metrics.counters
if key == "pipeline.batchCount.counter"
batchCountCounter = value.count
if key == "pipeline.batchInputRecords.counter"
batchInputRecordsCounter = value.count
if key == "pipeline.batchOutputRecords.counter"
batchOutputRecordsCounter = value.count
if key == "pipeline.batchErrorRecords.counter"
batchErrorRecordsCounter = value.count
res.send "Batch Count = #{batchCountCounter}. Input Records = #{batchInputRecordsCounter}. Output Records = #{batchOutputRecordsCounter}. Error Records: #{batchErrorRecordsCounter}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#---------------------------------------------
# Returns the JVM metrics for a given pipeline
#---------------------------------------------
robot.hear /sdc pipeline jvm metrics (.*)/i, (res) ->
pipelineName = res.match[1]
sdcUrl = sdcInstance + "/rest/v1/pipeline/" + pipelineName + "/status"
auth = "Basic " + new Buffer(process.env.SDC_USER + ':' + process.env.SDC_PASSWORD).toString('base64')
res.robot.http(sdcUrl)
.header('Authorization', auth, 'Accept', 'application/json')
.get() (err, resp, body) ->
data = null
try
data = JSON.parse body
metrics = JSON.parse data.metrics
for key, value of metrics.gauges
#res.send "#{key} : #{value}"
if key == "jvm.memory.total.max"
res.send "Max memory: #{value.value}"
if key == "PI:KEY:<KEY>END_PItotalPI:KEY:<KEY>END_PI.committed"
res.send "Committed memory: #{value.value}"
if key == "PI:KEY:<KEY>END_PImemoryPI:KEY:<KEY>END_PI.totalPI:KEY:<KEY>END_PI.used"
res.send "Memory used: #{value.value}"
if key == "PI:KEY:<KEY>END_PIheap.max"
res.send "Max heap memory:#{value.value}"
if key == "jvm.memory.heap.committed"
res.send "Committed heap memory:#{value.value}"
if key == "jvm.memory.heap.used"
res.send "Heap memory used: #{value.value}"
if key == "jvm.memory.non-heap.max"
res.send "Max non-heap memory: #{value.value}"
if key == "jvm.memory.non-heap.committed"
res.send "Committed non-heap memory: #{value.value}"
if key == "jvm.memory.non-heap.used"
res.send "Non-heap memory used: #{value.value}"
if key == "jvm.threads.count"
res.send "Threads count: #{value.value}"
if key == "jvm.threads.blocked.count"
res.send "Blocked threads count:#{value.value}"
if key == "jvm.threads.daemon.count"
res.send "Daemon threads count:#{value.value}"
if key == "jvm.threads.deadlock.count"
res.send "Deadlock threads count:#{value.value}"
if key == "jvm.threads.new.count"
res.send "New threads count:#{value.value}"
if key == "jvm.threads.runnable.count"
res.send "Runnable threads count:#{value.value}"
if key == "jvm.threads.terminated.count"
res.send "Terminated threads count:#{value.value}"
if key == "jvm.threads.timed_waiting.count"
res.send "Time waiting threads count:#{value.value}"
if key == "jvm.threads.waiting.count"
res.send "Waiting threads count:#{value.value}"
catch error
res.send "#{sdcInstance} returned: #{error}"
return
#-------------------------------------------------------------
# Displays a list of all the available commands in this script
#-------------------------------------------------------------
robot.hear /sdc help/i, (res) ->
res.send "Available commands: \n\
sdc check <sdc_url>\n\
sdc pipeline <pipeline_name> status\n\
sdc get uuid <pipeline_name>\n\
sdc pipeline counts <pipeline_uid>\n\
sdc pipeline jvm metrics <pipeline_uid>\n\
sdc help"
|
[
{
"context": "age'\nbus = require('./event-bus')()\n\nTOKEN_KEY = 'learn-ide:token'\n\nmodule.exports = token = {\n get: ->\n ",
"end": 95,
"score": 0.6236005425453186,
"start": 86,
"tag": "PASSWORD",
"value": "learn-ide"
},
{
"context": "= require('./event-bus')()\n\nTOKEN_KEY = 'learn-ide:token'\n\nmodule.exports = token = {\n get: ->\n l",
"end": 95,
"score": 0.48727911710739136,
"start": 95,
"tag": "KEY",
"value": ""
}
] | lib/token.coffee | learn-co/mastermind | 1 | localStorage = require './local-storage'
bus = require('./event-bus')()
TOKEN_KEY = 'learn-ide:token'
module.exports = token = {
get: ->
localStorage.get(TOKEN_KEY)
set: (value) ->
localStorage.set(TOKEN_KEY, value)
bus.emit(TOKEN_KEY, value)
unset: ->
localStorage.delete(TOKEN_KEY)
bus.emit(TOKEN_KEY, undefined)
observe: (callback) ->
callback(token.get())
bus.on(TOKEN_KEY, callback)
}
| 138269 | localStorage = require './local-storage'
bus = require('./event-bus')()
TOKEN_KEY = '<PASSWORD> <KEY>:token'
module.exports = token = {
get: ->
localStorage.get(TOKEN_KEY)
set: (value) ->
localStorage.set(TOKEN_KEY, value)
bus.emit(TOKEN_KEY, value)
unset: ->
localStorage.delete(TOKEN_KEY)
bus.emit(TOKEN_KEY, undefined)
observe: (callback) ->
callback(token.get())
bus.on(TOKEN_KEY, callback)
}
| true | localStorage = require './local-storage'
bus = require('./event-bus')()
TOKEN_KEY = 'PI:PASSWORD:<PASSWORD>END_PI PI:KEY:<KEY>END_PI:token'
module.exports = token = {
get: ->
localStorage.get(TOKEN_KEY)
set: (value) ->
localStorage.set(TOKEN_KEY, value)
bus.emit(TOKEN_KEY, value)
unset: ->
localStorage.delete(TOKEN_KEY)
bus.emit(TOKEN_KEY, undefined)
observe: (callback) ->
callback(token.get())
bus.on(TOKEN_KEY, callback)
}
|
[
{
"context": "> console.log(arguments)\n\t\t\t\terr:->\n\n\t\t@userId = '12345abcde'\n\n\tdescribe \"getManagedPublishers\", ->\n\t\t\tit 'fet",
"end": 917,
"score": 0.8898299336433411,
"start": 907,
"tag": "KEY",
"value": "12345abcde"
}
] | test/unit/coffee/Publishers/PublishersGetterTests.coffee | shyoshyo/web-sharelatex | 1 | SandboxedModule = require('sandboxed-module')
require('chai').should()
expect = require('chai').expect
sinon = require('sinon')
modulePath = require('path').join __dirname, '../../../../app/js/Features/Publishers/PublishersGetter.js'
describe 'PublishersGetter', ->
beforeEach ->
@publisher =
_id: 'mock-publsiher-id'
slug: 'ieee'
fetchV1Data: sinon.stub()
@PublishersGetter = SandboxedModule.require modulePath, requires:
'../User/UserGetter': @UserGetter
"../UserMembership/UserMembershipHandler": @UserMembershipHandler = {
getEntitiesByUser: sinon.stub().callsArgWith(2, null, [@publisher])
}
"../UserMembership/UserMembershipEntityConfigs": @UserMembershipEntityConfigs = {
publisher:
modelName: 'Publisher'
canCreate: true
fields:
primaryKey: 'slug'
}
'logger-sharelatex':
log:-> console.log(arguments)
err:->
@userId = '12345abcde'
describe "getManagedPublishers", ->
it 'fetches v1 data before returning publisher list', (done) ->
@PublishersGetter.getManagedPublishers @userId, (error, publishers) ->
publishers.length.should.equal 1
done()
| 17546 | SandboxedModule = require('sandboxed-module')
require('chai').should()
expect = require('chai').expect
sinon = require('sinon')
modulePath = require('path').join __dirname, '../../../../app/js/Features/Publishers/PublishersGetter.js'
describe 'PublishersGetter', ->
beforeEach ->
@publisher =
_id: 'mock-publsiher-id'
slug: 'ieee'
fetchV1Data: sinon.stub()
@PublishersGetter = SandboxedModule.require modulePath, requires:
'../User/UserGetter': @UserGetter
"../UserMembership/UserMembershipHandler": @UserMembershipHandler = {
getEntitiesByUser: sinon.stub().callsArgWith(2, null, [@publisher])
}
"../UserMembership/UserMembershipEntityConfigs": @UserMembershipEntityConfigs = {
publisher:
modelName: 'Publisher'
canCreate: true
fields:
primaryKey: 'slug'
}
'logger-sharelatex':
log:-> console.log(arguments)
err:->
@userId = '<KEY>'
describe "getManagedPublishers", ->
it 'fetches v1 data before returning publisher list', (done) ->
@PublishersGetter.getManagedPublishers @userId, (error, publishers) ->
publishers.length.should.equal 1
done()
| true | SandboxedModule = require('sandboxed-module')
require('chai').should()
expect = require('chai').expect
sinon = require('sinon')
modulePath = require('path').join __dirname, '../../../../app/js/Features/Publishers/PublishersGetter.js'
describe 'PublishersGetter', ->
beforeEach ->
@publisher =
_id: 'mock-publsiher-id'
slug: 'ieee'
fetchV1Data: sinon.stub()
@PublishersGetter = SandboxedModule.require modulePath, requires:
'../User/UserGetter': @UserGetter
"../UserMembership/UserMembershipHandler": @UserMembershipHandler = {
getEntitiesByUser: sinon.stub().callsArgWith(2, null, [@publisher])
}
"../UserMembership/UserMembershipEntityConfigs": @UserMembershipEntityConfigs = {
publisher:
modelName: 'Publisher'
canCreate: true
fields:
primaryKey: 'slug'
}
'logger-sharelatex':
log:-> console.log(arguments)
err:->
@userId = 'PI:KEY:<KEY>END_PI'
describe "getManagedPublishers", ->
it 'fetches v1 data before returning publisher list', (done) ->
@PublishersGetter.getManagedPublishers @userId, (error, publishers) ->
publishers.length.should.equal 1
done()
|
[
{
"context": "ID = 'ddc7384ce313772cfad415c1ed2afc30'\nSECRET = '2acfd8a430c873ea7d03335b0644733a'\n@findOne = (query, callback) ->\n if query.id is",
"end": 325,
"score": 0.9994824528694153,
"start": 293,
"tag": "KEY",
"value": "2acfd8a430c873ea7d03335b0644733a"
}
] | dal/client-applications.coffee | craigspaeth/nfd-api | 0 | # ClientApplication
#
# An API client.
#
# Schema: {
# secret: String,
# name: String
# }
_ = require 'underscore'
{ ObjectID } = mongodb = require 'mongodb'
# For now we only need to auth against our website so we can hard-code this.
ID = 'ddc7384ce313772cfad415c1ed2afc30'
SECRET = '2acfd8a430c873ea7d03335b0644733a'
@findOne = (query, callback) ->
if query.id is ID and query.secret is SECRET
callback null, { id: ID, secret: SECRET, name: 'No Fee Digs Website' }
else
callback new Error "Could not find application" | 149540 | # ClientApplication
#
# An API client.
#
# Schema: {
# secret: String,
# name: String
# }
_ = require 'underscore'
{ ObjectID } = mongodb = require 'mongodb'
# For now we only need to auth against our website so we can hard-code this.
ID = 'ddc7384ce313772cfad415c1ed2afc30'
SECRET = '<KEY>'
@findOne = (query, callback) ->
if query.id is ID and query.secret is SECRET
callback null, { id: ID, secret: SECRET, name: 'No Fee Digs Website' }
else
callback new Error "Could not find application" | true | # ClientApplication
#
# An API client.
#
# Schema: {
# secret: String,
# name: String
# }
_ = require 'underscore'
{ ObjectID } = mongodb = require 'mongodb'
# For now we only need to auth against our website so we can hard-code this.
ID = 'ddc7384ce313772cfad415c1ed2afc30'
SECRET = 'PI:KEY:<KEY>END_PI'
@findOne = (query, callback) ->
if query.id is ID and query.secret is SECRET
callback null, { id: ID, secret: SECRET, name: 'No Fee Digs Website' }
else
callback new Error "Could not find application" |
[
{
"context": "options =\n api_key: \"85a8bd06685691821a53e13633c6c798\"\n spawn_x: \"right\"\n spawn_y: \"bottom\"\n spawn_o",
"end": 54,
"score": 0.9997601509094238,
"start": 22,
"tag": "KEY",
"value": "85a8bd06685691821a53e13633c6c798"
}
] | wanikani_lesandrev.coffee | jakeoid/ubersicht-wanikani | 3 | options =
api_key: "85a8bd06685691821a53e13633c6c798"
spawn_x: "right"
spawn_y: "bottom"
spawn_offset_x: 0
spawn_offset_y: 0
refreshFrequency: 900
command: "curl --silent https://www.wanikani.com/api/user/#{options.api_key}/study-queue"
render: (output) -> """
<div class="row">
<div>
<img src="/wanikani_lesandrev/crabigator.png">
</div>
<div>
<p><b>Reviews</b> <label id="reviews-available">-</label></p>
<p><b>Lessons</b> <label id="lessons-available">-</label></p>
</div>
</div>
"""
style: """
position: absolute;
#{options.spawn_x}: #{options.spawn_offset_x}px;
#{options.spawn_y}: #{options.spawn_offset_y}px;
margin: 1rem;
color: #FFFFFF;
font-family: 'Roboto', sans-serif;
img
user-drag: none;
user-select: none;
.row
display: flex;
flex-direction: row;
img
height: 36px;
width: 36px;
:first-child
margin-right: .3rem;
p
margin: 0;
"""
update: (output, domEl) ->
try
data = JSON.parse(output)
catch e
return 0
ui = data.user_information
ri = data.requested_information
$(domEl).find('#reviews-available').text(ri.reviews_available)
$(domEl).find('#lessons-available').text(ri.lessons_available)
afterRender: (widget) ->
$.ajax({
url: "https://ajax.googleapis.com/ajax/libs/jqueryui/1.11.4/jquery-ui.min.js",
cache: true,
dataType: "script",
success: ->
$(widget).draggable()
return
}) | 7164 | options =
api_key: "<KEY>"
spawn_x: "right"
spawn_y: "bottom"
spawn_offset_x: 0
spawn_offset_y: 0
refreshFrequency: 900
command: "curl --silent https://www.wanikani.com/api/user/#{options.api_key}/study-queue"
render: (output) -> """
<div class="row">
<div>
<img src="/wanikani_lesandrev/crabigator.png">
</div>
<div>
<p><b>Reviews</b> <label id="reviews-available">-</label></p>
<p><b>Lessons</b> <label id="lessons-available">-</label></p>
</div>
</div>
"""
style: """
position: absolute;
#{options.spawn_x}: #{options.spawn_offset_x}px;
#{options.spawn_y}: #{options.spawn_offset_y}px;
margin: 1rem;
color: #FFFFFF;
font-family: 'Roboto', sans-serif;
img
user-drag: none;
user-select: none;
.row
display: flex;
flex-direction: row;
img
height: 36px;
width: 36px;
:first-child
margin-right: .3rem;
p
margin: 0;
"""
update: (output, domEl) ->
try
data = JSON.parse(output)
catch e
return 0
ui = data.user_information
ri = data.requested_information
$(domEl).find('#reviews-available').text(ri.reviews_available)
$(domEl).find('#lessons-available').text(ri.lessons_available)
afterRender: (widget) ->
$.ajax({
url: "https://ajax.googleapis.com/ajax/libs/jqueryui/1.11.4/jquery-ui.min.js",
cache: true,
dataType: "script",
success: ->
$(widget).draggable()
return
}) | true | options =
api_key: "PI:KEY:<KEY>END_PI"
spawn_x: "right"
spawn_y: "bottom"
spawn_offset_x: 0
spawn_offset_y: 0
refreshFrequency: 900
command: "curl --silent https://www.wanikani.com/api/user/#{options.api_key}/study-queue"
render: (output) -> """
<div class="row">
<div>
<img src="/wanikani_lesandrev/crabigator.png">
</div>
<div>
<p><b>Reviews</b> <label id="reviews-available">-</label></p>
<p><b>Lessons</b> <label id="lessons-available">-</label></p>
</div>
</div>
"""
style: """
position: absolute;
#{options.spawn_x}: #{options.spawn_offset_x}px;
#{options.spawn_y}: #{options.spawn_offset_y}px;
margin: 1rem;
color: #FFFFFF;
font-family: 'Roboto', sans-serif;
img
user-drag: none;
user-select: none;
.row
display: flex;
flex-direction: row;
img
height: 36px;
width: 36px;
:first-child
margin-right: .3rem;
p
margin: 0;
"""
update: (output, domEl) ->
try
data = JSON.parse(output)
catch e
return 0
ui = data.user_information
ri = data.requested_information
$(domEl).find('#reviews-available').text(ri.reviews_available)
$(domEl).find('#lessons-available').text(ri.lessons_available)
afterRender: (widget) ->
$.ajax({
url: "https://ajax.googleapis.com/ajax/libs/jqueryui/1.11.4/jquery-ui.min.js",
cache: true,
dataType: "script",
success: ->
$(widget).draggable()
return
}) |
[
{
"context": ": globals.userAccount._id\n constructorName : 'JAccount'\n }\n\n\ndefaultAccount = ->\n profile :\n first",
"end": 295,
"score": 0.5896362662315369,
"start": 288,
"tag": "USERNAME",
"value": "Account"
},
{
"context": "defaultAccount = ->\n profile :\n firstName : 'a koding'\n lastName : 'user'\n nickname : '#'\n isE",
"end": 361,
"score": 0.9983909726142883,
"start": 353,
"tag": "NAME",
"value": "a koding"
},
{
"context": "le :\n firstName : 'a koding'\n lastName : 'user'\n nickname : '#'\n isExempt : no\n\n\ndefaultTro",
"end": 384,
"score": 0.9948851466178894,
"start": 380,
"tag": "NAME",
"value": "user"
},
{
"context": "ount = (nickname) ->\n profile :\n nickname : 'foouser'\n firstName : ''\n lastName : ''\n\n\ngetGrava",
"end": 568,
"score": 0.999472975730896,
"start": 561,
"tag": "USERNAME",
"value": "foouser"
}
] | client/app/lib/components/profile/helper.coffee | ezgikaysi/koding | 1 | _ = require 'lodash'
proxifyUrl = require 'app/util/proxifyUrl'
regexps = require 'app/util/regexps'
defaultAccountOrigin = ->
globals = require 'globals'
return {
id : globals.userAccount._id
_id : globals.userAccount._id
constructorName : 'JAccount'
}
defaultAccount = ->
profile :
firstName : 'a koding'
lastName : 'user'
nickname : '#'
isExempt : no
defaultTrollAccount = -> _.assign {}, defaultAccount(), { isExempt: yes }
namelessAccount = (nickname) ->
profile :
nickname : 'foouser'
firstName : ''
lastName : ''
getGravatarUri = (account, size) ->
{ hash } = account.profile
# make sure we are fetching an image with a non-decimal size.
size = Math.round size
defaultUri = """
https://koding-cdn.s3.amazonaws.com/new-avatars/default.avatar.#{size}.png
"""
{ protocol } = global.location
return "#{protocol}//gravatar.com/avatar/#{hash}?size=#{size}&d=#{defaultUri}&r=g"
getAvatarUri = (account, width, height, dpr) ->
{ profile } = account
if profile.avatar?.match regexps.webProtocolRegExp
width = width * dpr
height = height * dpr
return proxifyUrl profile.avatar, { crop: yes, width, height }
return getGravatarUri account, width * dpr
module.exports = {
defaultAccountOrigin
defaultAccount
defaultTrollAccount
namelessAccount
getGravatarUri
getAvatarUri
}
| 150733 | _ = require 'lodash'
proxifyUrl = require 'app/util/proxifyUrl'
regexps = require 'app/util/regexps'
defaultAccountOrigin = ->
globals = require 'globals'
return {
id : globals.userAccount._id
_id : globals.userAccount._id
constructorName : 'JAccount'
}
defaultAccount = ->
profile :
firstName : '<NAME>'
lastName : '<NAME>'
nickname : '#'
isExempt : no
defaultTrollAccount = -> _.assign {}, defaultAccount(), { isExempt: yes }
namelessAccount = (nickname) ->
profile :
nickname : 'foouser'
firstName : ''
lastName : ''
getGravatarUri = (account, size) ->
{ hash } = account.profile
# make sure we are fetching an image with a non-decimal size.
size = Math.round size
defaultUri = """
https://koding-cdn.s3.amazonaws.com/new-avatars/default.avatar.#{size}.png
"""
{ protocol } = global.location
return "#{protocol}//gravatar.com/avatar/#{hash}?size=#{size}&d=#{defaultUri}&r=g"
getAvatarUri = (account, width, height, dpr) ->
{ profile } = account
if profile.avatar?.match regexps.webProtocolRegExp
width = width * dpr
height = height * dpr
return proxifyUrl profile.avatar, { crop: yes, width, height }
return getGravatarUri account, width * dpr
module.exports = {
defaultAccountOrigin
defaultAccount
defaultTrollAccount
namelessAccount
getGravatarUri
getAvatarUri
}
| true | _ = require 'lodash'
proxifyUrl = require 'app/util/proxifyUrl'
regexps = require 'app/util/regexps'
defaultAccountOrigin = ->
globals = require 'globals'
return {
id : globals.userAccount._id
_id : globals.userAccount._id
constructorName : 'JAccount'
}
defaultAccount = ->
profile :
firstName : 'PI:NAME:<NAME>END_PI'
lastName : 'PI:NAME:<NAME>END_PI'
nickname : '#'
isExempt : no
defaultTrollAccount = -> _.assign {}, defaultAccount(), { isExempt: yes }
namelessAccount = (nickname) ->
profile :
nickname : 'foouser'
firstName : ''
lastName : ''
getGravatarUri = (account, size) ->
{ hash } = account.profile
# make sure we are fetching an image with a non-decimal size.
size = Math.round size
defaultUri = """
https://koding-cdn.s3.amazonaws.com/new-avatars/default.avatar.#{size}.png
"""
{ protocol } = global.location
return "#{protocol}//gravatar.com/avatar/#{hash}?size=#{size}&d=#{defaultUri}&r=g"
getAvatarUri = (account, width, height, dpr) ->
{ profile } = account
if profile.avatar?.match regexps.webProtocolRegExp
width = width * dpr
height = height * dpr
return proxifyUrl profile.avatar, { crop: yes, width, height }
return getGravatarUri account, width * dpr
module.exports = {
defaultAccountOrigin
defaultAccount
defaultTrollAccount
namelessAccount
getGravatarUri
getAvatarUri
}
|
[
{
"context": "ile is in streamline syntax!\n# https://github.com/Sage/streamlinejs\n\n{expect} = require 'chai'\nflows = r",
"end": 62,
"score": 0.9951738119125366,
"start": 58,
"tag": "USERNAME",
"value": "Sage"
},
{
"context": " data we're going to use:\ndanielData =\n name: 'Daniel'\naseemData =\n name: 'Aseem'\nmatData =\n name",
"end": 280,
"score": 0.9997158050537109,
"start": 274,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "lData =\n name: 'Daniel'\naseemData =\n name: 'Aseem'\nmatData =\n name: 'Mat'\n name2: 'Matt'\n ",
"end": 310,
"score": 0.9998207092285156,
"start": 305,
"tag": "NAME",
"value": "Aseem"
},
{
"context": "seemData =\n name: 'Aseem'\nmatData =\n name: 'Mat'\n name2: 'Matt'\n id: '12345'\nindexConfig =\n",
"end": 336,
"score": 0.9998573064804077,
"start": 333,
"tag": "NAME",
"value": "Mat"
},
{
"context": "me: 'Aseem'\nmatData =\n name: 'Mat'\n name2: 'Matt'\n id: '12345'\nindexConfig =\n type: 'fulltex",
"end": 354,
"score": 0.999769389629364,
"start": 350,
"tag": "NAME",
"value": "Matt"
},
{
"context": "'\n\n# instances we're going to reuse across tests:\ndaniel = null\naseem = null\nmat = null\nrelationship = nul",
"end": 573,
"score": 0.771730899810791,
"start": 567,
"tag": "NAME",
"value": "daniel"
},
{
"context": " we're going to reuse across tests:\ndaniel = null\naseem = null\nmat = null\nrelationship = null\n\n# index li",
"end": 586,
"score": 0.7554324865341187,
"start": 581,
"tag": "NAME",
"value": "aseem"
},
{
"context": "to reuse across tests:\ndaniel = null\naseem = null\nmat = null\nrelationship = null\n\n# index list\nnodeInde",
"end": 597,
"score": 0.9234272241592407,
"start": 594,
"tag": "NAME",
"value": "mat"
},
{
"context": "s': (_) ->\n daniel.index 'users', 'name', 'Daniel', _\n node = db.getIndexedNode 'users', 'na",
"end": 7879,
"score": 0.9993578195571899,
"start": 7873,
"tag": "NAME",
"value": "Daniel"
},
{
"context": " node = db.getIndexedNode 'users', 'name', 'Daniel', _\n expect(node).to.be.an 'object'\n ",
"end": 7941,
"score": 0.9993933439254761,
"start": 7935,
"tag": "NAME",
"value": "Daniel"
},
{
"context": ".be.true\n daniel.unindex 'users', 'name', 'Daniel', _ # Delete created node index\n # TODO FI",
"end": 8071,
"score": 0.9995113611221313,
"start": 8065,
"tag": "NAME",
"value": "Daniel"
},
{
"context": " daniel.index nodeCustomIndexName, 'name', 'Daniel', _\n nodes = db.queryNodeIndex nodeCustomI",
"end": 8617,
"score": 0.9992771148681641,
"start": 8611,
"tag": "NAME",
"value": "Daniel"
},
{
"context": " daniel.unindex nodeCustomIndexName, 'name', 'Daniel', _ # Delete created custom node index\n\n 'inde",
"end": 8833,
"score": 0.9993093013763428,
"start": 8827,
"tag": "NAME",
"value": "Daniel"
},
{
"context": " daniel.index nodeCustomIndexName2, 'name', 'Daniel', _\n nodes = db.queryNodeIndex nodeCustomI",
"end": 9001,
"score": 0.9990458488464355,
"start": 8995,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "s = db.queryNodeIndex nodeCustomIndexName2, 'name:Dan*', _\n expect(nodes).to.be.an 'array'\n ",
"end": 9071,
"score": 0.9994311332702637,
"start": 9068,
"tag": "NAME",
"value": "Dan"
},
{
"context": " daniel.unindex nodeCustomIndexName2, 'name', 'Daniel', _ # Delete created custom node index\n\n 'inde",
"end": 9219,
"score": 0.9992652535438538,
"start": 9213,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "->\n relationship.index 'follows', 'name', 'Daniel', _\n rel = db.getIndexedRelationship 'foll",
"end": 9347,
"score": 0.9990947246551514,
"start": 9341,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "l = db.getIndexedRelationship 'follows', 'name', 'Daniel', _\n expect(rel).to.be.an 'object'\n ",
"end": 9418,
"score": 0.9991414546966553,
"start": 9412,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "\n relationship.unindex 'follows', 'name', 'Daniel', _ # Delete created relationship index\n\n # Si",
"end": 9656,
"score": 0.9992679953575134,
"start": 9650,
"tag": "NAME",
"value": "Daniel"
},
{
"context": " relationship.index relCustomIndexName, 'name', 'Daniel', _\n rels = db.queryRelationshipIndex relC",
"end": 10160,
"score": 0.9994713068008423,
"start": 10154,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "relationship.unindex relCustomIndexName, 'name', 'Daniel', _ # Delete created custom relationship index\n\n ",
"end": 10477,
"score": 0.9995958209037781,
"start": 10471,
"tag": "NAME",
"value": "Daniel"
},
{
"context": " relationship.index relCustomIndexName2, 'name', 'Daniel', _\n rels = db.queryRelationshipIndex relC",
"end": 10666,
"score": 0.999083399772644,
"start": 10660,
"tag": "NAME",
"value": "Daniel"
},
{
"context": "elationship.unindex relCustomIndexName2, 'name', 'Daniel', _ # Delete created custom relationship index\n\n ",
"end": 10985,
"score": 0.9993184208869934,
"start": 10979,
"tag": "NAME",
"value": "Daniel"
},
{
"context": " (_) ->\n mat.index nodeIndexName, 'name', 'Mat', _\n mat.index nodeIndexName, 'name', 'Mat",
"end": 11107,
"score": 0.9998672008514404,
"start": 11104,
"tag": "NAME",
"value": "Mat"
},
{
"context": "Mat', _\n mat.index nodeIndexName, 'name', 'Matt', _\n mat.index nodeIndexName, 'id', '12345",
"end": 11158,
"score": 0.9991964101791382,
"start": 11154,
"tag": "NAME",
"value": "Matt"
},
{
"context": "value\n mat.unindex nodeIndexName, 'name', 'Matt', _\n mattNode = db.getIndexedNode nodeInde",
"end": 11329,
"score": 0.9990761280059814,
"start": 11325,
"tag": "NAME",
"value": "Matt"
},
{
"context": "tNode = db.getIndexedNode nodeIndexName, 'name', 'Matt', _\n matNode = db.getIndexedNode nodeIndex",
"end": 11399,
"score": 0.9992352724075317,
"start": 11395,
"tag": "NAME",
"value": "Matt"
},
{
"context": "tNode = db.getIndexedNode nodeIndexName, 'name', 'Mat', _\n idNode = db.getIndexedNode nodeIndexN",
"end": 11467,
"score": 0.9998717308044434,
"start": 11464,
"tag": "NAME",
"value": "Mat"
},
{
"context": "tNode = db.getIndexedNode nodeIndexName, 'name', 'Matt', _\n matNode = db.getIndexedNode nodeIndex",
"end": 11914,
"score": 0.99860680103302,
"start": 11910,
"tag": "NAME",
"value": "Matt"
},
{
"context": "tNode = db.getIndexedNode nodeIndexName, 'name', 'Mat', _\n idNode = db.getIndexedNode nodeIndexN",
"end": 11982,
"score": 0.999685525894165,
"start": 11979,
"tag": "NAME",
"value": "Mat"
},
{
"context": "tNode = db.getIndexedNode nodeIndexName, 'name', 'Matt', _\n matNode = db.getIndexedNode nodeIndex",
"end": 12369,
"score": 0.9976835250854492,
"start": 12365,
"tag": "NAME",
"value": "Matt"
},
{
"context": "tNode = db.getIndexedNode nodeIndexName, 'name', 'Mat', _\n idNode = db.getIndexedNode nodeIndexN",
"end": 12437,
"score": 0.9997034668922424,
"start": 12434,
"tag": "NAME",
"value": "Mat"
},
{
"context": " relationship.index relIndexName, 'name', 'Mat', _\n relationship.index relIndexName, 'nam",
"end": 12710,
"score": 0.9996721744537354,
"start": 12707,
"tag": "NAME",
"value": "Mat"
},
{
"context": " relationship.index relIndexName, 'name', 'Matt', _\n relationship.index relIndexName, 'id'",
"end": 12769,
"score": 0.9973113536834717,
"start": 12765,
"tag": "NAME",
"value": "Matt"
},
{
"context": " relationship.unindex relIndexName, 'name', 'Matt', _\n mattRelationship = db.getIndexedRelat",
"end": 12964,
"score": 0.9959666132926941,
"start": 12960,
"tag": "NAME",
"value": "Matt"
},
{
"context": " db.getIndexedRelationship relIndexName, 'name', 'Matt', _\n matRelationship = db.getIndexedRelati",
"end": 13049,
"score": 0.9959229826927185,
"start": 13045,
"tag": "NAME",
"value": "Matt"
},
{
"context": " db.getIndexedRelationship relIndexName, 'name', 'Mat', _\n idRelationship = db.getIndexedRelatio",
"end": 13132,
"score": 0.9996725916862488,
"start": 13129,
"tag": "NAME",
"value": "Mat"
},
{
"context": " db.getIndexedRelationship relIndexName, 'name', 'Matt', _\n matRelationship = db.getIndexedRelati",
"end": 13665,
"score": 0.9988648891448975,
"start": 13661,
"tag": "NAME",
"value": "Matt"
},
{
"context": " db.getIndexedRelationship relIndexName, 'name', 'Mat', _\n idRelationship = db.getIndexedRelatio",
"end": 13748,
"score": 0.9998423457145691,
"start": 13745,
"tag": "NAME",
"value": "Mat"
},
{
"context": " db.getIndexedRelationship relIndexName, 'name', 'Matt', _\n matRelationship = db.getIndexedRelati",
"end": 14213,
"score": 0.9984045028686523,
"start": 14209,
"tag": "NAME",
"value": "Matt"
},
{
"context": " db.getIndexedRelationship relIndexName, 'name', 'Mat', _\n idRelationship = db.getIndexedRelatio",
"end": 14296,
"score": 0.9998549222946167,
"start": 14293,
"tag": "NAME",
"value": "Mat"
},
{
"context": "t for now:\n # expect(relationship.start).to.eql daniel\n # expect(relationship.end).to.eql aseem\n\n ",
"end": 15587,
"score": 0.968026340007782,
"start": 15581,
"tag": "NAME",
"value": "daniel"
},
{
"context": "o.eql daniel\n # expect(relationship.end).to.eql aseem\n\n # TEMP so for the time being, we're test",
"end": 15627,
"score": 0.6437289118766785,
"start": 15626,
"tag": "USERNAME",
"value": "a"
},
{
"context": "eql daniel\n # expect(relationship.end).to.eql aseem\n\n # TEMP so for the time being, we're testing ",
"end": 15631,
"score": 0.5904066562652588,
"start": 15627,
"tag": "NAME",
"value": "seem"
},
{
"context": "ject'\n expect(relationship.start.self).to.equal daniel.self\n expect(relationship.end.self).to.equal a",
"end": 15904,
"score": 0.7327951192855835,
"start": 15898,
"tag": "NAME",
"value": "daniel"
}
] | node_modules/neo4j/test/crud._coffee | aranoah/journey-planner | 484 | # this file is in streamline syntax!
# https://github.com/Sage/streamlinejs
{expect} = require 'chai'
flows = require 'streamline/lib/util/flows'
neo4j = require '..'
db = new neo4j.GraphDatabase 'http://localhost:7474'
# data we're going to use:
danielData =
name: 'Daniel'
aseemData =
name: 'Aseem'
matData =
name: 'Mat'
name2: 'Matt'
id: '12345'
indexConfig =
type: 'fulltext'
provider: 'lucene'
to_lower_case: 'true'
indexConfig2 =
type: 'fulltext'
to_lower_case: 'false'
# instances we're going to reuse across tests:
daniel = null
aseem = null
mat = null
relationship = null
# index list
nodeIndexName = 'testUsers'
nodeCustomIndexName = 'testUsersFullTextLowercase'
nodeCustomIndexName2 = 'testUsersFullTextNoLowercase'
relIndexName = 'testFollows'
relCustomIndexName = 'testFollowsFullTextLowercase'
relCustomIndexName2 = 'testFollowsFullTextNoLowercase'
## TESTS:
@crud =
'getNodeIndexes': (_) ->
nodeIndexes = db.getNodeIndexes _
# we should always get back an array of names, but the array should
# have map-like properties for the index config details too:
expect(nodeIndexes).to.be.an 'array'
for name in nodeIndexes
expect(nodeIndexes).to.contain.key name
expect(nodeIndexes[name]).to.be.an 'object'
expect(nodeIndexes[name].type).to.be.a 'string'
'getRelationshipIndexes': (_) ->
relIndexes = db.getRelationshipIndexes _
# we should always get back an array of names, but the array should
# have map-like properties for the index config details too:
expect(relIndexes).to.be.an 'array'
for name in relIndexes
expect(relIndexes).to.contain.key name
expect(relIndexes[name]).to.be.an 'object'
expect(relIndexes[name].type).to.be.a 'string'
'createNodeIndex': (_) ->
db.createNodeIndex nodeIndexName, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeIndexName
expect(nodeIndexes).to.contain.key nodeIndexName
'createNodeIndex custom fulltext with lowercase': (_) ->
db.createNodeIndex nodeCustomIndexName, indexConfig, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeCustomIndexName
expect(nodeIndexes).to.contain.key nodeCustomIndexName
'createNodeIndex custom fulltext with no lowercase': (_) ->
db.createNodeIndex nodeCustomIndexName2, indexConfig, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeCustomIndexName2
expect(nodeIndexes).to.contain.key nodeCustomIndexName2
'createRelationshipIndex': (_) ->
db.createRelationshipIndex relIndexName, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relIndexName
expect(relIndexes).to.contain.key relIndexName
'createRelationshipIndex custom fulltext with lowercase': (_) ->
db.createRelationshipIndex relCustomIndexName, indexConfig, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relCustomIndexName
expect(relIndexes).to.contain.key relCustomIndexName
'createRelationshipIndex custom fulltext with no lowercase': (_) ->
db.createRelationshipIndex relCustomIndexName2, indexConfig, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relCustomIndexName2
expect(relIndexes).to.contain.key relCustomIndexName2
'create nodes': (_) ->
daniel = db.createNode danielData
aseem = db.createNode aseemData
mat = db.createNode matData
expect(daniel).to.be.an 'object'
expect(daniel.exists).to.be.false
expect(daniel.self).to.not.exist
# TODO should this really be tested? is @self a public API?
# maybe it should just have a better name than 'self'? like url?
'save nodes': (_) ->
# test futures here by saving both aseem and daniel in parallel:
flows.collect _, [
daniel.save not _
aseem.save not _
mat.save not _
]
expect(daniel.exists).to.be.true
expect(daniel.self).to.be.a 'string' # TODO see above
expect(daniel.self).to.not.equal aseem.self # TODO see above
expect(daniel.data).to.eql danielData
expect(aseem.exists).to.be.true
expect(aseem.self).to.be.a 'string' # TODO see above
expect(aseem.self).to.not.equal daniel.self # TODO see above
expect(aseem.data).to.eql aseemData
'create realtionships': (_) ->
relationship = daniel.createRelationshipTo aseem, 'follows', {created: Date.now()}, _
testRelationship relationship
# in this case, the start and end *should* be our instances
expect(relationship.start).to.eq daniel
expect(relationship.end).to.eq aseem
'serialize & de-serialize nodes': (_) ->
json = JSON.stringify [aseem, daniel]
obj = JSON.parse json, db.reviveJSON
expect(obj).to.be.an 'array'
expect(obj).to.have.length 2
[aseem2, daniel2] = obj
expect(aseem2).to.be.an 'object'
expect(aseem2.data).to.eql aseem.data
expect(daniel2).to.be.an 'object'
expect(daniel2.data).to.eql daniel.data
'serialize & de-serialize relationship': (_) ->
json = JSON.stringify {foo: {bar: relationship}}
obj = JSON.parse json, db.reviveJSON
expect(obj).to.be.an 'object'
expect(obj.foo).to.be.an 'object'
rel2 = obj.foo.bar
expect(rel2).to.be.an 'object'
expect(rel2.data).to.eql relationship.data
'fetch relationships': (_) ->
# test futures by *initiating* getRelationships() for both aseem and daniel in
# parallel. note how we'll still "collect" (process) the futures in sequence.
danielFuture = daniel.getRelationships 'follows', not _
aseemFuture = aseem.getRelationships 'follows', not _
relationships = danielFuture _
testRelationships relationships
# in this case, the start *should* be our instance
expect(relationships[0].start).to.eq daniel
relationships = aseemFuture _
testRelationships relationships
# in this case, the end *should* be our instance
expect(relationships[0].end).to.eq aseem
'traverse nodes': (_) ->
# same parallel lookups using futures:
danielFuture = daniel.getRelationshipNodes 'follows', not _
aseemFuture = aseem.getRelationshipNodes 'follows', not _
nodes = danielFuture _
expect(nodes).to.be.an 'array'
expect(nodes).to.have.length 1
expect(nodes[0]).to.be.an 'object'
expect(nodes[0].exists).to.be.true
expect(nodes[0].self).to.equal aseem.self # TODO see above
expect(nodes[0].data).to.eql aseemData
# TODO see how this is misleading? we don't respect or report direction!
nodes = aseemFuture _
expect(nodes).to.be.an 'array'
expect(nodes).to.have.length 1
expect(nodes[0]).to.be.an 'object'
expect(nodes[0].exists).to.be.true
expect(nodes[0].self).to.equal daniel.self # TODO see above
expect(nodes[0].data).to.eql danielData
'index nodes': (_) ->
daniel.index 'users', 'name', 'Daniel', _
node = db.getIndexedNode 'users', 'name', 'Daniel', _
expect(node).to.be.an 'object'
expect(node.exists).to.be.true
daniel.unindex 'users', 'name', 'Daniel', _ # Delete created node index
# TODO FIXME we're not unindexing these nodes after each test, so in fact the
# returned node and data might be from a previous test!
# expect(node.self).to.equal daniel.self # TODO see above
# expect(node.data).to.eql danielData
# Since fulltext search is using Lucene Query Language we cannot use getIndexedNode, instead we use queryNodeIndex method
'index nodes to custom fulltext index with lowercase': (_) ->
daniel.index nodeCustomIndexName, 'name', 'Daniel', _
nodes = db.queryNodeIndex nodeCustomIndexName, 'name:dan*', _
expect(nodes).to.be.an 'array'
expect(nodes[0].exists).to.be.true
daniel.unindex nodeCustomIndexName, 'name', 'Daniel', _ # Delete created custom node index
'index nodes to custom fulltext index with no lowercase': (_) ->
daniel.index nodeCustomIndexName2, 'name', 'Daniel', _
nodes = db.queryNodeIndex nodeCustomIndexName2, 'name:Dan*', _
expect(nodes).to.be.an 'array'
expect(nodes[0].exists).to.be.true
daniel.unindex nodeCustomIndexName2, 'name', 'Daniel', _ # Delete created custom node index
'index relationships': (_) ->
relationship.index 'follows', 'name', 'Daniel', _
rel = db.getIndexedRelationship 'follows', 'name', 'Daniel', _
expect(rel).to.be.an 'object'
expect(rel.exists).to.be.true
expect(rel.self).to.be.a 'string' # TODO see above
expect(rel.type).to.eq 'follows'
relationship.unindex 'follows', 'name', 'Daniel', _ # Delete created relationship index
# Since fulltext search is using Lucene Query Language we cannot use getIndexedRelationship, instead we use queryRelationshipIndex method
# queryRelationshipIndex method was not implemented, so I implemented it for this method to work
# Due to comments of queryNodeIndex method, queryRelationshipIndex was a to-do
'index relationships to custom fulltext index with lowercase': (_) ->
relationship.index relCustomIndexName, 'name', 'Daniel', _
rels = db.queryRelationshipIndex relCustomIndexName, 'name:*niE*', _
expect(rels).to.be.an 'array'
expect(rels[0].exists).to.be.true
expect(rels[0].self).to.be.a 'string'
expect(rels[0].type).to.eq 'follows'
relationship.unindex relCustomIndexName, 'name', 'Daniel', _ # Delete created custom relationship index
'index relationships to custom fulltext index with no lowercase': (_) ->
relationship.index relCustomIndexName2, 'name', 'Daniel', _
rels = db.queryRelationshipIndex relCustomIndexName2, 'name:*nie*', _
expect(rels).to.be.an 'array'
expect(rels[0].exists).to.be.true
expect(rels[0].self).to.be.a 'string'
expect(rels[0].type).to.eq 'follows'
relationship.unindex relCustomIndexName2, 'name', 'Daniel', _ # Delete created custom relationship index
'unindex nodes': (_) ->
mat.index nodeIndexName, 'name', 'Mat', _
mat.index nodeIndexName, 'name', 'Matt', _
mat.index nodeIndexName, 'id', '12345', _
# delete entries for the node that match index, key, value
mat.unindex nodeIndexName, 'name', 'Matt', _
mattNode = db.getIndexedNode nodeIndexName, 'name', 'Matt', _
matNode = db.getIndexedNode nodeIndexName, 'name', 'Mat', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.be.an 'object'
expect(matNode.exists).to.be.true
expect(idNode).to.be.an 'object'
expect(idNode.exists).to.be.true
# delete entries for the node that match index, key
mat.unindex nodeIndexName, 'name', _
mattNode = db.getIndexedNode nodeIndexName, 'name', 'Matt', _
matNode = db.getIndexedNode nodeIndexName, 'name', 'Mat', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.not.exist
expect(idNode).to.be.an 'object'
expect(idNode.exists).to.be.true
# delete entries for the node that match index
mat.unindex nodeIndexName, _
mattNode = db.getIndexedNode nodeIndexName, 'name', 'Matt', _
matNode = db.getIndexedNode nodeIndexName, 'name', 'Mat', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.not.exist
expect(idNode).to.not.exist
'unindex relationships': (_) ->
relationship.index relIndexName, 'name', 'Mat', _
relationship.index relIndexName, 'name', 'Matt', _
relationship.index relIndexName, 'id', '12345', _
# delete entries for the relationship that match index, key, value
relationship.unindex relIndexName, 'name', 'Matt', _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', 'Matt', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', 'Mat', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.be.an 'object'
expect(matRelationship.exists).to.be.true
expect(idRelationship).to.be.an 'object'
expect(idRelationship.exists).to.be.true
# delete entries for the relationship that match index, key
relationship.unindex relIndexName, 'name', _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', 'Matt', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', 'Mat', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.not.exist
expect(idRelationship).to.be.an 'object'
expect(idRelationship.exists).to.be.true
# delete entries for the relationship that match index
relationship.unindex relIndexName, _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', 'Matt', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', 'Mat', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.not.exist
expect(idRelationship).to.not.exist
# TODO test deleting nodes and relationships!
'deleteNodeIndex': (_) ->
db.deleteNodeIndex nodeIndexName, _
# our index should no longer be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.not.contain nodeIndexName
expect(nodeIndexes).to.not.contain.key nodeIndexName
'deleteRelationshipIndex': (_) ->
db.deleteRelationshipIndex relIndexName, _
# our index should no longer be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.not.contain relIndexName
expect(relIndexes).to.not.contain.key relIndexName
## HELPERS:
testRelationship = (relationship) ->
expect(relationship).to.be.an 'object'
expect(relationship.exists).to.be.true
expect(relationship.self).to.be.a 'string' # TODO see above
expect(relationship.type).to.eq 'follows'
# in some cases, the start/end nodes may not be "filled", so these are
# commented out for now:
# expect(relationship.start).to.eql daniel
# expect(relationship.end).to.eql aseem
# TEMP so for the time being, we're testing that at least
# their "selves" match. not sure if this is a public API.
expect(relationship.start).to.be.an 'object'
expect(relationship.end).to.be.an 'object'
expect(relationship.start.self).to.equal daniel.self
expect(relationship.end.self).to.equal aseem.self
testRelationships = (relationships) ->
expect(relationships).to.be.an 'array'
expect(relationships).to.have.length 1
testRelationship relationships[0]
| 98689 | # this file is in streamline syntax!
# https://github.com/Sage/streamlinejs
{expect} = require 'chai'
flows = require 'streamline/lib/util/flows'
neo4j = require '..'
db = new neo4j.GraphDatabase 'http://localhost:7474'
# data we're going to use:
danielData =
name: '<NAME>'
aseemData =
name: '<NAME>'
matData =
name: '<NAME>'
name2: '<NAME>'
id: '12345'
indexConfig =
type: 'fulltext'
provider: 'lucene'
to_lower_case: 'true'
indexConfig2 =
type: 'fulltext'
to_lower_case: 'false'
# instances we're going to reuse across tests:
<NAME> = null
<NAME> = null
<NAME> = null
relationship = null
# index list
nodeIndexName = 'testUsers'
nodeCustomIndexName = 'testUsersFullTextLowercase'
nodeCustomIndexName2 = 'testUsersFullTextNoLowercase'
relIndexName = 'testFollows'
relCustomIndexName = 'testFollowsFullTextLowercase'
relCustomIndexName2 = 'testFollowsFullTextNoLowercase'
## TESTS:
@crud =
'getNodeIndexes': (_) ->
nodeIndexes = db.getNodeIndexes _
# we should always get back an array of names, but the array should
# have map-like properties for the index config details too:
expect(nodeIndexes).to.be.an 'array'
for name in nodeIndexes
expect(nodeIndexes).to.contain.key name
expect(nodeIndexes[name]).to.be.an 'object'
expect(nodeIndexes[name].type).to.be.a 'string'
'getRelationshipIndexes': (_) ->
relIndexes = db.getRelationshipIndexes _
# we should always get back an array of names, but the array should
# have map-like properties for the index config details too:
expect(relIndexes).to.be.an 'array'
for name in relIndexes
expect(relIndexes).to.contain.key name
expect(relIndexes[name]).to.be.an 'object'
expect(relIndexes[name].type).to.be.a 'string'
'createNodeIndex': (_) ->
db.createNodeIndex nodeIndexName, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeIndexName
expect(nodeIndexes).to.contain.key nodeIndexName
'createNodeIndex custom fulltext with lowercase': (_) ->
db.createNodeIndex nodeCustomIndexName, indexConfig, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeCustomIndexName
expect(nodeIndexes).to.contain.key nodeCustomIndexName
'createNodeIndex custom fulltext with no lowercase': (_) ->
db.createNodeIndex nodeCustomIndexName2, indexConfig, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeCustomIndexName2
expect(nodeIndexes).to.contain.key nodeCustomIndexName2
'createRelationshipIndex': (_) ->
db.createRelationshipIndex relIndexName, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relIndexName
expect(relIndexes).to.contain.key relIndexName
'createRelationshipIndex custom fulltext with lowercase': (_) ->
db.createRelationshipIndex relCustomIndexName, indexConfig, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relCustomIndexName
expect(relIndexes).to.contain.key relCustomIndexName
'createRelationshipIndex custom fulltext with no lowercase': (_) ->
db.createRelationshipIndex relCustomIndexName2, indexConfig, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relCustomIndexName2
expect(relIndexes).to.contain.key relCustomIndexName2
'create nodes': (_) ->
daniel = db.createNode danielData
aseem = db.createNode aseemData
mat = db.createNode matData
expect(daniel).to.be.an 'object'
expect(daniel.exists).to.be.false
expect(daniel.self).to.not.exist
# TODO should this really be tested? is @self a public API?
# maybe it should just have a better name than 'self'? like url?
'save nodes': (_) ->
# test futures here by saving both aseem and daniel in parallel:
flows.collect _, [
daniel.save not _
aseem.save not _
mat.save not _
]
expect(daniel.exists).to.be.true
expect(daniel.self).to.be.a 'string' # TODO see above
expect(daniel.self).to.not.equal aseem.self # TODO see above
expect(daniel.data).to.eql danielData
expect(aseem.exists).to.be.true
expect(aseem.self).to.be.a 'string' # TODO see above
expect(aseem.self).to.not.equal daniel.self # TODO see above
expect(aseem.data).to.eql aseemData
'create realtionships': (_) ->
relationship = daniel.createRelationshipTo aseem, 'follows', {created: Date.now()}, _
testRelationship relationship
# in this case, the start and end *should* be our instances
expect(relationship.start).to.eq daniel
expect(relationship.end).to.eq aseem
'serialize & de-serialize nodes': (_) ->
json = JSON.stringify [aseem, daniel]
obj = JSON.parse json, db.reviveJSON
expect(obj).to.be.an 'array'
expect(obj).to.have.length 2
[aseem2, daniel2] = obj
expect(aseem2).to.be.an 'object'
expect(aseem2.data).to.eql aseem.data
expect(daniel2).to.be.an 'object'
expect(daniel2.data).to.eql daniel.data
'serialize & de-serialize relationship': (_) ->
json = JSON.stringify {foo: {bar: relationship}}
obj = JSON.parse json, db.reviveJSON
expect(obj).to.be.an 'object'
expect(obj.foo).to.be.an 'object'
rel2 = obj.foo.bar
expect(rel2).to.be.an 'object'
expect(rel2.data).to.eql relationship.data
'fetch relationships': (_) ->
# test futures by *initiating* getRelationships() for both aseem and daniel in
# parallel. note how we'll still "collect" (process) the futures in sequence.
danielFuture = daniel.getRelationships 'follows', not _
aseemFuture = aseem.getRelationships 'follows', not _
relationships = danielFuture _
testRelationships relationships
# in this case, the start *should* be our instance
expect(relationships[0].start).to.eq daniel
relationships = aseemFuture _
testRelationships relationships
# in this case, the end *should* be our instance
expect(relationships[0].end).to.eq aseem
'traverse nodes': (_) ->
# same parallel lookups using futures:
danielFuture = daniel.getRelationshipNodes 'follows', not _
aseemFuture = aseem.getRelationshipNodes 'follows', not _
nodes = danielFuture _
expect(nodes).to.be.an 'array'
expect(nodes).to.have.length 1
expect(nodes[0]).to.be.an 'object'
expect(nodes[0].exists).to.be.true
expect(nodes[0].self).to.equal aseem.self # TODO see above
expect(nodes[0].data).to.eql aseemData
# TODO see how this is misleading? we don't respect or report direction!
nodes = aseemFuture _
expect(nodes).to.be.an 'array'
expect(nodes).to.have.length 1
expect(nodes[0]).to.be.an 'object'
expect(nodes[0].exists).to.be.true
expect(nodes[0].self).to.equal daniel.self # TODO see above
expect(nodes[0].data).to.eql danielData
'index nodes': (_) ->
daniel.index 'users', 'name', '<NAME>', _
node = db.getIndexedNode 'users', 'name', '<NAME>', _
expect(node).to.be.an 'object'
expect(node.exists).to.be.true
daniel.unindex 'users', 'name', '<NAME>', _ # Delete created node index
# TODO FIXME we're not unindexing these nodes after each test, so in fact the
# returned node and data might be from a previous test!
# expect(node.self).to.equal daniel.self # TODO see above
# expect(node.data).to.eql danielData
# Since fulltext search is using Lucene Query Language we cannot use getIndexedNode, instead we use queryNodeIndex method
'index nodes to custom fulltext index with lowercase': (_) ->
daniel.index nodeCustomIndexName, 'name', '<NAME>', _
nodes = db.queryNodeIndex nodeCustomIndexName, 'name:dan*', _
expect(nodes).to.be.an 'array'
expect(nodes[0].exists).to.be.true
daniel.unindex nodeCustomIndexName, 'name', '<NAME>', _ # Delete created custom node index
'index nodes to custom fulltext index with no lowercase': (_) ->
daniel.index nodeCustomIndexName2, 'name', '<NAME>', _
nodes = db.queryNodeIndex nodeCustomIndexName2, 'name:<NAME>*', _
expect(nodes).to.be.an 'array'
expect(nodes[0].exists).to.be.true
daniel.unindex nodeCustomIndexName2, 'name', '<NAME>', _ # Delete created custom node index
'index relationships': (_) ->
relationship.index 'follows', 'name', '<NAME>', _
rel = db.getIndexedRelationship 'follows', 'name', '<NAME>', _
expect(rel).to.be.an 'object'
expect(rel.exists).to.be.true
expect(rel.self).to.be.a 'string' # TODO see above
expect(rel.type).to.eq 'follows'
relationship.unindex 'follows', 'name', '<NAME>', _ # Delete created relationship index
# Since fulltext search is using Lucene Query Language we cannot use getIndexedRelationship, instead we use queryRelationshipIndex method
# queryRelationshipIndex method was not implemented, so I implemented it for this method to work
# Due to comments of queryNodeIndex method, queryRelationshipIndex was a to-do
'index relationships to custom fulltext index with lowercase': (_) ->
relationship.index relCustomIndexName, 'name', '<NAME>', _
rels = db.queryRelationshipIndex relCustomIndexName, 'name:*niE*', _
expect(rels).to.be.an 'array'
expect(rels[0].exists).to.be.true
expect(rels[0].self).to.be.a 'string'
expect(rels[0].type).to.eq 'follows'
relationship.unindex relCustomIndexName, 'name', '<NAME>', _ # Delete created custom relationship index
'index relationships to custom fulltext index with no lowercase': (_) ->
relationship.index relCustomIndexName2, 'name', '<NAME>', _
rels = db.queryRelationshipIndex relCustomIndexName2, 'name:*nie*', _
expect(rels).to.be.an 'array'
expect(rels[0].exists).to.be.true
expect(rels[0].self).to.be.a 'string'
expect(rels[0].type).to.eq 'follows'
relationship.unindex relCustomIndexName2, 'name', '<NAME>', _ # Delete created custom relationship index
'unindex nodes': (_) ->
mat.index nodeIndexName, 'name', '<NAME>', _
mat.index nodeIndexName, 'name', '<NAME>', _
mat.index nodeIndexName, 'id', '12345', _
# delete entries for the node that match index, key, value
mat.unindex nodeIndexName, 'name', '<NAME>', _
mattNode = db.getIndexedNode nodeIndexName, 'name', '<NAME>', _
matNode = db.getIndexedNode nodeIndexName, 'name', '<NAME>', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.be.an 'object'
expect(matNode.exists).to.be.true
expect(idNode).to.be.an 'object'
expect(idNode.exists).to.be.true
# delete entries for the node that match index, key
mat.unindex nodeIndexName, 'name', _
mattNode = db.getIndexedNode nodeIndexName, 'name', '<NAME>', _
matNode = db.getIndexedNode nodeIndexName, 'name', '<NAME>', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.not.exist
expect(idNode).to.be.an 'object'
expect(idNode.exists).to.be.true
# delete entries for the node that match index
mat.unindex nodeIndexName, _
mattNode = db.getIndexedNode nodeIndexName, 'name', '<NAME>', _
matNode = db.getIndexedNode nodeIndexName, 'name', '<NAME>', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.not.exist
expect(idNode).to.not.exist
'unindex relationships': (_) ->
relationship.index relIndexName, 'name', '<NAME>', _
relationship.index relIndexName, 'name', '<NAME>', _
relationship.index relIndexName, 'id', '12345', _
# delete entries for the relationship that match index, key, value
relationship.unindex relIndexName, 'name', '<NAME>', _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', '<NAME>', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', '<NAME>', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.be.an 'object'
expect(matRelationship.exists).to.be.true
expect(idRelationship).to.be.an 'object'
expect(idRelationship.exists).to.be.true
# delete entries for the relationship that match index, key
relationship.unindex relIndexName, 'name', _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', '<NAME>', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', '<NAME>', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.not.exist
expect(idRelationship).to.be.an 'object'
expect(idRelationship.exists).to.be.true
# delete entries for the relationship that match index
relationship.unindex relIndexName, _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', '<NAME>', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', '<NAME>', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.not.exist
expect(idRelationship).to.not.exist
# TODO test deleting nodes and relationships!
'deleteNodeIndex': (_) ->
db.deleteNodeIndex nodeIndexName, _
# our index should no longer be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.not.contain nodeIndexName
expect(nodeIndexes).to.not.contain.key nodeIndexName
'deleteRelationshipIndex': (_) ->
db.deleteRelationshipIndex relIndexName, _
# our index should no longer be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.not.contain relIndexName
expect(relIndexes).to.not.contain.key relIndexName
## HELPERS:
testRelationship = (relationship) ->
expect(relationship).to.be.an 'object'
expect(relationship.exists).to.be.true
expect(relationship.self).to.be.a 'string' # TODO see above
expect(relationship.type).to.eq 'follows'
# in some cases, the start/end nodes may not be "filled", so these are
# commented out for now:
# expect(relationship.start).to.eql <NAME>
# expect(relationship.end).to.eql a<NAME>
# TEMP so for the time being, we're testing that at least
# their "selves" match. not sure if this is a public API.
expect(relationship.start).to.be.an 'object'
expect(relationship.end).to.be.an 'object'
expect(relationship.start.self).to.equal <NAME>.self
expect(relationship.end.self).to.equal aseem.self
testRelationships = (relationships) ->
expect(relationships).to.be.an 'array'
expect(relationships).to.have.length 1
testRelationship relationships[0]
| true | # this file is in streamline syntax!
# https://github.com/Sage/streamlinejs
{expect} = require 'chai'
flows = require 'streamline/lib/util/flows'
neo4j = require '..'
db = new neo4j.GraphDatabase 'http://localhost:7474'
# data we're going to use:
danielData =
name: 'PI:NAME:<NAME>END_PI'
aseemData =
name: 'PI:NAME:<NAME>END_PI'
matData =
name: 'PI:NAME:<NAME>END_PI'
name2: 'PI:NAME:<NAME>END_PI'
id: '12345'
indexConfig =
type: 'fulltext'
provider: 'lucene'
to_lower_case: 'true'
indexConfig2 =
type: 'fulltext'
to_lower_case: 'false'
# instances we're going to reuse across tests:
PI:NAME:<NAME>END_PI = null
PI:NAME:<NAME>END_PI = null
PI:NAME:<NAME>END_PI = null
relationship = null
# index list
nodeIndexName = 'testUsers'
nodeCustomIndexName = 'testUsersFullTextLowercase'
nodeCustomIndexName2 = 'testUsersFullTextNoLowercase'
relIndexName = 'testFollows'
relCustomIndexName = 'testFollowsFullTextLowercase'
relCustomIndexName2 = 'testFollowsFullTextNoLowercase'
## TESTS:
@crud =
'getNodeIndexes': (_) ->
nodeIndexes = db.getNodeIndexes _
# we should always get back an array of names, but the array should
# have map-like properties for the index config details too:
expect(nodeIndexes).to.be.an 'array'
for name in nodeIndexes
expect(nodeIndexes).to.contain.key name
expect(nodeIndexes[name]).to.be.an 'object'
expect(nodeIndexes[name].type).to.be.a 'string'
'getRelationshipIndexes': (_) ->
relIndexes = db.getRelationshipIndexes _
# we should always get back an array of names, but the array should
# have map-like properties for the index config details too:
expect(relIndexes).to.be.an 'array'
for name in relIndexes
expect(relIndexes).to.contain.key name
expect(relIndexes[name]).to.be.an 'object'
expect(relIndexes[name].type).to.be.a 'string'
'createNodeIndex': (_) ->
db.createNodeIndex nodeIndexName, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeIndexName
expect(nodeIndexes).to.contain.key nodeIndexName
'createNodeIndex custom fulltext with lowercase': (_) ->
db.createNodeIndex nodeCustomIndexName, indexConfig, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeCustomIndexName
expect(nodeIndexes).to.contain.key nodeCustomIndexName
'createNodeIndex custom fulltext with no lowercase': (_) ->
db.createNodeIndex nodeCustomIndexName2, indexConfig, _
# our newly created index should now be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.contain nodeCustomIndexName2
expect(nodeIndexes).to.contain.key nodeCustomIndexName2
'createRelationshipIndex': (_) ->
db.createRelationshipIndex relIndexName, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relIndexName
expect(relIndexes).to.contain.key relIndexName
'createRelationshipIndex custom fulltext with lowercase': (_) ->
db.createRelationshipIndex relCustomIndexName, indexConfig, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relCustomIndexName
expect(relIndexes).to.contain.key relCustomIndexName
'createRelationshipIndex custom fulltext with no lowercase': (_) ->
db.createRelationshipIndex relCustomIndexName2, indexConfig, _
# our newly created index should now be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.contain relCustomIndexName2
expect(relIndexes).to.contain.key relCustomIndexName2
'create nodes': (_) ->
daniel = db.createNode danielData
aseem = db.createNode aseemData
mat = db.createNode matData
expect(daniel).to.be.an 'object'
expect(daniel.exists).to.be.false
expect(daniel.self).to.not.exist
# TODO should this really be tested? is @self a public API?
# maybe it should just have a better name than 'self'? like url?
'save nodes': (_) ->
# test futures here by saving both aseem and daniel in parallel:
flows.collect _, [
daniel.save not _
aseem.save not _
mat.save not _
]
expect(daniel.exists).to.be.true
expect(daniel.self).to.be.a 'string' # TODO see above
expect(daniel.self).to.not.equal aseem.self # TODO see above
expect(daniel.data).to.eql danielData
expect(aseem.exists).to.be.true
expect(aseem.self).to.be.a 'string' # TODO see above
expect(aseem.self).to.not.equal daniel.self # TODO see above
expect(aseem.data).to.eql aseemData
'create realtionships': (_) ->
relationship = daniel.createRelationshipTo aseem, 'follows', {created: Date.now()}, _
testRelationship relationship
# in this case, the start and end *should* be our instances
expect(relationship.start).to.eq daniel
expect(relationship.end).to.eq aseem
'serialize & de-serialize nodes': (_) ->
json = JSON.stringify [aseem, daniel]
obj = JSON.parse json, db.reviveJSON
expect(obj).to.be.an 'array'
expect(obj).to.have.length 2
[aseem2, daniel2] = obj
expect(aseem2).to.be.an 'object'
expect(aseem2.data).to.eql aseem.data
expect(daniel2).to.be.an 'object'
expect(daniel2.data).to.eql daniel.data
'serialize & de-serialize relationship': (_) ->
json = JSON.stringify {foo: {bar: relationship}}
obj = JSON.parse json, db.reviveJSON
expect(obj).to.be.an 'object'
expect(obj.foo).to.be.an 'object'
rel2 = obj.foo.bar
expect(rel2).to.be.an 'object'
expect(rel2.data).to.eql relationship.data
'fetch relationships': (_) ->
# test futures by *initiating* getRelationships() for both aseem and daniel in
# parallel. note how we'll still "collect" (process) the futures in sequence.
danielFuture = daniel.getRelationships 'follows', not _
aseemFuture = aseem.getRelationships 'follows', not _
relationships = danielFuture _
testRelationships relationships
# in this case, the start *should* be our instance
expect(relationships[0].start).to.eq daniel
relationships = aseemFuture _
testRelationships relationships
# in this case, the end *should* be our instance
expect(relationships[0].end).to.eq aseem
'traverse nodes': (_) ->
# same parallel lookups using futures:
danielFuture = daniel.getRelationshipNodes 'follows', not _
aseemFuture = aseem.getRelationshipNodes 'follows', not _
nodes = danielFuture _
expect(nodes).to.be.an 'array'
expect(nodes).to.have.length 1
expect(nodes[0]).to.be.an 'object'
expect(nodes[0].exists).to.be.true
expect(nodes[0].self).to.equal aseem.self # TODO see above
expect(nodes[0].data).to.eql aseemData
# TODO see how this is misleading? we don't respect or report direction!
nodes = aseemFuture _
expect(nodes).to.be.an 'array'
expect(nodes).to.have.length 1
expect(nodes[0]).to.be.an 'object'
expect(nodes[0].exists).to.be.true
expect(nodes[0].self).to.equal daniel.self # TODO see above
expect(nodes[0].data).to.eql danielData
'index nodes': (_) ->
daniel.index 'users', 'name', 'PI:NAME:<NAME>END_PI', _
node = db.getIndexedNode 'users', 'name', 'PI:NAME:<NAME>END_PI', _
expect(node).to.be.an 'object'
expect(node.exists).to.be.true
daniel.unindex 'users', 'name', 'PI:NAME:<NAME>END_PI', _ # Delete created node index
# TODO FIXME we're not unindexing these nodes after each test, so in fact the
# returned node and data might be from a previous test!
# expect(node.self).to.equal daniel.self # TODO see above
# expect(node.data).to.eql danielData
# Since fulltext search is using Lucene Query Language we cannot use getIndexedNode, instead we use queryNodeIndex method
'index nodes to custom fulltext index with lowercase': (_) ->
daniel.index nodeCustomIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
nodes = db.queryNodeIndex nodeCustomIndexName, 'name:dan*', _
expect(nodes).to.be.an 'array'
expect(nodes[0].exists).to.be.true
daniel.unindex nodeCustomIndexName, 'name', 'PI:NAME:<NAME>END_PI', _ # Delete created custom node index
'index nodes to custom fulltext index with no lowercase': (_) ->
daniel.index nodeCustomIndexName2, 'name', 'PI:NAME:<NAME>END_PI', _
nodes = db.queryNodeIndex nodeCustomIndexName2, 'name:PI:NAME:<NAME>END_PI*', _
expect(nodes).to.be.an 'array'
expect(nodes[0].exists).to.be.true
daniel.unindex nodeCustomIndexName2, 'name', 'PI:NAME:<NAME>END_PI', _ # Delete created custom node index
'index relationships': (_) ->
relationship.index 'follows', 'name', 'PI:NAME:<NAME>END_PI', _
rel = db.getIndexedRelationship 'follows', 'name', 'PI:NAME:<NAME>END_PI', _
expect(rel).to.be.an 'object'
expect(rel.exists).to.be.true
expect(rel.self).to.be.a 'string' # TODO see above
expect(rel.type).to.eq 'follows'
relationship.unindex 'follows', 'name', 'PI:NAME:<NAME>END_PI', _ # Delete created relationship index
# Since fulltext search is using Lucene Query Language we cannot use getIndexedRelationship, instead we use queryRelationshipIndex method
# queryRelationshipIndex method was not implemented, so I implemented it for this method to work
# Due to comments of queryNodeIndex method, queryRelationshipIndex was a to-do
'index relationships to custom fulltext index with lowercase': (_) ->
relationship.index relCustomIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
rels = db.queryRelationshipIndex relCustomIndexName, 'name:*niE*', _
expect(rels).to.be.an 'array'
expect(rels[0].exists).to.be.true
expect(rels[0].self).to.be.a 'string'
expect(rels[0].type).to.eq 'follows'
relationship.unindex relCustomIndexName, 'name', 'PI:NAME:<NAME>END_PI', _ # Delete created custom relationship index
'index relationships to custom fulltext index with no lowercase': (_) ->
relationship.index relCustomIndexName2, 'name', 'PI:NAME:<NAME>END_PI', _
rels = db.queryRelationshipIndex relCustomIndexName2, 'name:*nie*', _
expect(rels).to.be.an 'array'
expect(rels[0].exists).to.be.true
expect(rels[0].self).to.be.a 'string'
expect(rels[0].type).to.eq 'follows'
relationship.unindex relCustomIndexName2, 'name', 'PI:NAME:<NAME>END_PI', _ # Delete created custom relationship index
'unindex nodes': (_) ->
mat.index nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
mat.index nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
mat.index nodeIndexName, 'id', '12345', _
# delete entries for the node that match index, key, value
mat.unindex nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
mattNode = db.getIndexedNode nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
matNode = db.getIndexedNode nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.be.an 'object'
expect(matNode.exists).to.be.true
expect(idNode).to.be.an 'object'
expect(idNode.exists).to.be.true
# delete entries for the node that match index, key
mat.unindex nodeIndexName, 'name', _
mattNode = db.getIndexedNode nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
matNode = db.getIndexedNode nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.not.exist
expect(idNode).to.be.an 'object'
expect(idNode.exists).to.be.true
# delete entries for the node that match index
mat.unindex nodeIndexName, _
mattNode = db.getIndexedNode nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
matNode = db.getIndexedNode nodeIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
idNode = db.getIndexedNode nodeIndexName, 'id', '12345', _
expect(mattNode).to.not.exist
expect(matNode).to.not.exist
expect(idNode).to.not.exist
'unindex relationships': (_) ->
relationship.index relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
relationship.index relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
relationship.index relIndexName, 'id', '12345', _
# delete entries for the relationship that match index, key, value
relationship.unindex relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.be.an 'object'
expect(matRelationship.exists).to.be.true
expect(idRelationship).to.be.an 'object'
expect(idRelationship.exists).to.be.true
# delete entries for the relationship that match index, key
relationship.unindex relIndexName, 'name', _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.not.exist
expect(idRelationship).to.be.an 'object'
expect(idRelationship.exists).to.be.true
# delete entries for the relationship that match index
relationship.unindex relIndexName, _
mattRelationship = db.getIndexedRelationship relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
matRelationship = db.getIndexedRelationship relIndexName, 'name', 'PI:NAME:<NAME>END_PI', _
idRelationship = db.getIndexedRelationship relIndexName, 'id', '12345', _
expect(mattRelationship).to.not.exist
expect(matRelationship).to.not.exist
expect(idRelationship).to.not.exist
# TODO test deleting nodes and relationships!
'deleteNodeIndex': (_) ->
db.deleteNodeIndex nodeIndexName, _
# our index should no longer be in the list of indexes:
nodeIndexes = db.getNodeIndexes _
expect(nodeIndexes).to.not.contain nodeIndexName
expect(nodeIndexes).to.not.contain.key nodeIndexName
'deleteRelationshipIndex': (_) ->
db.deleteRelationshipIndex relIndexName, _
# our index should no longer be in the list of indexes:
relIndexes = db.getRelationshipIndexes _
expect(relIndexes).to.not.contain relIndexName
expect(relIndexes).to.not.contain.key relIndexName
## HELPERS:
testRelationship = (relationship) ->
expect(relationship).to.be.an 'object'
expect(relationship.exists).to.be.true
expect(relationship.self).to.be.a 'string' # TODO see above
expect(relationship.type).to.eq 'follows'
# in some cases, the start/end nodes may not be "filled", so these are
# commented out for now:
# expect(relationship.start).to.eql PI:NAME:<NAME>END_PI
# expect(relationship.end).to.eql aPI:NAME:<NAME>END_PI
# TEMP so for the time being, we're testing that at least
# their "selves" match. not sure if this is a public API.
expect(relationship.start).to.be.an 'object'
expect(relationship.end).to.be.an 'object'
expect(relationship.start.self).to.equal PI:NAME:<NAME>END_PI.self
expect(relationship.end.self).to.equal aseem.self
testRelationships = (relationships) ->
expect(relationships).to.be.an 'array'
expect(relationships).to.have.length 1
testRelationship relationships[0]
|
[
{
"context": "jector](http://neocotic.com/injector)\n#\n# (c) 2014 Alasdair Mercer\n#\n# Freely distributable under the MIT license\n\n#",
"end": 71,
"score": 0.9998696446418762,
"start": 56,
"tag": "NAME",
"value": "Alasdair Mercer"
}
] | src/coffee/options.coffee | SlinkySalmon633/injector-chrome | 33 | # [Injector](http://neocotic.com/injector)
#
# (c) 2014 Alasdair Mercer
#
# Freely distributable under the MIT license
# Extract any models and collections that are required by the options page.
{Snippet} = models
# Feedback
# --------
# Indicate whether the user feedback feature has been added to the page.
feedbackAdded = no
# Add the user feedback feature to the page using the `options` provided.
loadFeedback = (options) ->
# Only load and configure the feedback widget once.
return if feedbackAdded
# Create a script element to load the UserVoice widget.
uv = document.createElement('script')
uv.async = yes
uv.src = "https://widget.uservoice.com/#{options.id}.js"
# Insert the script element into the DOM.
script = document.querySelector('script')
script.parentNode.insertBefore(uv, script)
# Configure the widget as it's loading.
UserVoice = window.UserVoice or= []
UserVoice.push [
'showTab'
'classic_widget'
{
mode: 'full'
primary_color: '#333'
link_color: '#08c'
default_mode: 'feedback'
forum_id: options.forum
tab_label: i18n.get('feedback_button')
tab_color: '#333'
tab_position: 'middle-right'
tab_inverted: yes
}
]
# Ensure that the widget isn't loaded again.
feedbackAdded = yes
# Editor
# ------
# View containing buttons for saving/resetting the code of the selected snippet from the contents
# of the Ace editor.
EditorControls = Injector.View.extend {
# Overlay the editor controls on top of this element.
el: '#editor_controls'
# Register DOM events for the editor controls
events:
'click #reset_button:not(:disabled)': 'reset'
'click #save_button:not(:disabled)': 'save'
# Render the editor controls.
render: ->
@update()
@
# Reset the Ace editor so that it is empty.
#
# Nothing happens if there is no snippet selected.
reset: ->
return unless @hasModel()
{ace} = @options
ace.setValue(@model.get('code'))
ace.gotoLine(0)
# Save the contents of the Ace editor as the snippet code.
save: ->
return unless @hasModel()
$button = $('#save_button')
code = @options.ace.getValue()
$button.button('loading').delay(500)
@model.save({ code })
.then =>
@model.trigger('modified', no, code)
analytics.track('Snippet', 'Changed', 'Code')
$button.queue ->
$button.button('reset').dequeue()
# Update the state of the editor controls.
update: (@model) ->
$buttons = @$('#reset_button, #save_button')
# Ensure that specific buttons are only enabled when a snippet is selected.
$buttons.prop('disabled', not @hasModel())
}
# A selection of available modes/languages that are supported by this extension for injecting
# snippets.
EditorModes = Injector.View.extend {
# Overlay the editor modes on top of this element.
el: '#editor_modes'
# Template for mode group option groups.
groupTemplate: _.template """
<optgroup label="<%- ctx.label %>"></optgroup>
"""
# Template for mode options.
modeTemplate: _.template """
<option value="<%- ctx.value %>"><%= ctx.html %></option>
"""
# Register DOM events for the editor modes.
events:
'change': 'save'
# Render the editor modes.
render: ->
for name, modes of Snippet.modeGroups
do (name, modes) =>
$group = $ @groupTemplate {
label: i18n.get("editor_mode_group_#{name}")
}
for mode in modes
$group.append @modeTemplate {
html: i18n.get("editor_mode_#{mode}")
value: mode
}
@$el.append($group)
@update()
@
# Save the selected mode as the snippet mode.
save: ->
mode = @$el.val()
@options.ace.getSession().setMode("ace/mode/#{mode}")
if @hasModel()
analytics.track('Snippet', 'Changed', 'Mode')
@model.save({ mode })
# Update the state of the editor modes.
update: (@model) ->
mode = @model?.get('mode')
mode or= Snippet.defaultMode
@$el.prop('disabled', not @hasModel())
@$("option[value='#{mode}']").prop('selected', yes)
@save()
}
# View containing options that allow the user to configure the Ace editor.
EditorSettingsView = Injector.View.extend {
# Overlay the editor settings on top of this element.
el: '#editor_settings'
# Template for setting options.
template: _.template """
<option value="<%- ctx.value %>"><%= ctx.html %></option>
"""
# Register DOM events for the editor settings.
events:
'change #editor_indent_size': 'update'
'change #editor_line_wrap': 'update'
'change #editor_soft_tabs': 'update'
'change #editor_theme': 'update'
'click .modal-footer .btn-warning': 'restoreDefaults'
# Initialize the editor settings.
init: ->
$sizes = @$('#editor_indent_size')
$themes = @$('#editor_theme')
for size in page.config.editor.indentSizes
$sizes.append @template {
html: size
value: size
}
for theme in page.config.editor.themes
$themes.append @template {
html: i18n.get("editor_theme_#{theme}")
value: theme
}
@listenTo(@model, 'change', @captureAnalytics)
@listenTo(@model, 'change', @render)
# Capture the analytics for any changed model attributes.
captureAnalytics: ->
attrs = @model.changedAttributes() or {}
analytics.track('Editor', 'Changed', attr) for attr of attrs
# Render the editor settings.
render: ->
indentSize = @model.get('indentSize')
lineWrap = @model.get('lineWrap')
softTabs = @model.get('softTabs')
theme = @model.get('theme')
@$('#editor_indent_size').val("#{indentSize}")
@$('#editor_line_wrap').val("#{lineWrap}")
@$('#editor_soft_tabs').val("#{softTabs}")
@$('#editor_theme').val(theme)
@
# Restore the attributes of underlying model to their default values.
restoreDefaults: ->
@model.restoreDefaults()
@model.save()
# Update the state of the editor settings.
update: ->
$indentSize = @$('#editor_indent_size')
$lineWrap = @$('#editor_line_wrap')
$softTabs = @$('#editor_soft_tabs')
$theme = @$('#editor_theme')
@model.save
indentSize: parseInt($indentSize.val(), 0)
lineWrap: $lineWrap.val() is 'true'
softTabs: $softTabs.val() is 'true'
theme: $theme.val()
}
# Contains the Ace editor that allows the user to modify a snippet's code.
EditorView = Injector.View.extend {
# Overlay the editor on top of this element.
el: '#editor'
# Initialize the editor.
init: ->
@ace = ace.edit('editor')
@ace.setReadOnly(not @hasModel())
@ace.setShowPrintMargin(no)
@ace.getSession().on 'change', =>
@model.trigger('modified', @hasUnsavedChanges(), @ace.getValue()) if @hasModel()
@ace.commands.addCommand({
name: 'save'
bindKey: {
mac: 'Command-S'
win: 'Ctrl-S'
}
readOnly: no
exec: =>
@controls.save()
})
@settings = new EditorSettingsView({ model: @options.settings })
@controls = new EditorControls({ @ace })
@modes = new EditorModes({ @ace })
@listenTo(@options.settings, 'change', @updateEditor)
@updateEditor()
# Determine whether or not the contents of the Ace editor is different from the snippet code.
hasUnsavedChanges: ->
@ace.getValue() isnt @model?.get('code')
# Render the editor.
render: ->
@settings.render()
@controls.render()
@modes.render()
@
# Update the state of the editor.
update: (@model) ->
@ace.setReadOnly(not @hasModel())
@ace.setValue(@model?.get('code') or '')
@ace.gotoLine(0)
@settings.update(@model)
@controls.update(@model)
@modes.update(@model)
# Update the Ace editor with the selected options.
updateEditor: ->
{settings} = @options
aceSession = @ace.getSession()
aceSession.setUseWrapMode(settings.get('lineWrap'))
aceSession.setUseSoftTabs(settings.get('softTabs'))
aceSession.setTabSize(settings.get('indentSize'))
@ace.setTheme("ace/theme/#{settings.get('theme')}")
}
# Settings
# --------
# Allows the user to modify the general settings of the extension.
GeneralSettingsView = Injector.View.extend {
# Overlay the general settings on top of this element.
el: '#general_tab'
# Register DOM events for the general settings.
events:
'change #analytics': 'save'
# Initialize the general settings.
init: ->
@listenTo(@model, 'change:analytics', @render)
@listenTo(@model, 'change:analytics', @updateAnalytics)
@updateAnalytics()
# Render the general settings.
render: ->
@$('#analytics').prop('checked', @model.get('analytics'))
@
# Save the settings.
save: ->
$analytics = @$('#analytics')
@model.save({ analytics: $analytics.is(':checked') })
# Add or remove analytics from the page depending on settings.
updateAnalytics: ->
if @model.get('analytics')
analytics.add(page.config.analytics)
analytics.track('General', 'Changed', 'Analytics', 1)
else
analytics.track('General', 'Changed', 'Analytics', 0)
analytics.remove()
}
# Parent view for all configurable settings.
SettingsView = Injector.View.extend {
# Overlay the settings on top of this element.
el: 'body'
# Initialize the settings.
init: ->
@general = new GeneralSettingsView({ @model })
# Render the settings.
render: ->
@general.render()
@
}
# Snippets
# --------
# View contains buttons used to control/manage the user's snippets.
SnippetControls = Injector.View.extend {
# Overlay the snippet controls on top of this element.
el: '#snippets_controls'
# Register DOM events for the snippet controls.
events:
'click #delete_menu .js-resolve': 'removeSnippet'
'hide.bs.modal .modal': 'resetHost'
'show.bs.modal #snippet_clone_modal, #snippet_edit_modal': 'insertHost'
'shown.bs.modal .modal': 'focusHost'
'submit #snippet_add_form': 'addSnippet'
'submit #snippet_clone_form': 'cloneSnippet'
'submit #snippet_edit_form': 'editSnippet'
# Handle the form submission to add a new snippet.
addSnippet: (event) ->
@submitSnippet(event, 'add')
# Grant focus to the host field within the originating modal dialog.
focusHost: (event) ->
$modal = $(event.currentTarget)
$modal.find('form :text').focus()
# Handle the form submission to clone an existing snippet.
cloneSnippet: (event) ->
@submitSnippet(event, 'clone')
# Handle the form submission to edit an existing snippet.
editSnippet: (event) ->
@submitSnippet(event, 'edit')
# Insert the host attribute of the selected snippet in to the field within the originating modal
# dialog.
insertHost: (event) ->
$modal = $(event.currentTarget)
$modal.find('form :text').val(@model.get('host'))
# Deselect and destroy the active snippet.
removeSnippet: ->
return unless @hasModel()
{model} = @
model.deselect().done ->
model.destroy()
# Reset the host field within the originating modal dialog.
resetHost: (event) ->
$modal = $(event.currentTarget)
$modal.find('form :text').val('')
# Handle the form submission to determine how the input should be stored based on the `action`.
submitSnippet: (event, action) ->
$form = $(event.currentTarget)
$group = $form.find('.form-group:first')
$modal = $form.closest('.modal')
host = $group.find(':text').val().replace(/\s+/g, '')
unless host
$group.addClass('has-error')
else
$group.removeClass('has-error')
$modal.modal('hide')
if action is 'edit'
@model.save({ host })
.done ->
analytics.track('Snippet', 'Renamed', host)
page.snippets.list.sort()
else
base = if action is 'clone' then @model else new Snippet()
@collection.create {
host
code: base.get('code') or ''
mode: base.get('mode') or Snippet.defaultMode
}, success: (model) ->
if action is 'clone'
analytics.track('Snippet', 'Cloned', base.get('host'))
else
analytics.track('Snippet', 'Created', host)
model.select().done ->
page.snippets.list.sort().showSelected()
false
# Update the state of the snippet controls.
update: (@model) ->
$modelButtons = @$('#clone_button, #edit_button, #delete_menu .btn')
$modelButtons.prop('disabled', not @hasModel())
}
# Menu item which, when selected, enables the user to manage and modify the code of the underlying
# snippet.
SnippetItem = Injector.View.extend {
# Tag name for the element to be created for the snippet item.
tagName: 'li'
# Prevent `activateTooltips` from interfering with the tooltip of the snippet item.
className: 'js-tooltip-ignore'
# Template for the snippet item.
mainTemplate: _.template """
<a>
<span><%= ctx.host %></span>
</a>
"""
# Template for the tooltip of the snippet item.
tooltipTemplate: _.template """
<div class="snippet-tooltip">
<span class="snippet-tooltip-host"><%= ctx.host %></span>
<span class="snippet-tooltip-mode"><%= i18n.get('editor_mode_' + ctx.mode) %></span>
</div>
"""
# Register DOM events for the snippet item.
events:
'click a': 'updateSelection'
# Initialize the snippet item.
init: ->
@listenTo(@model, 'destroy', @remove)
@listenTo(@model, 'modified', @modified)
@listenTo(@model, 'change:host change:selected', @render)
@listenTo(@model, 'change:host change:mode', @updateTooltip)
@updateTooltip()
# Highlight that the snippet code has been modified in the editor.
modified: (changed) ->
if changed
@$el.addClass('modified')
else
@$el.removeClass('modified')
# Override `remove` to ensure that the tooltip is properly destroyed upon removal.
remove: ->
@$el.tooltip('destroy')
@super('remove')
# Render the snippet item.
render: ->
@$el.html(@mainTemplate(@model.pick('host')))
if @model.get('selected')
@$el.addClass('active')
else
@$el.removeClass('active modified')
@
# Update the selected state of the snippet depending on the given `event`.
updateSelection: (event) ->
if event.ctrlKey or event.metaKey and /^mac/i.test(navigator.platform)
@model.deselect()
else unless @$el.hasClass('active')
@model.select()
# Update the tooltip for the snippet item, destroying any previous tooltip in the process.
updateTooltip: ->
@$el
.tooltip('destroy')
.tooltip {
container: 'body'
html: yes
title: @tooltipTemplate(@model.pick('host', 'mode'))
}
}
# A menu of snippets that allows the user to easily manage them.
SnippetsList = Injector.View.extend {
# Overlay the snippets list on top of this element.
el: '#snippets_list'
# Create and add a `SnippetItem` for the specified `model`.
addItem: (model) ->
item = new SnippetItem({ model })
@items.push(item)
@$el.append(item.render().$el)
item
# Initialize the snippets list.
init: ->
@items = []
@listenTo(@collection, 'add', @addItem)
@listenTo(@collection, 'reset', @resetItems)
# Override `remove` to ensure that managed sub-views are removed as well.
remove: ->
@removeItems()
@super('remove')
# Remove all managed sub-views.
removeItems: ->
@items.shift().remove() while @items.length > 0
# Render the snippets list.
render: ->
@resetItems()
@
# Remove any existing managed sub-views before creating and adding new `SnippetItem` views for
# each snippet model in the collection.
resetItems: ->
@removeItems()
@collection.each(@addItem, @)
# Scroll to the selected snippet in the list.
showSelected: ->
$selectedItem = @$('li.active')
@$el.scrollTop($selectedItem.offset().top - @$el.offset().top)
@
# Detach each snippet item in the list and sort them based on their text contents before
# re-appending them.
sort: ->
@$el.append(_.sortBy(@$('li').detach(), 'textContent'))
@
}
# The primary view for managing snippets.
SnippetsView = Injector.View.extend {
# Overlay the snippets on top of this element.
el: '#snippets_tab'
# Initialize the snippets.
init: ->
@controls = new SnippetControls({ @collection })
@list = new SnippetsList({ @collection })
# Render the snippets.
render: ->
@controls.render()
@list.render()
@
# Update the state of the snippets.
update: (model) ->
@controls.update(model)
}
# Miscellaneous
# -------------
# Activate tooltip effects, optionally only within a specific context.
activateTooltips = (selector) ->
base = $(selector or document)
# Reset all previously treated tooltips.
base.find('[data-original-title]:not(.js-tooltip-ignore)')
.each ->
$this = $(@)
$this
.tooltip('destroy')
.attr('title', $this.attr 'data-original-title')
.removeAttr('data-original-title')
# Apply tooltips to all relevant elements.
base.find('[title]:not(.js-tooltip-ignore)')
.each ->
$this = $(@)
$this.tooltip {
container: $this.attr('data-container') or 'body'
placement: $this.attr('data-placement') or 'top'
}
# Options page setup
# ------------------
# Responsible for managing the options page.
class OptionsPage
# The current version of the extension.
#
# This will be updated with the actual value during the page's initialization.
version: ''
# Create a new instance of `OptionsPage`.
constructor: ->
@config = {}
# Public functions
# ----------------
# Initialize the options page.
#
# This will involve inserting and configuring the UI elements as well as loading the current
# settings.
init: ->
# It's nice knowing what version is running.
{@version} = chrome.runtime.getManifest()
# Load the configuration data from the file before storing it locally.
chrome.runtime.sendMessage { type: 'config' }, (@config) =>
# Map the mode groups now to save the configuration data from being loaded again by
# `Snippets.fetch`.
Snippet.mapModeGroups(@config.editor.modeGroups)
# Add the user feedback feature to the page.
loadFeedback(@config.options.userVoice)
# Begin initialization.
i18n.traverse()
# Retrieve all singleton instances as well as the collection for user-created snippets.
models.fetch (result) =>
{settings, editorSettings, snippets} = result
# Create views for the important models and collections.
@editor = new EditorView({ settings: editorSettings })
@settings = new SettingsView({ model: settings })
@snippets = new SnippetsView({ collection: snippets })
# Render these new views.
@editor.render()
@settings.render()
@snippets.render()
# Ensure that views are updated accordingly when snippets are selected/deselected.
snippets.on 'selected deselected', (snippet) =>
if snippet.get('selected') then @update(snippet) else @update()
selectedSnippet = snippets.findWhere({ selected: yes })
@update(selectedSnippet) if selectedSnippet
# Ensure the current year is displayed throughout, where appropriate.
$('.js-insert-year').html("#{new Date().getFullYear()}")
# Bind tab selection event to all tabs.
initialSnippetDisplay = initialTabChange = yes
$('a[data-tabify]').on 'click', ->
target = $(@).data('tabify')
$nav = $("header.navbar .nav a[data-tabify='#{target}']")
$parent = $nav.parent('li')
unless $parent.hasClass('active')
$parent.addClass('active').siblings().removeClass('active')
$(target).removeClass('hide').siblings('.tab').addClass('hide')
id = $nav.attr('id')
settings.save({ tab: id })
.then ->
unless initialTabChange
analytics.track('Tabs', 'Changed', _.capitalize(id.match(/(\S*)_nav$/)[1]))
if id is 'snippets' and initialSnippetDisplay
initialSnippetDisplay = no
page.snippets.list.showSelected()
initialTabChange = no
$(document.body).scrollTop(0)
# Reflect the previously persisted tab initially.
$("##{settings.get 'tab'}").trigger('click')
# Ensure that form submissions don't reload the page.
$('form.js-no-submit').on 'submit', -> false
# Support *goto* navigation elements that change the current scroll position when clicked.
$('[data-goto]').on 'click', ->
switch $(@).data('goto')
when 'top' then $(document.body).scrollTop(0)
# Bind analytical tracking events to key footer buttons and links.
$('footer a[href*="neocotic.com"]').on 'click', ->
analytics.track('Footer', 'Clicked', 'Homepage')
# Setup and configure donation buttons.
$('#donation input[name="hosted_button_id"]').val(@config.options.payPal)
$('.js-donate').on 'click', ->
$(@).tooltip('hide')
$('#donation').submit()
analytics.track('Donate', 'Clicked')
activateTooltips()
  # Update the primary views with the selected `snippet` provided.
  #
  # Calling this with no argument clears the current selection from the views.
  update: (snippet) ->
    @editor.update(snippet)
    @snippets.update(snippet)
# Create a global instance of `OptionsPage` and initialize it once the DOM is ready.
page = window.page = new OptionsPage()
$ -> page.init()
# [Injector](http://neocotic.com/injector)
#
# (c) 2014 <NAME>
#
# Freely distributable under the MIT license
# Extract any models and collections that are required by the options page.
{Snippet} = models
# Feedback
# --------
# Indicate whether the user feedback feature has been added to the page.
feedbackAdded = no
# Add the user feedback feature to the page using the `options` provided.
loadFeedback = (options) ->
  # The widget should only ever be loaded and configured once per page.
  return if feedbackAdded
  # Build a script element that loads the UserVoice widget asynchronously.
  widgetScript = document.createElement('script')
  widgetScript.async = yes
  widgetScript.src = "https://widget.uservoice.com/#{options.id}.js"
  # Inject it ahead of the first script element already on the page.
  firstScript = document.querySelector('script')
  firstScript.parentNode.insertBefore(widgetScript, firstScript)
  # Queue the widget configuration so it's applied as the script loads.
  UserVoice = window.UserVoice or= []
  config =
    mode: 'full'
    primary_color: '#333'
    link_color: '#08c'
    default_mode: 'feedback'
    forum_id: options.forum
    tab_label: i18n.get('feedback_button')
    tab_color: '#333'
    tab_position: 'middle-right'
    tab_inverted: yes
  UserVoice.push(['showTab', 'classic_widget', config])
  # Remember that the widget is now present so it isn't loaded again.
  feedbackAdded = yes
# Editor
# ------
# View containing buttons for saving/resetting the code of the selected snippet from the contents
# of the Ace editor.
EditorControls = Injector.View.extend {
  # Overlay the editor controls on top of this element.
  el: '#editor_controls'
  # Register DOM events for the editor controls.
  # The `:not(:disabled)` guards ignore clicks while no snippet is selected.
  events:
    'click #reset_button:not(:disabled)': 'reset'
    'click #save_button:not(:disabled)': 'save'
  # Render the editor controls.
  render: ->
    @update()
    @
  # Discard any unsaved changes by reloading the persisted snippet code into the Ace editor.
  #
  # Nothing happens if there is no snippet selected.
  reset: ->
    return unless @hasModel()
    {ace} = @options
    ace.setValue(@model.get('code'))
    ace.gotoLine(0)
  # Save the contents of the Ace editor as the snippet code.
  #
  # Nothing happens if there is no snippet selected.
  save: ->
    return unless @hasModel()
    $button = $('#save_button')
    code = @options.ace.getValue()
    # Show the loading state for at least 500ms so the feedback is visible to the user.
    $button.button('loading').delay(500)
    @model.save({ code })
    .then =>
      # Broadcast that the editor contents now match the persisted code.
      @model.trigger('modified', no, code)
      analytics.track('Snippet', 'Changed', 'Code')
      $button.queue ->
        $button.button('reset').dequeue()
  # Update the state of the editor controls, tracking the selected snippet (if any).
  update: (@model) ->
    $buttons = @$('#reset_button, #save_button')
    # Ensure that specific buttons are only enabled when a snippet is selected.
    $buttons.prop('disabled', not @hasModel())
}
# A selection of available modes/languages that are supported by this extension for injecting
# snippets.
EditorModes = Injector.View.extend {
  # Overlay the editor modes on top of this element.
  el: '#editor_modes'
  # Template for mode group option groups.
  groupTemplate: _.template """
    <optgroup label="<%- ctx.label %>"></optgroup>
  """
  # Template for mode options.
  modeTemplate: _.template """
    <option value="<%- ctx.value %>"><%= ctx.html %></option>
  """
  # Register DOM events for the editor modes.
  events:
    'change': 'save'
  # Render the editor modes, building an option group for each of the snippet mode groups.
  render: ->
    for name, modes of Snippet.modeGroups
      do (name, modes) =>
        $group = $ @groupTemplate {
          label: i18n.get("editor_mode_group_#{name}")
        }
        for mode in modes
          $group.append @modeTemplate {
            html: i18n.get("editor_mode_#{mode}")
            value: mode
          }
        @$el.append($group)
    @update()
    @
  # Apply the selected mode to the Ace editor and, when a snippet is selected, persist it as the
  # snippet's mode.
  save: ->
    mode = @$el.val()
    @options.ace.getSession().setMode("ace/mode/#{mode}")
    if @hasModel()
      analytics.track('Snippet', 'Changed', 'Mode')
      @model.save({ mode })
  # Update the state of the editor modes, tracking the selected snippet (if any).
  update: (@model) ->
    # Fall back on the default mode when no snippet is selected or it has no mode of its own.
    mode = @model?.get('mode')
    mode or= Snippet.defaultMode
    @$el.prop('disabled', not @hasModel())
    @$("option[value='#{mode}']").prop('selected', yes)
    # Sync the Ace editor (and the model, when one is selected) with the reflected mode.
    @save()
}
# View containing options that allow the user to configure the Ace editor.
EditorSettingsView = Injector.View.extend {
  # Overlay the editor settings on top of this element.
  el: '#editor_settings'
  # Template for setting options.
  template: _.template """
    <option value="<%- ctx.value %>"><%= ctx.html %></option>
  """
  # Register DOM events for the editor settings.
  events:
    'change #editor_indent_size': 'update'
    'change #editor_line_wrap': 'update'
    'change #editor_soft_tabs': 'update'
    'change #editor_theme': 'update'
    'click .modal-footer .btn-warning': 'restoreDefaults'
  # Initialize the editor settings, populating the indent size and theme dropdowns from the page
  # configuration.
  init: ->
    $sizes = @$('#editor_indent_size')
    $themes = @$('#editor_theme')
    for size in page.config.editor.indentSizes
      $sizes.append @template {
        html: size
        value: size
      }
    for theme in page.config.editor.themes
      $themes.append @template {
        html: i18n.get("editor_theme_#{theme}")
        value: theme
      }
    @listenTo(@model, 'change', @captureAnalytics)
    @listenTo(@model, 'change', @render)
  # Capture the analytics for any changed model attributes.
  captureAnalytics: ->
    attrs = @model.changedAttributes() or {}
    analytics.track('Editor', 'Changed', attr) for attr of attrs
  # Render the editor settings, reflecting the persisted values in the form fields.
  render: ->
    indentSize = @model.get('indentSize')
    lineWrap = @model.get('lineWrap')
    softTabs = @model.get('softTabs')
    theme = @model.get('theme')
    @$('#editor_indent_size').val("#{indentSize}")
    @$('#editor_line_wrap').val("#{lineWrap}")
    @$('#editor_soft_tabs').val("#{softTabs}")
    @$('#editor_theme').val(theme)
    @
  # Restore the attributes of underlying model to their default values.
  restoreDefaults: ->
    @model.restoreDefaults()
    @model.save()
  # Update the state of the editor settings, persisting the values selected by the user.
  update: ->
    $indentSize = @$('#editor_indent_size')
    $lineWrap = @$('#editor_line_wrap')
    $softTabs = @$('#editor_soft_tabs')
    $theme = @$('#editor_theme')
    # Parse as base 10 explicitly: a radix of 0 (as previously passed) means auto-detection, which
    # would interpret a "0x"-prefixed string as hexadecimal.
    @model.save
      indentSize: parseInt($indentSize.val(), 10)
      lineWrap: $lineWrap.val() is 'true'
      softTabs: $softTabs.val() is 'true'
      theme: $theme.val()
}
# Contains the Ace editor that allows the user to modify a snippet's code.
EditorView = Injector.View.extend {
  # Overlay the editor on top of this element.
  el: '#editor'
  # Initialize the editor, wiring up the Ace editor instance and its supporting sub-views.
  init: ->
    @ace = ace.edit('editor')
    # The editor is read-only until a snippet is selected.
    @ace.setReadOnly(not @hasModel())
    @ace.setShowPrintMargin(no)
    # Flag the snippet as modified whenever the editor contents diverge from the saved code.
    @ace.getSession().on 'change', =>
      @model.trigger('modified', @hasUnsavedChanges(), @ace.getValue()) if @hasModel()
    # Allow the editor contents to be saved using the platform's standard keyboard shortcut.
    @ace.commands.addCommand({
      name: 'save'
      bindKey: {
        mac: 'Command-S'
        win: 'Ctrl-S'
      }
      readOnly: no
      exec: =>
        @controls.save()
    })
    @settings = new EditorSettingsView({ model: @options.settings })
    @controls = new EditorControls({ @ace })
    @modes = new EditorModes({ @ace })
    # Re-apply the editor options whenever any of the persisted settings change.
    @listenTo(@options.settings, 'change', @updateEditor)
    @updateEditor()
  # Determine whether or not the contents of the Ace editor is different from the snippet code.
  hasUnsavedChanges: ->
    @ace.getValue() isnt @model?.get('code')
  # Render the editor by delegating to each of its sub-views.
  render: ->
    @settings.render()
    @controls.render()
    @modes.render()
    @
  # Update the state of the editor, tracking the selected snippet (if any).
  update: (@model) ->
    @ace.setReadOnly(not @hasModel())
    @ace.setValue(@model?.get('code') or '')
    @ace.gotoLine(0)
    @settings.update(@model)
    @controls.update(@model)
    @modes.update(@model)
  # Update the Ace editor with the selected options.
  updateEditor: ->
    {settings} = @options
    aceSession = @ace.getSession()
    aceSession.setUseWrapMode(settings.get('lineWrap'))
    aceSession.setUseSoftTabs(settings.get('softTabs'))
    aceSession.setTabSize(settings.get('indentSize'))
    @ace.setTheme("ace/theme/#{settings.get('theme')}")
}
# Settings
# --------
# Allows the user to modify the general settings of the extension.
GeneralSettingsView = Injector.View.extend {
  # Overlay the general settings on top of this element.
  el: '#general_tab'
  # Register DOM events for the general settings.
  events:
    'change #analytics': 'save'
  # Initialize the general settings, keeping both the view and the analytics integration in sync
  # with the persisted model.
  init: ->
    @listenTo(@model, 'change:analytics', @render)
    @listenTo(@model, 'change:analytics', @updateAnalytics)
    @updateAnalytics()
  # Render the general settings.
  render: ->
    @$('#analytics').prop('checked', @model.get('analytics'))
    @
  # Persist the current state of the analytics checkbox.
  save: ->
    enabled = @$('#analytics').is(':checked')
    @model.save({ analytics: enabled })
  # Add or remove analytics from the page depending on settings.
  updateAnalytics: ->
    enabled = @model.get('analytics')
    # Analytics must be added before tracking, and tracked before removal, so the toggle event
    # itself is always captured.
    analytics.add(page.config.analytics) if enabled
    analytics.track('General', 'Changed', 'Analytics', if enabled then 1 else 0)
    analytics.remove() unless enabled
}
# Parent view for all configurable settings.
SettingsView = Injector.View.extend {
  # Overlay the settings on top of this element.
  el: 'body'
  # Initialize the settings by constructing each child settings view.
  init: ->
    @general = new GeneralSettingsView({ @model })
  # Render the settings by delegating to each child settings view.
  render: ->
    view.render() for view in [@general]
    @
}
# Snippets
# --------
# View contains buttons used to control/manage the user's snippets.
SnippetControls = Injector.View.extend {
  # Overlay the snippet controls on top of this element.
  el: '#snippets_controls'
  # Register DOM events for the snippet controls.
  events:
    'click #delete_menu .js-resolve': 'removeSnippet'
    'hide.bs.modal .modal': 'resetHost'
    'show.bs.modal #snippet_clone_modal, #snippet_edit_modal': 'insertHost'
    'shown.bs.modal .modal': 'focusHost'
    'submit #snippet_add_form': 'addSnippet'
    'submit #snippet_clone_form': 'cloneSnippet'
    'submit #snippet_edit_form': 'editSnippet'
  # Handle the form submission to add a new snippet.
  addSnippet: (event) ->
    @submitSnippet(event, 'add')
  # Grant focus to the host field within the originating modal dialog.
  focusHost: (event) ->
    $modal = $(event.currentTarget)
    $modal.find('form :text').focus()
  # Handle the form submission to clone an existing snippet.
  cloneSnippet: (event) ->
    @submitSnippet(event, 'clone')
  # Handle the form submission to edit an existing snippet.
  editSnippet: (event) ->
    @submitSnippet(event, 'edit')
  # Insert the host attribute of the selected snippet in to the field within the originating modal
  # dialog.
  insertHost: (event) ->
    $modal = $(event.currentTarget)
    $modal.find('form :text').val(@model.get('host'))
  # Deselect and destroy the active snippet.
  #
  # Nothing happens if there is no snippet selected.
  removeSnippet: ->
    return unless @hasModel()
    {model} = @
    # Deselect first so that dependent views are reset before the snippet is destroyed.
    model.deselect().done ->
      model.destroy()
  # Reset the host field within the originating modal dialog.
  resetHost: (event) ->
    $modal = $(event.currentTarget)
    $modal.find('form :text').val('')
  # Handle the form submission to determine how the input should be stored based on the `action`
  # (one of "add", "clone", or "edit").
  submitSnippet: (event, action) ->
    $form = $(event.currentTarget)
    $group = $form.find('.form-group:first')
    $modal = $form.closest('.modal')
    # Strip all whitespace from the entered host.
    host = $group.find(':text').val().replace(/\s+/g, '')
    unless host
      # Highlight the validation failure and leave the modal open for another attempt.
      $group.addClass('has-error')
    else
      $group.removeClass('has-error')
      $modal.modal('hide')
      if action is 'edit'
        @model.save({ host })
        .done ->
          analytics.track('Snippet', 'Renamed', host)
          # Re-sort the list since the rename may have changed this snippet's position.
          page.snippets.list.sort()
      else
        # Clones inherit the code/mode of the selected snippet; new snippets start out empty.
        base = if action is 'clone' then @model else new Snippet()
        @collection.create {
          host
          code: base.get('code') or ''
          mode: base.get('mode') or Snippet.defaultMode
        }, success: (model) ->
          if action is 'clone'
            analytics.track('Snippet', 'Cloned', base.get('host'))
          else
            analytics.track('Snippet', 'Created', host)
          model.select().done ->
            page.snippets.list.sort().showSelected()
    # Always prevent the default form submission from reloading the page.
    false
  # Update the state of the snippet controls, tracking the selected snippet (if any).
  update: (@model) ->
    $modelButtons = @$('#clone_button, #edit_button, #delete_menu .btn')
    # Buttons that act on a snippet are only enabled while one is selected.
    $modelButtons.prop('disabled', not @hasModel())
}
# Menu item which, when selected, enables the user to manage and modify the code of the underlying
# snippet.
SnippetItem = Injector.View.extend {
  # Tag name for the element to be created for the snippet item.
  tagName: 'li'
  # Prevent `activateTooltips` from interfering with the tooltip of the snippet item.
  className: 'js-tooltip-ignore'
  # Template for the snippet item.
  mainTemplate: _.template """
    <a>
      <span><%= ctx.host %></span>
    </a>
  """
  # Template for the tooltip of the snippet item.
  tooltipTemplate: _.template """
    <div class="snippet-tooltip">
      <span class="snippet-tooltip-host"><%= ctx.host %></span>
      <span class="snippet-tooltip-mode"><%= i18n.get('editor_mode_' + ctx.mode) %></span>
    </div>
  """
  # Register DOM events for the snippet item.
  events:
    'click a': 'updateSelection'
  # Initialize the snippet item, keeping it in sync with the underlying snippet model.
  init: ->
    @listenTo(@model, 'destroy', @remove)
    @listenTo(@model, 'modified', @modified)
    @listenTo(@model, 'change:host change:selected', @render)
    @listenTo(@model, 'change:host change:mode', @updateTooltip)
    @updateTooltip()
  # Highlight that the snippet code has been modified in the editor.
  modified: (changed) ->
    if changed
      @$el.addClass('modified')
    else
      @$el.removeClass('modified')
  # Override `remove` to ensure that the tooltip is properly destroyed upon removal.
  remove: ->
    @$el.tooltip('destroy')
    @super('remove')
  # Render the snippet item, reflecting the selected state of the underlying snippet.
  render: ->
    @$el.html(@mainTemplate(@model.pick('host')))
    if @model.get('selected')
      @$el.addClass('active')
    else
      @$el.removeClass('active modified')
    @
  # Update the selected state of the snippet depending on the given `event`.
  updateSelection: (event) ->
    # NOTE(review): `and` binds tighter than `or`, so this reads as "Ctrl on any platform, or Cmd
    # on OS X" — confirm that precedence is intended before restructuring this condition.
    if event.ctrlKey or event.metaKey and /^mac/i.test(navigator.platform)
      @model.deselect()
    else unless @$el.hasClass('active')
      @model.select()
  # Update the tooltip for the snippet item, destroying any previous tooltip in the process.
  updateTooltip: ->
    @$el
    .tooltip('destroy')
    .tooltip {
      container: 'body'
      html: yes
      title: @tooltipTemplate(@model.pick('host', 'mode'))
    }
}
# A menu of snippets that allows the user to easily manage them.
SnippetsList = Injector.View.extend {
  # Overlay the snippets list on top of this element.
  el: '#snippets_list'
  # Create and add a `SnippetItem` for the specified `model`, returning the new item.
  addItem: (model) ->
    item = new SnippetItem({ model })
    @items.push(item)
    @$el.append(item.render().$el)
    item
  # Initialize the snippets list, tracking additions to and resets of the snippets collection.
  init: ->
    @items = []
    @listenTo(@collection, 'add', @addItem)
    @listenTo(@collection, 'reset', @resetItems)
  # Override `remove` to ensure that managed sub-views are removed as well.
  remove: ->
    @removeItems()
    @super('remove')
  # Remove all managed sub-views.
  removeItems: ->
    @items.shift().remove() while @items.length > 0
  # Render the snippets list.
  render: ->
    @resetItems()
    @
  # Remove any existing managed sub-views before creating and adding new `SnippetItem` views for
  # each snippet model in the collection.
  resetItems: ->
    @removeItems()
    @collection.each(@addItem, @)
  # Scroll to the selected snippet in the list.
  #
  # Nothing happens when no snippet is selected: calling `offset` on an empty jQuery set returns
  # `undefined`, which would otherwise cause a TypeError when reading `top`.
  showSelected: ->
    $selectedItem = @$('li.active')
    if $selectedItem.length
      @$el.scrollTop($selectedItem.offset().top - @$el.offset().top)
    @
  # Detach each snippet item in the list and sort them based on their text contents before
  # re-appending them.
  sort: ->
    @$el.append(_.sortBy(@$('li').detach(), 'textContent'))
    @
}
# The primary view for managing snippets.
SnippetsView = Injector.View.extend {
  # Overlay the snippets on top of this element.
  el: '#snippets_tab'
  # Initialize the snippets by constructing the controls and list sub-views.
  init: ->
    @controls = new SnippetControls({ @collection })
    @list = new SnippetsList({ @collection })
  # Render the snippets by delegating to each sub-view.
  render: ->
    view.render() for view in [@controls, @list]
    @
  # Update the state of the snippets with the selected snippet (if any).
  update: (model) ->
    @controls.update(model)
}
# Miscellaneous
# -------------
# Activate tooltip effects, optionally only within a specific context.
activateTooltips = (selector) ->
  $context = $(selector or document)
  ignore = ':not(.js-tooltip-ignore)'
  # Tear down previously treated tooltips, restoring their original titles.
  $context.find("[data-original-title]#{ignore}").each ->
    $element = $(@)
    $element.tooltip('destroy')
    $element.attr('title', $element.attr('data-original-title'))
    $element.removeAttr('data-original-title')
  # Apply tooltips to every remaining element that declares a title.
  $context.find("[title]#{ignore}").each ->
    $element = $(@)
    options =
      container: $element.attr('data-container') or 'body'
      placement: $element.attr('data-placement') or 'top'
    $element.tooltip(options)
# Options page setup
# ------------------
# Responsible for managing the options page.
class OptionsPage
  # The current version of the extension.
  #
  # This will be updated with the actual value during the page's initialization.
  version: ''
  # Create a new instance of `OptionsPage`.
  constructor: ->
    @config = {}
  # Public functions
  # ----------------
  # Initialize the options page.
  #
  # This will involve inserting and configuring the UI elements as well as loading the current
  # settings.
  init: ->
    # It's nice knowing what version is running.
    {@version} = chrome.runtime.getManifest()
    # Load the configuration data from the file before storing it locally.
    chrome.runtime.sendMessage { type: 'config' }, (@config) =>
      # Map the mode groups now to save the configuration data from being loaded again by
      # `Snippets.fetch`.
      Snippet.mapModeGroups(@config.editor.modeGroups)
      # Add the user feedback feature to the page.
      loadFeedback(@config.options.userVoice)
      # Begin initialization by localizing all marked elements in the DOM.
      i18n.traverse()
      # Retrieve all singleton instances as well as the collection for user-created snippets.
      models.fetch (result) =>
        {settings, editorSettings, snippets} = result
        # Create views for the important models and collections.
        @editor = new EditorView({ settings: editorSettings })
        @settings = new SettingsView({ model: settings })
        @snippets = new SnippetsView({ collection: snippets })
        # Render these new views.
        @editor.render()
        @settings.render()
        @snippets.render()
        # Ensure that views are updated accordingly when snippets are selected/deselected.
        snippets.on 'selected deselected', (snippet) =>
          if snippet.get('selected') then @update(snippet) else @update()
        # Reflect any snippet that was already selected when the page was opened.
        selectedSnippet = snippets.findWhere({ selected: yes })
        @update(selectedSnippet) if selectedSnippet
        # Ensure the current year is displayed throughout, where appropriate.
        $('.js-insert-year').html("#{new Date().getFullYear()}")
        # Bind tab selection event to all tabs. The two flags suppress analytics/scrolling side
        # effects during the initial, programmatic tab activation below.
        initialSnippetDisplay = initialTabChange = yes
        $('a[data-tabify]').on 'click', ->
          target = $(@).data('tabify')
          $nav = $("header.navbar .nav a[data-tabify='#{target}']")
          $parent = $nav.parent('li')
          unless $parent.hasClass('active')
            $parent.addClass('active').siblings().removeClass('active')
            $(target).removeClass('hide').siblings('.tab').addClass('hide')
            id = $nav.attr('id')
            # Persist the active tab so it can be restored on the next visit.
            settings.save({ tab: id })
            .then ->
              unless initialTabChange
                analytics.track('Tabs', 'Changed', _.capitalize(id.match(/(\S*)_nav$/)[1]))
              if id is 'snippets' and initialSnippetDisplay
                initialSnippetDisplay = no
                page.snippets.list.showSelected()
              initialTabChange = no
            $(document.body).scrollTop(0)
        # Reflect the previously persisted tab initially.
        $("##{settings.get 'tab'}").trigger('click')
        # Ensure that form submissions don't reload the page.
        $('form.js-no-submit').on 'submit', -> false
        # Support *goto* navigation elements that change the current scroll position when clicked.
        $('[data-goto]').on 'click', ->
          switch $(@).data('goto')
            when 'top' then $(document.body).scrollTop(0)
        # Bind analytical tracking events to key footer buttons and links.
        $('footer a[href*="neocotic.com"]').on 'click', ->
          analytics.track('Footer', 'Clicked', 'Homepage')
        # Setup and configure donation buttons.
        $('#donation input[name="hosted_button_id"]').val(@config.options.payPal)
        $('.js-donate').on 'click', ->
          $(@).tooltip('hide')
          $('#donation').submit()
          analytics.track('Donate', 'Clicked')
        activateTooltips()
  # Update the primary views with the selected `snippet` provided.
  #
  # Calling this with no argument clears the current selection from the views.
  update: (snippet) ->
    @editor.update(snippet)
    @snippets.update(snippet)
# Create a global instance of `OptionsPage` and initialize it once the DOM is ready.
page = window.page = new OptionsPage()
$ -> page.init()
# [Injector](http://neocotic.com/injector)
#
# (c) 2014 PI:NAME:<NAME>END_PI
#
# Freely distributable under the MIT license
# Extract any models and collections that are required by the options page.
{Snippet} = models
# Feedback
# --------
# Indicate whether the user feedback feature has been added to the page.
feedbackAdded = no
# Add the user feedback feature to the page using the `options` provided.
loadFeedback = (options) ->
# Only load and configure the feedback widget once.
return if feedbackAdded
# Create a script element to load the UserVoice widget.
uv = document.createElement('script')
uv.async = yes
uv.src = "https://widget.uservoice.com/#{options.id}.js"
# Insert the script element into the DOM.
script = document.querySelector('script')
script.parentNode.insertBefore(uv, script)
# Configure the widget as it's loading.
UserVoice = window.UserVoice or= []
UserVoice.push [
'showTab'
'classic_widget'
{
mode: 'full'
primary_color: '#333'
link_color: '#08c'
default_mode: 'feedback'
forum_id: options.forum
tab_label: i18n.get('feedback_button')
tab_color: '#333'
tab_position: 'middle-right'
tab_inverted: yes
}
]
# Ensure that the widget isn't loaded again.
feedbackAdded = yes
# Editor
# ------
# View containing buttons for saving/resetting the code of the selected snippet from the contents
# of the Ace editor.
EditorControls = Injector.View.extend {
# Overlay the editor controls on top of this element.
el: '#editor_controls'
# Register DOM events for the editor controls
events:
'click #reset_button:not(:disabled)': 'reset'
'click #save_button:not(:disabled)': 'save'
# Render the editor controls.
render: ->
@update()
@
# Reset the Ace editor so that it is empty.
#
# Nothing happens if there is no snippet selected.
reset: ->
return unless @hasModel()
{ace} = @options
ace.setValue(@model.get('code'))
ace.gotoLine(0)
# Save the contents of the Ace editor as the snippet code.
save: ->
return unless @hasModel()
$button = $('#save_button')
code = @options.ace.getValue()
$button.button('loading').delay(500)
@model.save({ code })
.then =>
@model.trigger('modified', no, code)
analytics.track('Snippet', 'Changed', 'Code')
$button.queue ->
$button.button('reset').dequeue()
# Update the state of the editor controls.
update: (@model) ->
$buttons = @$('#reset_button, #save_button')
# Ensure that specific buttons are only enabled when a snippet is selected.
$buttons.prop('disabled', not @hasModel())
}
# A selection of available modes/languages that are supported by this extension for injecting
# snippets.
EditorModes = Injector.View.extend {
# Overlay the editor modes on top of this element.
el: '#editor_modes'
# Template for mode group option groups.
groupTemplate: _.template """
<optgroup label="<%- ctx.label %>"></optgroup>
"""
# Template for mode options.
modeTemplate: _.template """
<option value="<%- ctx.value %>"><%= ctx.html %></option>
"""
# Register DOM events for the editor modes.
events:
'change': 'save'
# Render the editor modes.
render: ->
for name, modes of Snippet.modeGroups
do (name, modes) =>
$group = $ @groupTemplate {
label: i18n.get("editor_mode_group_#{name}")
}
for mode in modes
$group.append @modeTemplate {
html: i18n.get("editor_mode_#{mode}")
value: mode
}
@$el.append($group)
@update()
@
# Save the selected mode as the snippet mode.
save: ->
mode = @$el.val()
@options.ace.getSession().setMode("ace/mode/#{mode}")
if @hasModel()
analytics.track('Snippet', 'Changed', 'Mode')
@model.save({ mode })
# Update the state of the editor modes.
update: (@model) ->
mode = @model?.get('mode')
mode or= Snippet.defaultMode
@$el.prop('disabled', not @hasModel())
@$("option[value='#{mode}']").prop('selected', yes)
@save()
}
# View containing options that allow the user to configure the Ace editor.
EditorSettingsView = Injector.View.extend {
  # Overlay the editor settings on top of this element.
  el: '#editor_settings'
  # Template for setting options.
  template: _.template """
    <option value="<%- ctx.value %>"><%= ctx.html %></option>
  """
  # Register DOM events for the editor settings.
  events:
    'change #editor_indent_size': 'update'
    'change #editor_line_wrap': 'update'
    'change #editor_soft_tabs': 'update'
    'change #editor_theme': 'update'
    'click .modal-footer .btn-warning': 'restoreDefaults'
  # Initialize the editor settings, populating the indent size and theme dropdowns from the page
  # configuration.
  init: ->
    $sizes = @$('#editor_indent_size')
    $themes = @$('#editor_theme')
    for size in page.config.editor.indentSizes
      $sizes.append @template {
        html: size
        value: size
      }
    for theme in page.config.editor.themes
      $themes.append @template {
        html: i18n.get("editor_theme_#{theme}")
        value: theme
      }
    @listenTo(@model, 'change', @captureAnalytics)
    @listenTo(@model, 'change', @render)
  # Capture the analytics for any changed model attributes.
  captureAnalytics: ->
    attrs = @model.changedAttributes() or {}
    analytics.track('Editor', 'Changed', attr) for attr of attrs
  # Render the editor settings, reflecting the persisted values in the form fields.
  render: ->
    indentSize = @model.get('indentSize')
    lineWrap = @model.get('lineWrap')
    softTabs = @model.get('softTabs')
    theme = @model.get('theme')
    @$('#editor_indent_size').val("#{indentSize}")
    @$('#editor_line_wrap').val("#{lineWrap}")
    @$('#editor_soft_tabs').val("#{softTabs}")
    @$('#editor_theme').val(theme)
    @
  # Restore the attributes of underlying model to their default values.
  restoreDefaults: ->
    @model.restoreDefaults()
    @model.save()
  # Update the state of the editor settings, persisting the values selected by the user.
  update: ->
    $indentSize = @$('#editor_indent_size')
    $lineWrap = @$('#editor_line_wrap')
    $softTabs = @$('#editor_soft_tabs')
    $theme = @$('#editor_theme')
    # Parse as base 10 explicitly: a radix of 0 (as previously passed) means auto-detection, which
    # would interpret a "0x"-prefixed string as hexadecimal.
    @model.save
      indentSize: parseInt($indentSize.val(), 10)
      lineWrap: $lineWrap.val() is 'true'
      softTabs: $softTabs.val() is 'true'
      theme: $theme.val()
}
# Contains the Ace editor that allows the user to modify a snippet's code.
EditorView = Injector.View.extend {
# Overlay the editor on top of this element.
el: '#editor'
# Initialize the editor.
init: ->
@ace = ace.edit('editor')
@ace.setReadOnly(not @hasModel())
@ace.setShowPrintMargin(no)
@ace.getSession().on 'change', =>
@model.trigger('modified', @hasUnsavedChanges(), @ace.getValue()) if @hasModel()
@ace.commands.addCommand({
name: 'save'
bindKey: {
mac: 'Command-S'
win: 'Ctrl-S'
}
readOnly: no
exec: =>
@controls.save()
})
@settings = new EditorSettingsView({ model: @options.settings })
@controls = new EditorControls({ @ace })
@modes = new EditorModes({ @ace })
@listenTo(@options.settings, 'change', @updateEditor)
@updateEditor()
# Determine whether or not the contents of the Ace editor is different from the snippet code.
hasUnsavedChanges: ->
@ace.getValue() isnt @model?.get('code')
# Render the editor.
render: ->
@settings.render()
@controls.render()
@modes.render()
@
# Update the state of the editor.
update: (@model) ->
@ace.setReadOnly(not @hasModel())
@ace.setValue(@model?.get('code') or '')
@ace.gotoLine(0)
@settings.update(@model)
@controls.update(@model)
@modes.update(@model)
# Update the Ace editor with the selected options.
updateEditor: ->
{settings} = @options
aceSession = @ace.getSession()
aceSession.setUseWrapMode(settings.get('lineWrap'))
aceSession.setUseSoftTabs(settings.get('softTabs'))
aceSession.setTabSize(settings.get('indentSize'))
@ace.setTheme("ace/theme/#{settings.get('theme')}")
}
# Settings
# --------
# Allows the user to modify the general settings of the extension.
GeneralSettingsView = Injector.View.extend {
# Overlay the general settings on top of this element.
el: '#general_tab'
# Register DOM events for the general settings.
events:
'change #analytics': 'save'
# Initialize the general settings.
init: ->
@listenTo(@model, 'change:analytics', @render)
@listenTo(@model, 'change:analytics', @updateAnalytics)
@updateAnalytics()
# Render the general settings.
render: ->
@$('#analytics').prop('checked', @model.get('analytics'))
@
# Save the settings.
save: ->
$analytics = @$('#analytics')
@model.save({ analytics: $analytics.is(':checked') })
# Add or remove analytics from the page depending on settings.
updateAnalytics: ->
if @model.get('analytics')
analytics.add(page.config.analytics)
analytics.track('General', 'Changed', 'Analytics', 1)
else
analytics.track('General', 'Changed', 'Analytics', 0)
analytics.remove()
}
# Parent view for all configurable settings.
SettingsView = Injector.View.extend {
# Overlay the settings on top of this element.
el: 'body'
# Initialize the settings.
init: ->
@general = new GeneralSettingsView({ @model })
# Render the settings.
render: ->
@general.render()
@
}
# Snippets
# --------
# View contains buttons used to control/manage the user's snippets.
SnippetControls = Injector.View.extend {
# Overlay the snippet controls on top of this element.
el: '#snippets_controls'
# Register DOM events for the snippet controls.
events:
'click #delete_menu .js-resolve': 'removeSnippet'
'hide.bs.modal .modal': 'resetHost'
'show.bs.modal #snippet_clone_modal, #snippet_edit_modal': 'insertHost'
'shown.bs.modal .modal': 'focusHost'
'submit #snippet_add_form': 'addSnippet'
'submit #snippet_clone_form': 'cloneSnippet'
'submit #snippet_edit_form': 'editSnippet'
# Handle the form submission to add a new snippet.
addSnippet: (event) ->
@submitSnippet(event, 'add')
# Grant focus to the host field within the originating modal dialog.
focusHost: (event) ->
$modal = $(event.currentTarget)
$modal.find('form :text').focus()
# Handle the form submission to clone an existing snippet.
cloneSnippet: (event) ->
@submitSnippet(event, 'clone')
# Handle the form submission to edit an existing snippet.
editSnippet: (event) ->
@submitSnippet(event, 'edit')
# Insert the host attribute of the selected snippet in to the field within the originating modal
# dialog.
insertHost: (event) ->
$modal = $(event.currentTarget)
$modal.find('form :text').val(@model.get('host'))
# Deselect and destroy the active snippet.
removeSnippet: ->
return unless @hasModel()
{model} = @
model.deselect().done ->
model.destroy()
# Reset the host field within the originating modal dialog.
resetHost: (event) ->
$modal = $(event.currentTarget)
$modal.find('form :text').val('')
# Handle the form submission to determine how the input should be stored based on the `action`.
submitSnippet: (event, action) ->
$form = $(event.currentTarget)
$group = $form.find('.form-group:first')
$modal = $form.closest('.modal')
host = $group.find(':text').val().replace(/\s+/g, '')
unless host
$group.addClass('has-error')
else
$group.removeClass('has-error')
$modal.modal('hide')
if action is 'edit'
@model.save({ host })
.done ->
analytics.track('Snippet', 'Renamed', host)
page.snippets.list.sort()
else
base = if action is 'clone' then @model else new Snippet()
@collection.create {
host
code: base.get('code') or ''
mode: base.get('mode') or Snippet.defaultMode
}, success: (model) ->
if action is 'clone'
analytics.track('Snippet', 'Cloned', base.get('host'))
else
analytics.track('Snippet', 'Created', host)
model.select().done ->
page.snippets.list.sort().showSelected()
false
# Update the state of the snippet controls.
update: (@model) ->
$modelButtons = @$('#clone_button, #edit_button, #delete_menu .btn')
$modelButtons.prop('disabled', not @hasModel())
}
# Menu item which, when selected, enables the user to manage and modify the code of the underlying
# snippet. Each item mirrors one snippet model and keeps itself in sync with it.
SnippetItem = Injector.View.extend {
  # Each snippet item is rendered as a list element.
  tagName: 'li'
  # Prevent `activateTooltips` from interfering with the tooltip of the snippet item.
  className: 'js-tooltip-ignore'
  # Template for the snippet item: an anchor displaying the snippet's host.
  mainTemplate: _.template """
    <a>
      <span><%= ctx.host %></span>
    </a>
  """
  # Template for the tooltip of the snippet item (host plus localized editor mode).
  tooltipTemplate: _.template """
    <div class="snippet-tooltip">
      <span class="snippet-tooltip-host"><%= ctx.host %></span>
      <span class="snippet-tooltip-mode"><%= i18n.get('editor_mode_' + ctx.mode) %></span>
    </div>
  """
  # Register DOM events for the snippet item.
  events:
    'click a': 'updateSelection'
  # Initialize the snippet item: remove it when its model is destroyed, re-render on
  # host/selection changes, and rebuild the tooltip when host or mode change.
  init: ->
    @listenTo(@model, 'destroy', @remove)
    @listenTo(@model, 'modified', @modified)
    @listenTo(@model, 'change:host change:selected', @render)
    @listenTo(@model, 'change:host change:mode', @updateTooltip)
    @updateTooltip()
  # Highlight that the snippet code has been modified in the editor.
  modified: (changed) ->
    if changed
      @$el.addClass('modified')
    else
      @$el.removeClass('modified')
  # Override `remove` to ensure that the tooltip is properly destroyed upon removal.
  remove: ->
    @$el.tooltip('destroy')
    @super('remove')
  # Render the snippet item, reflecting the model's selected state via the `active`
  # CSS class; deselection also clears the `modified` marker.
  render: ->
    @$el.html(@mainTemplate(@model.pick('host')))
    if @model.get('selected')
      @$el.addClass('active')
    else
      @$el.removeClass('active modified')
    @
  # Update the selected state of the snippet depending on the given `event`:
  # Ctrl-click (or Cmd-click on a Mac platform) deselects; a plain click selects
  # the item when it is not already active.
  updateSelection: (event) ->
    if event.ctrlKey or event.metaKey and /^mac/i.test(navigator.platform)
      @model.deselect()
    else unless @$el.hasClass('active')
      @model.select()
  # Update the tooltip for the snippet item, destroying any previous tooltip in the process.
  updateTooltip: ->
    @$el
    .tooltip('destroy')
    .tooltip {
      container: 'body'
      html: yes
      title: @tooltipTemplate(@model.pick('host', 'mode'))
    }
}
# A menu of snippets that allows the user to easily manage them. Maintains one
# `SnippetItem` sub-view per snippet model in the bound collection.
SnippetsList = Injector.View.extend {
  # Overlay the snippets list on top of this element.
  el: '#snippets_list'
  # Create and add a `SnippetItem` for the specified `model`, tracking it in
  # `@items` so it can be cleaned up later; returns the new item.
  addItem: (model) ->
    item = new SnippetItem({ model })
    @items.push(item)
    @$el.append(item.render().$el)
    item
  # Initialize the snippets list, keeping the items in sync with the collection.
  init: ->
    @items = []
    @listenTo(@collection, 'add', @addItem)
    @listenTo(@collection, 'reset', @resetItems)
  # Override `remove` to ensure that managed sub-views are removed as well.
  remove: ->
    @removeItems()
    @super('remove')
  # Remove all managed sub-views.
  removeItems: ->
    @items.shift().remove() while @items.length > 0
  # Render the snippets list.
  render: ->
    @resetItems()
    @
  # Remove any existing managed sub-views before creating and adding new `SnippetItem`
  # views for each snippet model in the collection.
  resetItems: ->
    @removeItems()
    @collection.each(@addItem, @)
  # Scroll to the selected snippet in the list.
  showSelected: ->
    $selectedItem = @$('li.active')
    @$el.scrollTop($selectedItem.offset().top - @$el.offset().top)
    @
  # Detach each snippet item in the list and sort them based on their text contents
  # before re-appending them.
  sort: ->
    @$el.append(_.sortBy(@$('li').detach(), 'textContent'))
    @
}
# Top-level view for the snippets tab; composes the snippet controls and the
# snippets list over a shared collection.
SnippetsView = Injector.View.extend {
  # Host element for the whole snippets tab.
  el: '#snippets_tab'
  # Build the two managed sub-views over the same collection.
  init: ->
    {collection} = @
    @controls = new SnippetControls({ collection })
    @list = new SnippetsList({ collection })
  # Render both sub-views and return this view for chaining.
  render: ->
    subview.render() for subview in [@controls, @list]
    @
  # Propagate the (possibly undefined) selected snippet to the controls.
  update: (model) ->
    @controls.update(model)
}
# Miscellaneous
# -------------
# Activate tooltip effects, optionally only within a specific context (`selector`);
# falls back to the whole document when no selector is given.
activateTooltips = (selector) ->
  base = $(selector or document)
  # Reset all previously treated tooltips: destroy the widget and restore the
  # original `title` attribute so it can be re-read below.
  base.find('[data-original-title]:not(.js-tooltip-ignore)')
  .each ->
    $this = $(@)
    $this
    .tooltip('destroy')
    .attr('title', $this.attr 'data-original-title')
    .removeAttr('data-original-title')
  # Apply tooltips to all relevant elements, honouring per-element
  # `data-container`/`data-placement` overrides.
  base.find('[title]:not(.js-tooltip-ignore)')
  .each ->
    $this = $(@)
    $this.tooltip {
      container: $this.attr('data-container') or 'body'
      placement: $this.attr('data-placement') or 'top'
    }
# Options page setup
# ------------------
# Responsible for managing the options page.
# Responsible for managing the options page: loads configuration, fetches the
# persisted models, builds the editor/settings/snippets views, and wires up all
# page-level DOM behaviour (tabs, tooltips, donations, analytics).
class OptionsPage
  # The current version of the extension.
  #
  # This will be updated with the actual value during the page's initialization.
  version: ''
  # Create a new instance of `OptionsPage`.
  constructor: ->
    @config = {}
  # Public functions
  # ----------------
  # Initialize the options page.
  #
  # This will involve inserting and configuring the UI elements as well as loading the current
  # settings. Note the nested callbacks: config message -> models fetch -> DOM wiring.
  init: ->
    # It's nice knowing what version is running.
    {@version} = chrome.runtime.getManifest()
    # Load the configuration data from the file before storing it locally.
    chrome.runtime.sendMessage { type: 'config' }, (@config) =>
      # Map the mode groups now to save the configuration data from being loaded again by
      # `Snippets.fetch`.
      Snippet.mapModeGroups(@config.editor.modeGroups)
      # Add the user feedback feature to the page.
      loadFeedback(@config.options.userVoice)
      # Begin initialization.
      i18n.traverse()
      # Retrieve all singleton instances as well as the collection for user-created snippets.
      models.fetch (result) =>
        {settings, editorSettings, snippets} = result
        # Create views for the important models and collections.
        @editor = new EditorView({ settings: editorSettings })
        @settings = new SettingsView({ model: settings })
        @snippets = new SnippetsView({ collection: snippets })
        # Render these new views.
        @editor.render()
        @settings.render()
        @snippets.render()
        # Ensure that views are updated accordingly when snippets are selected/deselected.
        snippets.on 'selected deselected', (snippet) =>
          if snippet.get('selected') then @update(snippet) else @update()
        selectedSnippet = snippets.findWhere({ selected: yes })
        @update(selectedSnippet) if selectedSnippet
        # Ensure the current year is displayed throughout, where appropriate.
        $('.js-insert-year').html("#{new Date().getFullYear()}")
        # Bind tab selection event to all tabs. The two flags suppress analytics
        # and scrolling for the initial, programmatic tab activation below.
        initialSnippetDisplay = initialTabChange = yes
        $('a[data-tabify]').on 'click', ->
          target = $(@).data('tabify')
          $nav = $("header.navbar .nav a[data-tabify='#{target}']")
          $parent = $nav.parent('li')
          unless $parent.hasClass('active')
            $parent.addClass('active').siblings().removeClass('active')
            $(target).removeClass('hide').siblings('.tab').addClass('hide')
            id = $nav.attr('id')
            # Persist the active tab so it can be restored on next page load.
            settings.save({ tab: id })
            .then ->
              unless initialTabChange
                analytics.track('Tabs', 'Changed', _.capitalize(id.match(/(\S*)_nav$/)[1]))
              if id is 'snippets' and initialSnippetDisplay
                initialSnippetDisplay = no
                page.snippets.list.showSelected()
              initialTabChange = no
          $(document.body).scrollTop(0)
        # Reflect the previously persisted tab initially.
        $("##{settings.get 'tab'}").trigger('click')
        # Ensure that form submissions don't reload the page.
        $('form.js-no-submit').on 'submit', -> false
        # Support *goto* navigation elements that change the current scroll position when clicked.
        $('[data-goto]').on 'click', ->
          switch $(@).data('goto')
            when 'top' then $(document.body).scrollTop(0)
        # Bind analytical tracking events to key footer buttons and links.
        $('footer a[href*="neocotic.com"]').on 'click', ->
          analytics.track('Footer', 'Clicked', 'Homepage')
        # Setup and configure donation buttons.
        $('#donation input[name="hosted_button_id"]').val(@config.options.payPal)
        $('.js-donate').on 'click', ->
          $(@).tooltip('hide')
          $('#donation').submit()
          analytics.track('Donate', 'Clicked')
        activateTooltips()
  # Update the primary views with the selected `snippet` provided (may be undefined
  # to indicate that nothing is selected).
  update: (snippet) ->
    @editor.update(snippet)
    @snippets.update(snippet)
# Create a global instance of `OptionsPage` (exposed as `window.page`) and
# initialize it once the DOM is ready.
page = window.page = new OptionsPage()
$ -> page.init()
|
[
{
"context": " \"username\": $el.find(\"form.login-form input[name=username]\").val(),\n \"password\": $el.find(\"f",
"end": 11854,
"score": 0.5603985786437988,
"start": 11846,
"tag": "USERNAME",
"value": "username"
},
{
"context": "username: data.username,\n password: data.password,\n invitation_token: data.token\n ",
"end": 19517,
"score": 0.9985831379890442,
"start": 19504,
"tag": "PASSWORD",
"value": "data.password"
}
] | app/coffee/modules/auth.coffee | threefoldtech/Threefold-Circles-front | 0 | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/auth.coffee
###
# Local aliases for helpers exposed on the global `taiga` namespace.
taiga = @.taiga
debounce = @.taiga.debounce
# Angular module providing authentication; depends on the Taiga resources module.
module = angular.module("taigaAuth", ["taigaResources"])
# Controller for the login route. When the visitor already has a session it
# redirects to home (or the decoded `next` URL); the `unauthorized` query flag
# instead discards the stale session so the login form can be used again.
class LoginPage
    @.$inject = [
        'tgCurrentUserService',
        '$location',
        '$tgNavUrls',
        '$routeParams',
        '$tgAuth'
    ]
    constructor: (currentUserService, $location, $navUrls, $routeParams, $auth) ->
        if currentUserService.isAuthenticated()
            # `force_login` keeps the login form visible despite an active session.
            if not $routeParams['force_login']
                url = $navUrls.resolve("home")
                if $routeParams['next']
                    url = decodeURIComponent($routeParams['next'])
                    $location.search('next', null)
                if $routeParams['unauthorized']
                    # The stored credentials were rejected: wipe them instead of redirecting.
                    $auth.clear()
                    $auth.removeToken()
                else
                    $location.url(url)
module.controller('LoginPage', LoginPage)
#############################################################################
## Authentication Service
#############################################################################
# Service owning session state: the persisted user and auth token, the
# theme/locale side effects of logging in or out, and the HTTP endpoints for
# login, registration, password recovery and account management.
class AuthService extends taiga.Service
    @.$inject = ["$rootScope",
                 "$tgStorage",
                 "$tgModel",
                 "$tgResources",
                 "$tgHttp",
                 "$tgUrls",
                 "$tgConfig",
                 "$translate",
                 "tgCurrentUserService",
                 "tgThemeService",
                 "$tgAnalytics",
                 "tgTermsAnnouncementService"]
    constructor: (@rootscope, @storage, @model, @rs, @http, @urls, @config, @translate, @currentUserService,
                  @themeService, @analytics, @termsAnnouncementService) ->
        super()
        # Restore any persisted session and apply its theme immediately.
        userModel = @.getUser()
        @._currentTheme = @._getUserTheme()
        @.setUserdata(userModel)
    # Push the (possibly null) user into tgCurrentUserService and analytics.
    setUserdata: (userModel) ->
        if userModel
            @.userData = Immutable.fromJS(userModel.getAttrs())
            @currentUserService.setUser(@.userData)
        else
            @.userData = null
        @analytics.setUserId()
    # Resolve the theme to use: user preference, then configured default, then "taiga".
    _getUserTheme: ->
        return @rootscope.user?.theme || @config.get("defaultTheme") || "taiga" # load on index.jade
    # Switch the active theme, but only when it actually changed.
    _setTheme: ->
        newTheme = @._getUserTheme()
        if @._currentTheme != newTheme
            @._currentTheme = newTheme
            @themeService.use(@._currentTheme)
    # Apply the user's language (fallback: configured default, then English).
    _setLocales: ->
        lang = @rootscope.user?.lang || @config.get("defaultLanguage") || "en"
        @translate.preferredLanguage(lang) # Needed for calls to the api in the correct language
        @translate.use(lang) # Needed for change the interface in runtime
    # Return the logged-in user model, lazily restoring it from local storage;
    # returns null when no session is persisted. Applies theme (and locale when
    # a user is found) as a side effect.
    getUser: ->
        if @rootscope.user
            return @rootscope.user
        userData = @storage.get("userInfo")
        if userData
            user = @model.make_model("users", userData)
            @rootscope.user = user
            @._setLocales()
            @._setTheme()
            return user
        else
            @._setTheme()
            return null
    # Persist `user` as the current session and apply its locale/theme.
    setUser: (user) ->
        @rootscope.auth = user
        @storage.set("userInfo", user.getAttrs())
        @rootscope.user = user
        @.setUserdata(user)
        @._setLocales()
        @._setTheme()
    # Forget the in-memory and persisted user (the token is handled separately).
    clear: ->
        @rootscope.auth = null
        @rootscope.user = null
        @storage.remove("userInfo")
    # Auth-token accessors, backed by local storage.
    setToken: (token) ->
        @storage.set("token", token)
    getToken: ->
        return @storage.get("token")
    removeToken: ->
        @storage.remove("token")
    # True when a user session is available (in memory or restorable from storage).
    isAuthenticated: ->
        if @.getUser() != null
            return true
        return false
    ## Http interface
    # Re-fetch the current user from the API, keeping the existing auth token,
    # and broadcast "auth:refresh" with the refreshed model.
    refresh: () ->
        url = @urls.resolve("user-me")
        return @http.get(url).then (data, status) =>
            user = data.data
            user.token = @.getUser().auth_token
            user = @model.make_model("users", user)
            @.setUser(user)
            @rootscope.$broadcast("auth:refresh", user)
            return user
    # Authenticate against the API; `type` defaults to "normal". Clears any old
    # token first, broadcasts "auth:login" and resolves to the user model.
    login: (data, type) ->
        url = @urls.resolve("auth")
        data = _.clone(data, false)
        data.type = if type then type else "normal"
        @.removeToken()
        return @http.post(url, data).then (data, status) =>
            user = @model.make_model("users", data.data)
            @.setToken(user.auth_token)
            @.setUser(user)
            @rootscope.$broadcast("auth:login", user)
            return user
    # Install a session from a ThreeBot callback payload and reload the route.
    threebot: (data, $route) ->
        user = @model.make_model("users", data)
        @.setToken(user.auth_token)
        @.setUser(user)
        $route.reload()
        return user
    # Drop token and session, reset theme/locale and broadcast "auth:logout".
    logout: ->
        @.removeToken()
        @.clear()
        @currentUserService.removeUser()
        @._setTheme()
        @._setLocales()
        @rootscope.$broadcast("auth:logout")
        @analytics.setUserId()
    # Register a new account. `type` defaults to "public"; for "private"
    # registrations `existing` distinguishes invitation acceptance by an
    # existing user from a brand-new one. Broadcasts "auth:register".
    register: (data, type, existing) ->
        url = @urls.resolve("auth-register")
        data = _.clone(data, false)
        data.type = if type then type else "public"
        if type == "private"
            data.existing = if existing then existing else false
        @.removeToken()
        return @http.post(url, data).then (response) =>
            user = @model.make_model("users", response.data)
            @.setToken(user.auth_token)
            @.setUser(user)
            @rootscope.$broadcast("auth:register", user)
            return user
    # Fetch the invitation identified by `token`.
    getInvitation: (token) ->
        return @rs.invitations.get(token)
    # Accept an invitation by registering a new (private) account.
    acceptInvitiationWithNewUser: (data) ->
        return @.register(data, "private", false)
    # Request a password-recovery email.
    forgotPassword: (data) ->
        url = @urls.resolve("users-password-recovery")
        data = _.clone(data, false)
        @.removeToken()
        return @http.post(url, data)
    # Set a new password using a recovery token.
    changePasswordFromRecovery: (data) ->
        url = @urls.resolve("users-change-password-from-recovery")
        data = _.clone(data, false)
        @.removeToken()
        return @http.post(url, data)
    # Confirm an email change using the emailed token.
    changeEmail: (data) ->
        url = @urls.resolve("users-change-email")
        data = _.clone(data, false)
        return @http.post(url, data)
    # Cancel (delete) the account identified by the cancel token in `data`.
    cancelAccount: (data) ->
        url = @urls.resolve("users-cancel-account")
        data = _.clone(data, false)
        return @http.post(url, data)
    # Request an export of the current user's profile data.
    exportProfile: () ->
        url = @urls.resolve("users-export")
        return @http.post(url)
    # Show the terms announcement unless the user has already read the new terms.
    showTerms: (data) ->
        user = @.getUser()
        if not user or user.read_new_terms
            return
        @termsAnnouncementService.show()
module.service("$tgAuth", AuthService)
#############################################################################
## Login Directive
#############################################################################
# Directive that manages the visualization of public register
# message/link on login page: renders the ThreeBot login button with the URL
# obtained from the API, or nothing when public registration is disabled.
ThreebotLoginButton = ($config, templates) ->
    template = templates.get("auth/threebot-login-btn.html", true)
    templateFn = ->
        publicRegisterEnabled = $config.get("publicRegisterEnabled")
        if not publicRegisterEnabled
            return ""
        # NOTE(review): synchronous XHR (`async: false`) blocks the UI thread and
        # is deprecated in browsers; consider resolving this URL asynchronously.
        link = $.ajax($config.get('api') + "threebot/login", {
            type: 'GET',
            async: false,
            success: (data) ->
                return data
            error: (textStatus) -> console.log('Error', textStatus)
        });
        url = link.responseJSON.url
        return template({url:url})
    return {
        restrict: "AE"
        scope: {}
        template: templateFn
    }
module.directive("tgThreebotLoginButton", ["$tgConfig", "$tgTemplate", ThreebotLoginButton])
# PublicRegisterMessageDirective = ($config, templates) ->
# template = templates.get("auth/login-text.html", true)
# templateFn = ->
# publicRegisterEnabled = $config.get("publicRegisterEnabled")
# if not publicRegisterEnabled
# return ""
# link = $.ajax($config.get('api') + "threebot/login", {
# type: 'GET',
# async: false,
# success: (data) ->
# return data
# error: (textStatus) -> console.log('Error', textStatus)
# });
# url = link.responseJSON.url
# console.log('register',url)
# return template({url:url})
# return {
# restrict: "AE"
# scope: {}
# template: templateFn
# }
# module.directive("tgPublicRegisterMessage", ["$tgConfig", "$tgTemplate", PublicRegisterMessageDirective])
# Directive handling the ThreeBot OAuth callback route: forwards the callback
# query params to the API (with any current token and session id as headers),
# installs the returned session via `$auth.threebot`, or redirects home with an
# error notification when the callback is rejected.
ThreeBotLoginDirective = ($auth, $routeParams, $route, $config, $confirm, $translate, $location, $navUrls) ->
    link = ($el, $scope) ->
        $.ajax($config.get('api') + "threebot/callback", {
            type: 'GET',
            beforeSend: (xhr, settings) ->
                xhr.setRequestHeader("Authorization",'Bearer ' + "#{$auth.getToken()}")
                xhr.setRequestHeader("X-Session-Id",taiga.sessionId)
                xhr.setRequestHeader("Content-Type", "application/json")
            data: $routeParams,
            success: (res) -> $auth.threebot(res, $route)
            error: (xhr) ->
                # 400 means an invalid/expired callback: bail out to home.
                if xhr.status == 400
                    $location.path($navUrls.resolve("home"))
                    $route.reload()
                    $confirm.notify("light-error", $translate.instant("LOGIN_FORM.ERROR_MESSAGE"))
        });
    return {link:link}
module.directive("tbLogin", ["$tgAuth", "$routeParams", "$route","$tgConfig", "$tgConfirm",
                             "$translate", "$tgLocation", "$tgNavUrls", ThreeBotLoginDirective])
# Directive backing the standard login form: validates the credentials form,
# performs the login, warns about caps lock while typing, and redirects to the
# `next` URL (or home) on success.
LoginDirective = ($auth, $confirm, $location, $config, $routeParams, $navUrls, $events, $translate, $window, $analytics) ->
    link = ($scope, $el, $attrs) ->
        form = new checksley.Form($el.find("form.login-form"))
        # Work out where to go after login; guard against looping back to /login.
        if $routeParams['next'] and $routeParams['next'] != $navUrls.resolve("login")
            $scope.nextUrl = decodeURIComponent($routeParams['next'])
        else
            $scope.nextUrl = $navUrls.resolve("home")
        # `force_next` overrides any other destination.
        if $routeParams['force_next']
            $scope.nextUrl = decodeURIComponent($routeParams['force_next'])
        onSuccess = (response) ->
            $events.setupConnection()
            $analytics.trackEvent("auth", "login", "user login", 1)
            $auth.showTerms()
            # Absolute URLs need a full browser navigation rather than a route change.
            if $scope.nextUrl.indexOf('http') == 0
                $window.location.href = $scope.nextUrl
            else
                $location.url($scope.nextUrl)
        onError = (response) ->
            $confirm.notify("light-error", $translate.instant("LOGIN_FORM.ERROR_AUTH_INCORRECT"))
        # Caps-lock heuristic: flag when the typed value contains uppercase letters.
        $scope.onKeyUp = (event) ->
            target = angular.element(event.currentTarget)
            value = target.val()
            $scope.iscapsLockActivated = false
            if value != value.toLowerCase()
                $scope.iscapsLockActivated = true
        # Debounced to avoid duplicate login requests on double submit.
        submit = debounce 2000, (event) =>
            event.preventDefault()
            if not form.validate()
                return
            data = {
                "username": $el.find("form.login-form input[name=username]").val(),
                "password": $el.find("form.login-form input[name=password]").val()
            }
            loginFormType = $config.get("loginFormType", "normal")
            promise = $auth.login(data, loginFormType)
            return promise.then(onSuccess, onError)
        $el.on "submit", "form", submit
        window.prerenderReady = true
        $scope.$on "$destroy", ->
            $el.off()
    return {link:link}
module.directive("tgLogin", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgConfig", "$routeParams",
                             "$tgNavUrls", "$tgEvents", "$translate", "$window", "$tgAnalytics", LoginDirective])
#############################################################################
## Register Directive
#############################################################################
# Directive backing the public registration form. Redirects to the not-found
# page when public registration is disabled; otherwise registers the account
# and forwards the user to the `next` URL (or home).
RegisterDirective = ($auth, $confirm, $location, $navUrls, $config, $routeParams, $analytics, $translate, $window) ->
    link = ($scope, $el, $attrs) ->
        if not $config.get("publicRegisterEnabled")
            $location.path($navUrls.resolve("not-found"))
            $location.replace()
        $scope.data = {}
        form = $el.find("form").checksley({onlyOneErrorElement: true})
        # Post-registration destination, mirroring the login form's behaviour.
        if $routeParams['next'] and $routeParams['next'] != $navUrls.resolve("login")
            $scope.nextUrl = decodeURIComponent($routeParams['next'])
        else
            $scope.nextUrl = $navUrls.resolve("home")
        onSuccessSubmit = (response) ->
            $analytics.trackEvent("auth", "register", "user registration", 1)
            # Absolute URLs need a full browser navigation rather than a route change.
            if $scope.nextUrl.indexOf('http') == 0
                $window.location.href = $scope.nextUrl
            else
                $location.url($scope.nextUrl)
        onErrorSubmit = (response) ->
            if response.data._error_message
                text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
                $confirm.notify("light-error", text)
            # Surface field-level validation errors returned by the API.
            form.setErrors(response.data)
        # Debounced to avoid duplicate registration requests on double submit.
        submit = debounce 2000, (event) =>
            event.preventDefault()
            if not form.validate()
                return
            promise = $auth.register($scope.data)
            promise.then(onSuccessSubmit, onErrorSubmit)
        $el.on "submit", "form", submit
        $scope.$on "$destroy", ->
            $el.off()
        window.prerenderReady = true
    return {link:link}
module.directive("tgRegister", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgNavUrls", "$tgConfig",
                                "$routeParams", "$tgAnalytics", "$translate", "$window", RegisterDirective])
#############################################################################
## Forgot Password Directive
#############################################################################
# Directive backing the "forgot password" form: validates the form, requests a
# password-recovery email, and notifies the user of the outcome.
ForgotPasswordDirective = ($auth, $confirm, $location, $navUrls, $translate) ->
    link = ($scope, $el, $attrs) ->
        $scope.data = {}
        recoveryForm = $el.find("form").checksley()
        # On success, send the visitor back to login with a confirmation message.
        handleSuccess = (response) ->
            $location.path($navUrls.resolve("login"))
            title = $translate.instant("FORGOT_PASSWORD_FORM.SUCCESS_TITLE")
            message = $translate.instant("FORGOT_PASSWORD_FORM.SUCCESS_TEXT")
            $confirm.success(title, message)
        handleError = (response) ->
            $confirm.notify("light-error", $translate.instant("FORGOT_PASSWORD_FORM.ERROR"))
        # Debounced to avoid duplicate recovery requests on double submit.
        submit = debounce 2000, (event) =>
            event.preventDefault()
            return unless recoveryForm.validate()
            $auth.forgotPassword($scope.data).then(handleSuccess, handleError)
        $el.on "submit", "form", submit
        $scope.$on "$destroy", ->
            $el.off()
        window.prerenderReady = true
    return {link:link}
module.directive("tgForgotPassword", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgNavUrls", "$translate",
                                      ForgotPasswordDirective])
#############################################################################
## Change Password from Recovery Directive
#############################################################################
# Directive backing the change-password-from-recovery form. Requires a `token`
# route param (from the recovery email); without one the visitor is bounced
# back to the login page with an error notification.
ChangePasswordFromRecoveryDirective = ($auth, $confirm, $location, $params, $navUrls, $translate) ->
    link = ($scope, $el, $attrs) ->
        $scope.data = {}
        if $params.token?
            $scope.tokenInParams = true
            $scope.data.token = $params.token
        else
            $location.path($navUrls.resolve("login"))
            text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.ERROR")
            $confirm.notify("light-error",text)
        form = $el.find("form").checksley()
        onSuccessSubmit = (response) ->
            $location.path($navUrls.resolve("login"))
            text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.SUCCESS")
            $confirm.success(text)
        onErrorSubmit = (response) ->
            text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.ERROR")
            $confirm.notify("light-error", text)
        # Debounced to avoid duplicate requests on double submit.
        submit = debounce 2000, (event) =>
            event.preventDefault()
            if not form.validate()
                return
            promise = $auth.changePasswordFromRecovery($scope.data)
            promise.then(onSuccessSubmit, onErrorSubmit)
        $el.on "submit", "form", submit
        $scope.$on "$destroy", ->
            $el.off()
    return {link:link}
module.directive("tgChangePasswordFromRecovery", ["$tgAuth", "$tgConfirm", "$tgLocation", "$routeParams",
                                                  "$tgNavUrls", "$translate",
                                                  ChangePasswordFromRecoveryDirective])
#############################################################################
## Invitation
#############################################################################
# Directive for the project-invitation page: loads the invitation by token and
# lets the visitor either log in with an existing account or register a new
# one in order to join the inviting project.
# NOTE(review): "$tgConfig" is injected twice below (bound to both `$config`
# and `config`) — harmless but worth confirming/cleaning up.
InvitationDirective = ($auth, $confirm, $location, $config, $params, $navUrls, $analytics, $translate, config) ->
    link = ($scope, $el, $attrs) ->
        token = $params.token
        promise = $auth.getInvitation(token)
        promise.then (invitation) ->
            $scope.invitation = invitation
            $scope.publicRegisterEnabled = config.get("publicRegisterEnabled")
        promise.then null, (response) ->
            # Unknown/expired invitation: back to login with a notification.
            $location.path($navUrls.resolve("login"))
            text = $translate.instant("INVITATION_LOGIN_FORM.NOT_FOUND")
            $confirm.notify("light-error", text)
        # Login form
        $scope.dataLogin = {token: token}
        loginForm = $el.find("form.login-form").checksley({onlyOneErrorElement: true})
        onSuccessSubmitLogin = (response) ->
            $analytics.trackEvent("auth", "invitationAccept", "invitation accept with existing user", 1)
            $location.path($navUrls.resolve("project", {project: $scope.invitation.project_slug}))
            text = $translate.instant("INVITATION_LOGIN_FORM.SUCCESS", {
                "project_name": $scope.invitation.project_name
            })
            $confirm.notify("success", text)
        onErrorSubmitLogin = (response) ->
            $confirm.notify("light-error", response.data._error_message)
        # Debounced to avoid duplicate requests on double submit.
        submitLogin = debounce 2000, (event) =>
            event.preventDefault()
            if not loginForm.validate()
                return
            loginFormType = $config.get("loginFormType", "normal")
            data = $scope.dataLogin
            # Pass the invitation token along so logging in also joins the project.
            promise = $auth.login({
                username: data.username,
                password: data.password,
                invitation_token: data.token
            }, loginFormType)
            promise.then(onSuccessSubmitLogin, onErrorSubmitLogin)
        $el.on "submit", "form.login-form", submitLogin
        $el.on "click", ".button-login", submitLogin
        # Register form
        $scope.dataRegister = {token: token}
        registerForm = $el.find("form.register-form").checksley({onlyOneErrorElement: true})
        onSuccessSubmitRegister = (response) ->
            $analytics.trackEvent("auth", "invitationAccept", "invitation accept with new user", 1)
            $location.path($navUrls.resolve("project", {project: $scope.invitation.project_slug}))
            text = $translate.instant("INVITATION_LOGIN_FORM.SUCCESS", {
                "project_name": $scope.invitation.project_name
            })
            $confirm.notify("success", text)
        onErrorSubmitRegister = (response) ->
            if response.data._error_message
                text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
                $confirm.notify("light-error", text)
            # Surface field-level validation errors returned by the API.
            registerForm.setErrors(response.data)
        submitRegister = debounce 2000, (event) =>
            event.preventDefault()
            if not registerForm.validate()
                return
            promise = $auth.acceptInvitiationWithNewUser($scope.dataRegister)
            promise.then(onSuccessSubmitRegister, onErrorSubmitRegister)
        $el.on "submit", "form.register-form", submitRegister
        $el.on "click", ".button-register", submitRegister
        $scope.$on "$destroy", ->
            $el.off()
    return {link:link}
module.directive("tgInvitation", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgConfig", "$routeParams",
                                  "$tgNavUrls", "$tgAnalytics", "$translate", "$tgConfig", InvitationDirective])
#############################################################################
## Change Email
#############################################################################
# Directive backing the email-change confirmation form. On success the stored
# user is refreshed (when logged in) so the new email is reflected locally;
# otherwise the visitor is sent to the login page.
ChangeEmailDirective = ($repo, $model, $auth, $confirm, $location, $params, $navUrls, $translate) ->
    link = ($scope, $el, $attrs) ->
        $scope.data = {}
        # The confirmation token arrives in the route (emailed link).
        $scope.data.email_token = $params.email_token
        form = $el.find("form").checksley()
        onSuccessSubmit = (response) ->
            if $auth.isAuthenticated()
                # Re-fetch the user so the session reflects the new email.
                $repo.queryOne("users", $auth.getUser().id).then (data) =>
                    $auth.setUser(data)
                    $location.path($navUrls.resolve("home"))
                    $location.replace()
            else
                $location.path($navUrls.resolve("login"))
                $location.replace()
            text = $translate.instant("CHANGE_EMAIL_FORM.SUCCESS")
            $confirm.success(text)
        onErrorSubmit = (response) ->
            text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
            $confirm.notify("light-error", text)
        submit = ->
            if not form.validate()
                return
            promise = $auth.changeEmail($scope.data)
            promise.then(onSuccessSubmit, onErrorSubmit)
        $el.on "submit", (event) ->
            event.preventDefault()
            submit()
        $el.on "click", "a.button-change-email", (event) ->
            event.preventDefault()
            submit()
        $scope.$on "$destroy", ->
            $el.off()
    return {link:link}
module.directive("tgChangeEmail", ["$tgRepo", "$tgModel", "$tgAuth", "$tgConfirm", "$tgLocation",
                                   "$routeParams", "$tgNavUrls", "$translate", ChangeEmailDirective])
#############################################################################
## Cancel account
#############################################################################
# Directive backing the account-cancellation confirmation form: validates the
# form, cancels the account via `$tgAuth`, then logs the user out and redirects
# home with a localized notification.
#
# Bug fix: both result handlers call `$translate.instant`, but `$translate` was
# never injected (neither in the factory parameters nor in the DI annotation
# array), so any submit outcome raised `ReferenceError: $translate is not
# defined`. `$translate` is now injected last, keeping the existing parameter
# order intact.
CancelAccountDirective = ($repo, $model, $auth, $confirm, $location, $params, $navUrls, $translate) ->
    link = ($scope, $el, $attrs) ->
        $scope.data = {}
        # The cancellation token arrives in the route (emailed link).
        $scope.data.cancel_token = $params.cancel_token
        form = $el.find("form").checksley()
        onSuccessSubmit = (response) ->
            # Drop the session before leaving: the account no longer exists.
            $auth.logout()
            $location.path($navUrls.resolve("home"))
            text = $translate.instant("CANCEL_ACCOUNT.SUCCESS")
            $confirm.success(text)
        onErrorSubmit = (response) ->
            text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
            $confirm.notify("error", text)
        # Debounced to avoid duplicate cancellation requests on double submit.
        submit = debounce 2000, (event) =>
            event.preventDefault()
            if not form.validate()
                return
            promise = $auth.cancelAccount($scope.data)
            promise.then(onSuccessSubmit, onErrorSubmit)
        $el.on "submit", "form", submit
        $scope.$on "$destroy", ->
            $el.off()
    return {link:link}
module.directive("tgCancelAccount", ["$tgRepo", "$tgModel", "$tgAuth", "$tgConfirm", "$tgLocation",
                                     "$routeParams","$tgNavUrls", "$translate", CancelAccountDirective])
| 211187 | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/auth.coffee
###
# Local aliases for helpers exposed on the global `taiga` namespace.
taiga = @.taiga
debounce = @.taiga.debounce
# Angular module providing authentication; depends on the Taiga resources module.
module = angular.module("taigaAuth", ["taigaResources"])
# Controller for the login route. When the visitor already has a session it
# redirects to home (or the decoded `next` URL); the `unauthorized` query flag
# instead discards the stale session so the login form can be used again.
class LoginPage
    @.$inject = [
        'tgCurrentUserService',
        '$location',
        '$tgNavUrls',
        '$routeParams',
        '$tgAuth'
    ]
    constructor: (currentUserService, $location, $navUrls, $routeParams, $auth) ->
        if currentUserService.isAuthenticated()
            # `force_login` keeps the login form visible despite an active session.
            if not $routeParams['force_login']
                url = $navUrls.resolve("home")
                if $routeParams['next']
                    url = decodeURIComponent($routeParams['next'])
                    $location.search('next', null)
                if $routeParams['unauthorized']
                    # The stored credentials were rejected: wipe them instead of redirecting.
                    $auth.clear()
                    $auth.removeToken()
                else
                    $location.url(url)
module.controller('LoginPage', LoginPage)
#############################################################################
## Authentication Service
#############################################################################
# Service owning session state: the persisted user and auth token, the
# theme/locale side effects of logging in or out, and the HTTP endpoints for
# login, registration, password recovery and account management.
class AuthService extends taiga.Service
    @.$inject = ["$rootScope",
                 "$tgStorage",
                 "$tgModel",
                 "$tgResources",
                 "$tgHttp",
                 "$tgUrls",
                 "$tgConfig",
                 "$translate",
                 "tgCurrentUserService",
                 "tgThemeService",
                 "$tgAnalytics",
                 "tgTermsAnnouncementService"]
    constructor: (@rootscope, @storage, @model, @rs, @http, @urls, @config, @translate, @currentUserService,
                  @themeService, @analytics, @termsAnnouncementService) ->
        super()
        # Restore any persisted session and apply its theme immediately.
        userModel = @.getUser()
        @._currentTheme = @._getUserTheme()
        @.setUserdata(userModel)
    # Push the (possibly null) user into tgCurrentUserService and analytics.
    setUserdata: (userModel) ->
        if userModel
            @.userData = Immutable.fromJS(userModel.getAttrs())
            @currentUserService.setUser(@.userData)
        else
            @.userData = null
        @analytics.setUserId()
    # Resolve the theme to use: user preference, then configured default, then "taiga".
    _getUserTheme: ->
        return @rootscope.user?.theme || @config.get("defaultTheme") || "taiga" # load on index.jade
    # Switch the active theme, but only when it actually changed.
    _setTheme: ->
        newTheme = @._getUserTheme()
        if @._currentTheme != newTheme
            @._currentTheme = newTheme
            @themeService.use(@._currentTheme)
    # Apply the user's language (fallback: configured default, then English).
    _setLocales: ->
        lang = @rootscope.user?.lang || @config.get("defaultLanguage") || "en"
        @translate.preferredLanguage(lang) # Needed for calls to the api in the correct language
        @translate.use(lang) # Needed for change the interface in runtime
    # Return the logged-in user model, lazily restoring it from local storage;
    # returns null when no session is persisted. Applies theme (and locale when
    # a user is found) as a side effect.
    getUser: ->
        if @rootscope.user
            return @rootscope.user
        userData = @storage.get("userInfo")
        if userData
            user = @model.make_model("users", userData)
            @rootscope.user = user
            @._setLocales()
            @._setTheme()
            return user
        else
            @._setTheme()
            return null
    # Persist `user` as the current session and apply its locale/theme.
    setUser: (user) ->
        @rootscope.auth = user
        @storage.set("userInfo", user.getAttrs())
        @rootscope.user = user
        @.setUserdata(user)
        @._setLocales()
        @._setTheme()
    # Forget the in-memory and persisted user (the token is handled separately).
    clear: ->
        @rootscope.auth = null
        @rootscope.user = null
        @storage.remove("userInfo")
    # Auth-token accessors, backed by local storage.
    setToken: (token) ->
        @storage.set("token", token)
    getToken: ->
        return @storage.get("token")
    removeToken: ->
        @storage.remove("token")
    # True when a user session is available (in memory or restorable from storage).
    isAuthenticated: ->
        if @.getUser() != null
            return true
        return false
    ## Http interface
    # Re-fetch the current user from the API, keeping the existing auth token,
    # and broadcast "auth:refresh" with the refreshed model.
    refresh: () ->
        url = @urls.resolve("user-me")
        return @http.get(url).then (data, status) =>
            user = data.data
            user.token = @.getUser().auth_token
            user = @model.make_model("users", user)
            @.setUser(user)
            @rootscope.$broadcast("auth:refresh", user)
            return user
    # Authenticate against the API; `type` defaults to "normal". Clears any old
    # token first, broadcasts "auth:login" and resolves to the user model.
    login: (data, type) ->
        url = @urls.resolve("auth")
        data = _.clone(data, false)
        data.type = if type then type else "normal"
        @.removeToken()
        return @http.post(url, data).then (data, status) =>
            user = @model.make_model("users", data.data)
            @.setToken(user.auth_token)
            @.setUser(user)
            @rootscope.$broadcast("auth:login", user)
            return user
    # Install a session from a ThreeBot callback payload and reload the route.
    threebot: (data, $route) ->
        user = @model.make_model("users", data)
        @.setToken(user.auth_token)
        @.setUser(user)
        $route.reload()
        return user
    # Drop token and session, reset theme/locale and broadcast "auth:logout".
    logout: ->
        @.removeToken()
        @.clear()
        @currentUserService.removeUser()
        @._setTheme()
        @._setLocales()
        @rootscope.$broadcast("auth:logout")
        @analytics.setUserId()
    # Register a new account. `type` defaults to "public"; for "private"
    # registrations `existing` distinguishes invitation acceptance by an
    # existing user from a brand-new one. Broadcasts "auth:register".
    register: (data, type, existing) ->
        url = @urls.resolve("auth-register")
        data = _.clone(data, false)
        data.type = if type then type else "public"
        if type == "private"
            data.existing = if existing then existing else false
        @.removeToken()
        return @http.post(url, data).then (response) =>
            user = @model.make_model("users", response.data)
            @.setToken(user.auth_token)
            @.setUser(user)
            @rootscope.$broadcast("auth:register", user)
            return user
    # Fetch the invitation identified by `token`.
    getInvitation: (token) ->
        return @rs.invitations.get(token)
    # Accept an invitation by registering a new (private) account.
    acceptInvitiationWithNewUser: (data) ->
        return @.register(data, "private", false)
    # Request a password-recovery email.
    forgotPassword: (data) ->
        url = @urls.resolve("users-password-recovery")
        data = _.clone(data, false)
        @.removeToken()
        return @http.post(url, data)
    # Set a new password using a recovery token.
    changePasswordFromRecovery: (data) ->
        url = @urls.resolve("users-change-password-from-recovery")
        data = _.clone(data, false)
        @.removeToken()
        return @http.post(url, data)
    # Confirm an email change using the emailed token.
    changeEmail: (data) ->
        url = @urls.resolve("users-change-email")
        data = _.clone(data, false)
        return @http.post(url, data)
    # Cancel (delete) the account identified by the cancel token in `data`.
    cancelAccount: (data) ->
        url = @urls.resolve("users-cancel-account")
        data = _.clone(data, false)
        return @http.post(url, data)
    # Request an export of the current user's profile data.
    exportProfile: () ->
        url = @urls.resolve("users-export")
        return @http.post(url)
    # Show the terms announcement unless the user has already read the new terms.
    showTerms: (data) ->
        user = @.getUser()
        if not user or user.read_new_terms
            return
        @termsAnnouncementService.show()
module.service("$tgAuth", AuthService)
#############################################################################
## Login Directive
#############################################################################
# Directive that manages the visualization of public register
# message/link on login page: renders the ThreeBot login button with the URL
# obtained from the API, or nothing when public registration is disabled.
ThreebotLoginButton = ($config, templates) ->
    template = templates.get("auth/threebot-login-btn.html", true)
    templateFn = ->
        publicRegisterEnabled = $config.get("publicRegisterEnabled")
        if not publicRegisterEnabled
            return ""
        # NOTE(review): synchronous XHR (`async: false`) blocks the UI thread and
        # is deprecated in browsers; consider resolving this URL asynchronously.
        link = $.ajax($config.get('api') + "threebot/login", {
            type: 'GET',
            async: false,
            success: (data) ->
                return data
            error: (textStatus) -> console.log('Error', textStatus)
        });
        url = link.responseJSON.url
        return template({url:url})
    return {
        restrict: "AE"
        scope: {}
        template: templateFn
    }
module.directive("tgThreebotLoginButton", ["$tgConfig", "$tgTemplate", ThreebotLoginButton])
# PublicRegisterMessageDirective = ($config, templates) ->
# template = templates.get("auth/login-text.html", true)
# templateFn = ->
# publicRegisterEnabled = $config.get("publicRegisterEnabled")
# if not publicRegisterEnabled
# return ""
# link = $.ajax($config.get('api') + "threebot/login", {
# type: 'GET',
# async: false,
# success: (data) ->
# return data
# error: (textStatus) -> console.log('Error', textStatus)
# });
# url = link.responseJSON.url
# console.log('register',url)
# return template({url:url})
# return {
# restrict: "AE"
# scope: {}
# template: templateFn
# }
# module.directive("tgPublicRegisterMessage", ["$tgConfig", "$tgTemplate", PublicRegisterMessageDirective])
ThreeBotLoginDirective = ($auth, $routeParams, $route, $config, $confirm, $translate, $location, $navUrls) ->
link = ($el, $scope) ->
$.ajax($config.get('api') + "threebot/callback", {
type: 'GET',
beforeSend: (xhr, settings) ->
xhr.setRequestHeader("Authorization",'Bearer ' + "#{$auth.getToken()}")
xhr.setRequestHeader("X-Session-Id",taiga.sessionId)
xhr.setRequestHeader("Content-Type", "application/json")
data: $routeParams,
success: (res) -> $auth.threebot(res, $route)
error: (xhr) ->
if xhr.status == 400
$location.path($navUrls.resolve("home"))
$route.reload()
$confirm.notify("light-error", $translate.instant("LOGIN_FORM.ERROR_MESSAGE"))
});
return {link:link}
module.directive("tbLogin", ["$tgAuth", "$routeParams", "$route","$tgConfig", "$tgConfirm",
"$translate", "$tgLocation", "$tgNavUrls", ThreeBotLoginDirective])
LoginDirective = ($auth, $confirm, $location, $config, $routeParams, $navUrls, $events, $translate, $window, $analytics) ->
link = ($scope, $el, $attrs) ->
form = new checksley.Form($el.find("form.login-form"))
if $routeParams['next'] and $routeParams['next'] != $navUrls.resolve("login")
$scope.nextUrl = decodeURIComponent($routeParams['next'])
else
$scope.nextUrl = $navUrls.resolve("home")
if $routeParams['force_next']
$scope.nextUrl = decodeURIComponent($routeParams['force_next'])
onSuccess = (response) ->
$events.setupConnection()
$analytics.trackEvent("auth", "login", "user login", 1)
$auth.showTerms()
if $scope.nextUrl.indexOf('http') == 0
$window.location.href = $scope.nextUrl
else
$location.url($scope.nextUrl)
onError = (response) ->
$confirm.notify("light-error", $translate.instant("LOGIN_FORM.ERROR_AUTH_INCORRECT"))
$scope.onKeyUp = (event) ->
target = angular.element(event.currentTarget)
value = target.val()
$scope.iscapsLockActivated = false
if value != value.toLowerCase()
$scope.iscapsLockActivated = true
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
data = {
"username": $el.find("form.login-form input[name=username]").val(),
"password": $el.find("form.login-form input[name=password]").val()
}
loginFormType = $config.get("loginFormType", "normal")
promise = $auth.login(data, loginFormType)
return promise.then(onSuccess, onError)
$el.on "submit", "form", submit
window.prerenderReady = true
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgLogin", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgConfig", "$routeParams",
"$tgNavUrls", "$tgEvents", "$translate", "$window", "$tgAnalytics", LoginDirective])
#############################################################################
## Register Directive
#############################################################################
RegisterDirective = ($auth, $confirm, $location, $navUrls, $config, $routeParams, $analytics, $translate, $window) ->
link = ($scope, $el, $attrs) ->
if not $config.get("publicRegisterEnabled")
$location.path($navUrls.resolve("not-found"))
$location.replace()
$scope.data = {}
form = $el.find("form").checksley({onlyOneErrorElement: true})
if $routeParams['next'] and $routeParams['next'] != $navUrls.resolve("login")
$scope.nextUrl = decodeURIComponent($routeParams['next'])
else
$scope.nextUrl = $navUrls.resolve("home")
onSuccessSubmit = (response) ->
$analytics.trackEvent("auth", "register", "user registration", 1)
if $scope.nextUrl.indexOf('http') == 0
$window.location.href = $scope.nextUrl
else
$location.url($scope.nextUrl)
onErrorSubmit = (response) ->
if response.data._error_message
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("light-error", text)
form.setErrors(response.data)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.register($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
window.prerenderReady = true
return {link:link}
module.directive("tgRegister", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgNavUrls", "$tgConfig",
"$routeParams", "$tgAnalytics", "$translate", "$window", RegisterDirective])
#############################################################################
## Forgot Password Directive
#############################################################################
ForgotPasswordDirective = ($auth, $confirm, $location, $navUrls, $translate) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
$location.path($navUrls.resolve("login"))
title = $translate.instant("FORGOT_PASSWORD_FORM.SUCCESS_TITLE")
message = $translate.instant("FORGOT_PASSWORD_FORM.SUCCESS_TEXT")
$confirm.success(title, message)
onErrorSubmit = (response) ->
text = $translate.instant("FORGOT_PASSWORD_FORM.ERROR")
$confirm.notify("light-error", text)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.forgotPassword($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
window.prerenderReady = true
return {link:link}
module.directive("tgForgotPassword", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgNavUrls", "$translate",
ForgotPasswordDirective])
#############################################################################
## Change Password from Recovery Directive
#############################################################################
ChangePasswordFromRecoveryDirective = ($auth, $confirm, $location, $params, $navUrls, $translate) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
if $params.token?
$scope.tokenInParams = true
$scope.data.token = $params.token
else
$location.path($navUrls.resolve("login"))
text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.ERROR")
$confirm.notify("light-error",text)
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
$location.path($navUrls.resolve("login"))
text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.SUCCESS")
$confirm.success(text)
onErrorSubmit = (response) ->
text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.ERROR")
$confirm.notify("light-error", text)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.changePasswordFromRecovery($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgChangePasswordFromRecovery", ["$tgAuth", "$tgConfirm", "$tgLocation", "$routeParams",
"$tgNavUrls", "$translate",
ChangePasswordFromRecoveryDirective])
#############################################################################
## Invitation
#############################################################################
InvitationDirective = ($auth, $confirm, $location, $config, $params, $navUrls, $analytics, $translate, config) ->
link = ($scope, $el, $attrs) ->
token = $params.token
promise = $auth.getInvitation(token)
promise.then (invitation) ->
$scope.invitation = invitation
$scope.publicRegisterEnabled = config.get("publicRegisterEnabled")
promise.then null, (response) ->
$location.path($navUrls.resolve("login"))
text = $translate.instant("INVITATION_LOGIN_FORM.NOT_FOUND")
$confirm.notify("light-error", text)
# Login form
$scope.dataLogin = {token: token}
loginForm = $el.find("form.login-form").checksley({onlyOneErrorElement: true})
onSuccessSubmitLogin = (response) ->
$analytics.trackEvent("auth", "invitationAccept", "invitation accept with existing user", 1)
$location.path($navUrls.resolve("project", {project: $scope.invitation.project_slug}))
text = $translate.instant("INVITATION_LOGIN_FORM.SUCCESS", {
"project_name": $scope.invitation.project_name
})
$confirm.notify("success", text)
onErrorSubmitLogin = (response) ->
$confirm.notify("light-error", response.data._error_message)
submitLogin = debounce 2000, (event) =>
event.preventDefault()
if not loginForm.validate()
return
loginFormType = $config.get("loginFormType", "normal")
data = $scope.dataLogin
promise = $auth.login({
username: data.username,
password: <PASSWORD>,
invitation_token: data.token
}, loginFormType)
promise.then(onSuccessSubmitLogin, onErrorSubmitLogin)
$el.on "submit", "form.login-form", submitLogin
$el.on "click", ".button-login", submitLogin
# Register form
$scope.dataRegister = {token: token}
registerForm = $el.find("form.register-form").checksley({onlyOneErrorElement: true})
onSuccessSubmitRegister = (response) ->
$analytics.trackEvent("auth", "invitationAccept", "invitation accept with new user", 1)
$location.path($navUrls.resolve("project", {project: $scope.invitation.project_slug}))
text = $translate.instant("INVITATION_LOGIN_FORM.SUCCESS", {
"project_name": $scope.invitation.project_name
})
$confirm.notify("success", text)
onErrorSubmitRegister = (response) ->
if response.data._error_message
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("light-error", text)
registerForm.setErrors(response.data)
submitRegister = debounce 2000, (event) =>
event.preventDefault()
if not registerForm.validate()
return
promise = $auth.acceptInvitiationWithNewUser($scope.dataRegister)
promise.then(onSuccessSubmitRegister, onErrorSubmitRegister)
$el.on "submit", "form.register-form", submitRegister
$el.on "click", ".button-register", submitRegister
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgInvitation", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgConfig", "$routeParams",
"$tgNavUrls", "$tgAnalytics", "$translate", "$tgConfig", InvitationDirective])
#############################################################################
## Change Email
#############################################################################
ChangeEmailDirective = ($repo, $model, $auth, $confirm, $location, $params, $navUrls, $translate) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
$scope.data.email_token = $params.email_token
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
if $auth.isAuthenticated()
$repo.queryOne("users", $auth.getUser().id).then (data) =>
$auth.setUser(data)
$location.path($navUrls.resolve("home"))
$location.replace()
else
$location.path($navUrls.resolve("login"))
$location.replace()
text = $translate.instant("CHANGE_EMAIL_FORM.SUCCESS")
$confirm.success(text)
onErrorSubmit = (response) ->
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("light-error", text)
submit = ->
if not form.validate()
return
promise = $auth.changeEmail($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", (event) ->
event.preventDefault()
submit()
$el.on "click", "a.button-change-email", (event) ->
event.preventDefault()
submit()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgChangeEmail", ["$tgRepo", "$tgModel", "$tgAuth", "$tgConfirm", "$tgLocation",
"$routeParams", "$tgNavUrls", "$translate", ChangeEmailDirective])
#############################################################################
## Cancel account
#############################################################################
CancelAccountDirective = ($repo, $model, $auth, $confirm, $location, $params, $navUrls) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
$scope.data.cancel_token = $params.cancel_token
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
$auth.logout()
$location.path($navUrls.resolve("home"))
text = $translate.instant("CANCEL_ACCOUNT.SUCCESS")
$confirm.success(text)
onErrorSubmit = (response) ->
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("error", text)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.cancelAccount($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgCancelAccount", ["$tgRepo", "$tgModel", "$tgAuth", "$tgConfirm", "$tgLocation",
"$routeParams","$tgNavUrls", CancelAccountDirective])
| true | ###
# Copyright (C) 2014-2018 Taiga Agile LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# File: modules/auth.coffee
###
taiga = @.taiga
debounce = @.taiga.debounce
module = angular.module("taigaAuth", ["taigaResources"])
class LoginPage
@.$inject = [
'tgCurrentUserService',
'$location',
'$tgNavUrls',
'$routeParams',
'$tgAuth'
]
constructor: (currentUserService, $location, $navUrls, $routeParams, $auth) ->
if currentUserService.isAuthenticated()
if not $routeParams['force_login']
url = $navUrls.resolve("home")
if $routeParams['next']
url = decodeURIComponent($routeParams['next'])
$location.search('next', null)
if $routeParams['unauthorized']
$auth.clear()
$auth.removeToken()
else
$location.url(url)
module.controller('LoginPage', LoginPage)
#############################################################################
## Authentication Service
#############################################################################
class AuthService extends taiga.Service
@.$inject = ["$rootScope",
"$tgStorage",
"$tgModel",
"$tgResources",
"$tgHttp",
"$tgUrls",
"$tgConfig",
"$translate",
"tgCurrentUserService",
"tgThemeService",
"$tgAnalytics",
"tgTermsAnnouncementService"]
constructor: (@rootscope, @storage, @model, @rs, @http, @urls, @config, @translate, @currentUserService,
@themeService, @analytics, @termsAnnouncementService) ->
super()
userModel = @.getUser()
@._currentTheme = @._getUserTheme()
@.setUserdata(userModel)
setUserdata: (userModel) ->
if userModel
@.userData = Immutable.fromJS(userModel.getAttrs())
@currentUserService.setUser(@.userData)
else
@.userData = null
@analytics.setUserId()
_getUserTheme: ->
return @rootscope.user?.theme || @config.get("defaultTheme") || "taiga" # load on index.jade
_setTheme: ->
newTheme = @._getUserTheme()
if @._currentTheme != newTheme
@._currentTheme = newTheme
@themeService.use(@._currentTheme)
_setLocales: ->
lang = @rootscope.user?.lang || @config.get("defaultLanguage") || "en"
@translate.preferredLanguage(lang) # Needed for calls to the api in the correct language
@translate.use(lang) # Needed for change the interface in runtime
getUser: ->
if @rootscope.user
return @rootscope.user
userData = @storage.get("userInfo")
if userData
user = @model.make_model("users", userData)
@rootscope.user = user
@._setLocales()
@._setTheme()
return user
else
@._setTheme()
return null
setUser: (user) ->
@rootscope.auth = user
@storage.set("userInfo", user.getAttrs())
@rootscope.user = user
@.setUserdata(user)
@._setLocales()
@._setTheme()
clear: ->
@rootscope.auth = null
@rootscope.user = null
@storage.remove("userInfo")
setToken: (token) ->
@storage.set("token", token)
getToken: ->
return @storage.get("token")
removeToken: ->
@storage.remove("token")
isAuthenticated: ->
if @.getUser() != null
return true
return false
## Http interface
refresh: () ->
url = @urls.resolve("user-me")
return @http.get(url).then (data, status) =>
user = data.data
user.token = @.getUser().auth_token
user = @model.make_model("users", user)
@.setUser(user)
@rootscope.$broadcast("auth:refresh", user)
return user
login: (data, type) ->
url = @urls.resolve("auth")
data = _.clone(data, false)
data.type = if type then type else "normal"
@.removeToken()
return @http.post(url, data).then (data, status) =>
user = @model.make_model("users", data.data)
@.setToken(user.auth_token)
@.setUser(user)
@rootscope.$broadcast("auth:login", user)
return user
threebot: (data, $route) ->
user = @model.make_model("users", data)
@.setToken(user.auth_token)
@.setUser(user)
$route.reload()
return user
logout: ->
@.removeToken()
@.clear()
@currentUserService.removeUser()
@._setTheme()
@._setLocales()
@rootscope.$broadcast("auth:logout")
@analytics.setUserId()
register: (data, type, existing) ->
url = @urls.resolve("auth-register")
data = _.clone(data, false)
data.type = if type then type else "public"
if type == "private"
data.existing = if existing then existing else false
@.removeToken()
return @http.post(url, data).then (response) =>
user = @model.make_model("users", response.data)
@.setToken(user.auth_token)
@.setUser(user)
@rootscope.$broadcast("auth:register", user)
return user
getInvitation: (token) ->
return @rs.invitations.get(token)
acceptInvitiationWithNewUser: (data) ->
return @.register(data, "private", false)
forgotPassword: (data) ->
url = @urls.resolve("users-password-recovery")
data = _.clone(data, false)
@.removeToken()
return @http.post(url, data)
changePasswordFromRecovery: (data) ->
url = @urls.resolve("users-change-password-from-recovery")
data = _.clone(data, false)
@.removeToken()
return @http.post(url, data)
changeEmail: (data) ->
url = @urls.resolve("users-change-email")
data = _.clone(data, false)
return @http.post(url, data)
cancelAccount: (data) ->
url = @urls.resolve("users-cancel-account")
data = _.clone(data, false)
return @http.post(url, data)
exportProfile: () ->
url = @urls.resolve("users-export")
return @http.post(url)
showTerms: (data) ->
user = @.getUser()
if not user or user.read_new_terms
return
@termsAnnouncementService.show()
module.service("$tgAuth", AuthService)
#############################################################################
## Login Directive
#############################################################################
# Directive that manages the visualization of public register
# message/link on login page.
ThreebotLoginButton = ($config, templates) ->
template = templates.get("auth/threebot-login-btn.html", true)
templateFn = ->
publicRegisterEnabled = $config.get("publicRegisterEnabled")
if not publicRegisterEnabled
return ""
link = $.ajax($config.get('api') + "threebot/login", {
type: 'GET',
async: false,
success: (data) ->
return data
error: (textStatus) -> console.log('Error', textStatus)
});
url = link.responseJSON.url
return template({url:url})
return {
restrict: "AE"
scope: {}
template: templateFn
}
module.directive("tgThreebotLoginButton", ["$tgConfig", "$tgTemplate", ThreebotLoginButton])
# PublicRegisterMessageDirective = ($config, templates) ->
# template = templates.get("auth/login-text.html", true)
# templateFn = ->
# publicRegisterEnabled = $config.get("publicRegisterEnabled")
# if not publicRegisterEnabled
# return ""
# link = $.ajax($config.get('api') + "threebot/login", {
# type: 'GET',
# async: false,
# success: (data) ->
# return data
# error: (textStatus) -> console.log('Error', textStatus)
# });
# url = link.responseJSON.url
# console.log('register',url)
# return template({url:url})
# return {
# restrict: "AE"
# scope: {}
# template: templateFn
# }
# module.directive("tgPublicRegisterMessage", ["$tgConfig", "$tgTemplate", PublicRegisterMessageDirective])
ThreeBotLoginDirective = ($auth, $routeParams, $route, $config, $confirm, $translate, $location, $navUrls) ->
link = ($el, $scope) ->
$.ajax($config.get('api') + "threebot/callback", {
type: 'GET',
beforeSend: (xhr, settings) ->
xhr.setRequestHeader("Authorization",'Bearer ' + "#{$auth.getToken()}")
xhr.setRequestHeader("X-Session-Id",taiga.sessionId)
xhr.setRequestHeader("Content-Type", "application/json")
data: $routeParams,
success: (res) -> $auth.threebot(res, $route)
error: (xhr) ->
if xhr.status == 400
$location.path($navUrls.resolve("home"))
$route.reload()
$confirm.notify("light-error", $translate.instant("LOGIN_FORM.ERROR_MESSAGE"))
});
return {link:link}
module.directive("tbLogin", ["$tgAuth", "$routeParams", "$route","$tgConfig", "$tgConfirm",
"$translate", "$tgLocation", "$tgNavUrls", ThreeBotLoginDirective])
LoginDirective = ($auth, $confirm, $location, $config, $routeParams, $navUrls, $events, $translate, $window, $analytics) ->
link = ($scope, $el, $attrs) ->
form = new checksley.Form($el.find("form.login-form"))
if $routeParams['next'] and $routeParams['next'] != $navUrls.resolve("login")
$scope.nextUrl = decodeURIComponent($routeParams['next'])
else
$scope.nextUrl = $navUrls.resolve("home")
if $routeParams['force_next']
$scope.nextUrl = decodeURIComponent($routeParams['force_next'])
onSuccess = (response) ->
$events.setupConnection()
$analytics.trackEvent("auth", "login", "user login", 1)
$auth.showTerms()
if $scope.nextUrl.indexOf('http') == 0
$window.location.href = $scope.nextUrl
else
$location.url($scope.nextUrl)
onError = (response) ->
$confirm.notify("light-error", $translate.instant("LOGIN_FORM.ERROR_AUTH_INCORRECT"))
$scope.onKeyUp = (event) ->
target = angular.element(event.currentTarget)
value = target.val()
$scope.iscapsLockActivated = false
if value != value.toLowerCase()
$scope.iscapsLockActivated = true
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
data = {
"username": $el.find("form.login-form input[name=username]").val(),
"password": $el.find("form.login-form input[name=password]").val()
}
loginFormType = $config.get("loginFormType", "normal")
promise = $auth.login(data, loginFormType)
return promise.then(onSuccess, onError)
$el.on "submit", "form", submit
window.prerenderReady = true
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgLogin", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgConfig", "$routeParams",
"$tgNavUrls", "$tgEvents", "$translate", "$window", "$tgAnalytics", LoginDirective])
#############################################################################
## Register Directive
#############################################################################
RegisterDirective = ($auth, $confirm, $location, $navUrls, $config, $routeParams, $analytics, $translate, $window) ->
link = ($scope, $el, $attrs) ->
if not $config.get("publicRegisterEnabled")
$location.path($navUrls.resolve("not-found"))
$location.replace()
$scope.data = {}
form = $el.find("form").checksley({onlyOneErrorElement: true})
if $routeParams['next'] and $routeParams['next'] != $navUrls.resolve("login")
$scope.nextUrl = decodeURIComponent($routeParams['next'])
else
$scope.nextUrl = $navUrls.resolve("home")
onSuccessSubmit = (response) ->
$analytics.trackEvent("auth", "register", "user registration", 1)
if $scope.nextUrl.indexOf('http') == 0
$window.location.href = $scope.nextUrl
else
$location.url($scope.nextUrl)
onErrorSubmit = (response) ->
if response.data._error_message
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("light-error", text)
form.setErrors(response.data)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.register($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
window.prerenderReady = true
return {link:link}
module.directive("tgRegister", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgNavUrls", "$tgConfig",
"$routeParams", "$tgAnalytics", "$translate", "$window", RegisterDirective])
#############################################################################
## Forgot Password Directive
#############################################################################
ForgotPasswordDirective = ($auth, $confirm, $location, $navUrls, $translate) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
$location.path($navUrls.resolve("login"))
title = $translate.instant("FORGOT_PASSWORD_FORM.SUCCESS_TITLE")
message = $translate.instant("FORGOT_PASSWORD_FORM.SUCCESS_TEXT")
$confirm.success(title, message)
onErrorSubmit = (response) ->
text = $translate.instant("FORGOT_PASSWORD_FORM.ERROR")
$confirm.notify("light-error", text)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.forgotPassword($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
window.prerenderReady = true
return {link:link}
module.directive("tgForgotPassword", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgNavUrls", "$translate",
ForgotPasswordDirective])
#############################################################################
## Change Password from Recovery Directive
#############################################################################
ChangePasswordFromRecoveryDirective = ($auth, $confirm, $location, $params, $navUrls, $translate) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
if $params.token?
$scope.tokenInParams = true
$scope.data.token = $params.token
else
$location.path($navUrls.resolve("login"))
text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.ERROR")
$confirm.notify("light-error",text)
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
$location.path($navUrls.resolve("login"))
text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.SUCCESS")
$confirm.success(text)
onErrorSubmit = (response) ->
text = $translate.instant("CHANGE_PASSWORD_RECOVERY_FORM.ERROR")
$confirm.notify("light-error", text)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.changePasswordFromRecovery($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgChangePasswordFromRecovery", ["$tgAuth", "$tgConfirm", "$tgLocation", "$routeParams",
"$tgNavUrls", "$translate",
ChangePasswordFromRecoveryDirective])
#############################################################################
## Invitation
#############################################################################
InvitationDirective = ($auth, $confirm, $location, $config, $params, $navUrls, $analytics, $translate, config) ->
link = ($scope, $el, $attrs) ->
token = $params.token
promise = $auth.getInvitation(token)
promise.then (invitation) ->
$scope.invitation = invitation
$scope.publicRegisterEnabled = config.get("publicRegisterEnabled")
promise.then null, (response) ->
$location.path($navUrls.resolve("login"))
text = $translate.instant("INVITATION_LOGIN_FORM.NOT_FOUND")
$confirm.notify("light-error", text)
# Login form
$scope.dataLogin = {token: token}
loginForm = $el.find("form.login-form").checksley({onlyOneErrorElement: true})
onSuccessSubmitLogin = (response) ->
$analytics.trackEvent("auth", "invitationAccept", "invitation accept with existing user", 1)
$location.path($navUrls.resolve("project", {project: $scope.invitation.project_slug}))
text = $translate.instant("INVITATION_LOGIN_FORM.SUCCESS", {
"project_name": $scope.invitation.project_name
})
$confirm.notify("success", text)
onErrorSubmitLogin = (response) ->
$confirm.notify("light-error", response.data._error_message)
submitLogin = debounce 2000, (event) =>
event.preventDefault()
if not loginForm.validate()
return
loginFormType = $config.get("loginFormType", "normal")
data = $scope.dataLogin
promise = $auth.login({
username: data.username,
password: PI:PASSWORD:<PASSWORD>END_PI,
invitation_token: data.token
}, loginFormType)
promise.then(onSuccessSubmitLogin, onErrorSubmitLogin)
$el.on "submit", "form.login-form", submitLogin
$el.on "click", ".button-login", submitLogin
# Register form
$scope.dataRegister = {token: token}
registerForm = $el.find("form.register-form").checksley({onlyOneErrorElement: true})
onSuccessSubmitRegister = (response) ->
$analytics.trackEvent("auth", "invitationAccept", "invitation accept with new user", 1)
$location.path($navUrls.resolve("project", {project: $scope.invitation.project_slug}))
text = $translate.instant("INVITATION_LOGIN_FORM.SUCCESS", {
"project_name": $scope.invitation.project_name
})
$confirm.notify("success", text)
onErrorSubmitRegister = (response) ->
if response.data._error_message
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("light-error", text)
registerForm.setErrors(response.data)
submitRegister = debounce 2000, (event) =>
event.preventDefault()
if not registerForm.validate()
return
promise = $auth.acceptInvitiationWithNewUser($scope.dataRegister)
promise.then(onSuccessSubmitRegister, onErrorSubmitRegister)
$el.on "submit", "form.register-form", submitRegister
$el.on "click", ".button-register", submitRegister
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgInvitation", ["$tgAuth", "$tgConfirm", "$tgLocation", "$tgConfig", "$routeParams",
"$tgNavUrls", "$tgAnalytics", "$translate", "$tgConfig", InvitationDirective])
#############################################################################
## Change Email
#############################################################################
ChangeEmailDirective = ($repo, $model, $auth, $confirm, $location, $params, $navUrls, $translate) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
$scope.data.email_token = $params.email_token
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
if $auth.isAuthenticated()
$repo.queryOne("users", $auth.getUser().id).then (data) =>
$auth.setUser(data)
$location.path($navUrls.resolve("home"))
$location.replace()
else
$location.path($navUrls.resolve("login"))
$location.replace()
text = $translate.instant("CHANGE_EMAIL_FORM.SUCCESS")
$confirm.success(text)
onErrorSubmit = (response) ->
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("light-error", text)
submit = ->
if not form.validate()
return
promise = $auth.changeEmail($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", (event) ->
event.preventDefault()
submit()
$el.on "click", "a.button-change-email", (event) ->
event.preventDefault()
submit()
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgChangeEmail", ["$tgRepo", "$tgModel", "$tgAuth", "$tgConfirm", "$tgLocation",
"$routeParams", "$tgNavUrls", "$translate", ChangeEmailDirective])
#############################################################################
## Cancel account
#############################################################################
CancelAccountDirective = ($repo, $model, $auth, $confirm, $location, $params, $navUrls) ->
link = ($scope, $el, $attrs) ->
$scope.data = {}
$scope.data.cancel_token = $params.cancel_token
form = $el.find("form").checksley()
onSuccessSubmit = (response) ->
$auth.logout()
$location.path($navUrls.resolve("home"))
text = $translate.instant("CANCEL_ACCOUNT.SUCCESS")
$confirm.success(text)
onErrorSubmit = (response) ->
text = $translate.instant("COMMON.GENERIC_ERROR", {error: response.data._error_message})
$confirm.notify("error", text)
submit = debounce 2000, (event) =>
event.preventDefault()
if not form.validate()
return
promise = $auth.cancelAccount($scope.data)
promise.then(onSuccessSubmit, onErrorSubmit)
$el.on "submit", "form", submit
$scope.$on "$destroy", ->
$el.off()
return {link:link}
module.directive("tgCancelAccount", ["$tgRepo", "$tgModel", "$tgAuth", "$tgConfirm", "$tgLocation",
"$routeParams","$tgNavUrls", CancelAccountDirective])
|
[
{
"context": "it '#save', (done) ->\n ctrlUser.createNewUser 'testUser', 'testPass', 'testEmail', (err, user)->\n ex",
"end": 337,
"score": 0.9993525743484497,
"start": 329,
"tag": "USERNAME",
"value": "testUser"
},
{
"context": " expect testUser.password\n .to.not.eql 'testPass'\n console.log 'reached save condition'\n ",
"end": 509,
"score": 0.9974619150161743,
"start": 501,
"tag": "PASSWORD",
"value": "testPass"
}
] | test/controllers/ctrlUserTest.coffee | zodoz/EC2Manager | 0 | sinon = require 'sinon'
chai = require 'chai'
expect = chai.expect
mongoose = require 'mongoose'
mockgoose = require 'mockgoose'
ctrlUser = require '../../src/controllers/ctrlUser.coffee'
mockgoose mongoose
mongoose.connect 'mongodb://localhost/test'
describe 'CtrlUser', ->
it '#save', (done) ->
ctrlUser.createNewUser 'testUser', 'testPass', 'testEmail', (err, user)->
expect err
.to.be.null
testUser.save (err) ->
expect testUser.password
.to.not.eql 'testPass'
console.log 'reached save condition'
done()
| 33571 | sinon = require 'sinon'
chai = require 'chai'
expect = chai.expect
mongoose = require 'mongoose'
mockgoose = require 'mockgoose'
ctrlUser = require '../../src/controllers/ctrlUser.coffee'
mockgoose mongoose
mongoose.connect 'mongodb://localhost/test'
describe 'CtrlUser', ->
it '#save', (done) ->
ctrlUser.createNewUser 'testUser', 'testPass', 'testEmail', (err, user)->
expect err
.to.be.null
testUser.save (err) ->
expect testUser.password
.to.not.eql '<PASSWORD>'
console.log 'reached save condition'
done()
| true | sinon = require 'sinon'
chai = require 'chai'
expect = chai.expect
mongoose = require 'mongoose'
mockgoose = require 'mockgoose'
ctrlUser = require '../../src/controllers/ctrlUser.coffee'
mockgoose mongoose
mongoose.connect 'mongodb://localhost/test'
describe 'CtrlUser', ->
it '#save', (done) ->
ctrlUser.createNewUser 'testUser', 'testPass', 'testEmail', (err, user)->
expect err
.to.be.null
testUser.save (err) ->
expect testUser.password
.to.not.eql 'PI:PASSWORD:<PASSWORD>END_PI'
console.log 'reached save condition'
done()
|
[
{
"context": "g-a-grammar/\n\nscopeName: 'source.lavender'\nname: 'Lavender'\nfileTypes: [\n 'lv'\n]\nfoldingStartMarker: '\\\\(",
"end": 191,
"score": 0.6596484184265137,
"start": 183,
"tag": "NAME",
"value": "Lavender"
}
] | grammars/lavender.cson | kvverti/language-lavender | 0 | # If this is your first time writing a language grammar, check out:
# - https://flight-manual.atom.io/hacking-atom/sections/creating-a-grammar/
scopeName: 'source.lavender'
name: 'Lavender'
fileTypes: [
'lv'
]
foldingStartMarker: '\\(|\\[|\\{'
foldingStopMarker: '\\)|\\]|\\}'
patterns: [
{ include: '#shebang' }
{ include: '#commandImport' }
{ include: '#commandQuit' }
{ include: '#commandBasic' }
{ include: '#expression' }
]
repository:
shebang:
comment: 'Unix shebang line'
match: '^#!.*$'
name: 'comment.shebang.lavender'
comment:
comment: 'Single line comment'
match: '\'.*$'
name: 'comment.line.quote.lavender'
namespace:
comment: 'Namespace'
match: '[a-zA-Z_][a-zA-Z_0-9]*:'
name: 'support.constant.namespace.lavender'
symbol:
comment: 'Symbolic name'
match: '[~!%\\^\\&*\\-+=|<>/?:$]+'
name: 'support.function.symbolic.lavender'
variable:
comment: 'Variable (begins lowercase or with a single underscore)'
match: '_?[a-z][a-zA-Z0-9]*'
name: 'variable.lavender'
constant:
comment: 'Constant value (begins uppercase or with two underscores)'
match: '_?[A-Z_][a-zA-Z_0-9]*'
name: 'support.constant.value.lavender'
number:
comment: 'Number'
match: '\\d*\\.?\\d+([eE][+-]?\\d+)?'
name: 'constant.numeric.lavender'
string:
comment: 'String'
begin: '"'
end: '"'
name: 'string.quoted.double.lavender'
patterns: [
{
comment: 'String escapes'
match: '\\\\.'
name: 'keyword.operator.string-escape.lavender'
}
]
funcValue:
comment: 'Function value'
match: '\\\\((?:[a-zA-Z_][a-zA-Z_0-9]*):)?((?:[a-zA-Z_][a-zA-Z_0-9]*)|[~!%\\^\\&*\\-+=|<>/?:$]+)\\\\?'
name: 'meta.function-value.lavender'
captures:
1: name: 'support.constant.namespace.lavender'
2: name: 'support.function.value.lavender'
funcCall:
comment: 'Function value when calling'
match: '[a-zA-Z_][a-zA-Z_0-9]*\\s*(?=\\()'
name: 'support.function.call.lavender'
funcName:
comment: 'Function declared name'
match: '([uir]_)?((?:[a-zA-Z_][a-zA-Z_0-9]*)|[~!%\\^\\&*\\-+=|<>/?:$]+)'
name: 'meta.function.name.lavender'
captures:
1: name: 'keyword.control.function.prefix.lavender'
2: name: 'support.function.declaration.lavender'
argList:
comment: 'Function parameter list'
begin: '\\('
end: '\\)'
beginCaptures:
0: name: 'punctuation.params.lparen.lavender'
endCaptures:
0: name: 'punctuation.params.rparen.lavender'
name: 'meta.arg-list.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{
comment: 'Formal parameter name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.formal.lavender'
}
{
comment: 'By name modifier'
match: '=>'
name: 'storage.modifier.byname.lavender'
}
{
comment: 'Varargs modifier'
match: '\\.\\.\\.'
name: 'storage.modifier.varargs.lavender'
}
]
funcSig:
comment: 'Function signature'
begin: '\\bdef\\b'
end: '(?==>)'
beginCaptures:
0: name: 'storage.type.def.lavender'
name: 'meta.function.header.lavender'
patterns: [
{ include: '#comment' }
{ include: '#letExpr' }
{ include: '#funcName' }
{ include: '#argList' }
]
letDefinition:
comment: 'Single function local definition'
begin: '\\('
end: '\\)'
name: 'meta.let-expr.definition.lavender'
beginCaptures:
1: name: 'punctuation.let-expr.lparen.lavender'
endCaptures:
1: name: 'punctuation.let-expr.rparen.lavender'
name: 'meta.let-definition.lavender'
patterns: [
{ include: '#expression' }
]
letExpr:
comment: 'Function local declarations'
begin: '\\blet\\b'
end: '(?==>)'
beginCaptures:
0: name: 'storage.type.let.lavender'
name: 'meta.let-expr.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{ include: '#letDefinition' }
{
comment: 'Function local name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.local.lavender'
}
]
doExpr:
comment: 'Imperative-style monadic comprehension'
begin: '\\bdo\\b'
end: '(?=\\})'
beginCaptures:
0: name: 'keyword.control.do.lavender'
name: 'meta.do-expr.lavender'
patterns: [
{ include: '#doBlock' }
]
doBlock:
comment: 'Do expression body'
begin: '\\{'
end: '(?=\\})'
name: 'meta.do-block.lavender'
patterns: [
{ include: '#letGenerator' }
{ include: '#expression' }
]
letGenerator:
comment: 'New value binding in do expression'
begin: '\\blet\\b'
end: '<-(?=[^~!%\\^\\&*\\-+=|<>/?:$])'
beginCaptures:
0: name: 'storage.type.let.lavender'
endCaptures:
0: name: 'keyword.control.gets.lavender'
name: 'meta.let-generator.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{
comment: 'Let bound name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.formal.lavender'
}
]
parenExpression:
comment: 'Parenthesized expression'
begin: '\\('
end: '\\)'
beginCaptures:
0: name: 'punctuation.expression.lparen.lavender'
endCaptures:
0: name: 'punctuation.expression.rparen.lavender'
name: 'meta.paren-expression.lavender'
patterns: [
{ include: '#expression' }
]
bracketExpression:
comment: 'Bracketed expression'
begin: '\\['
end: '\\]'
beginCaptures:
0: name: 'punctuation.expression.lbracket.lavender'
endCaptures:
0: name: 'punctuation.expression.rbracket.lavender'
name: 'meta.bracket-expression.lavender'
patterns: [
{ include: '#expression' }
]
braceExpression:
comment: 'Brace expression'
begin: '\\{'
end: '\\}'
beginCaptures:
0: name: 'punctuation.expression.lbrace.lavender'
endCaptures:
0: name: 'punctuation.expression.rbrace.lavender'
name: 'meta.bracket-expression.lavender'
patterns: [
{ include: '#expression' }
]
expression:
comment: 'Expression'
patterns: [
{ include: '#comment' }
{ include: '#number' }
{ include: '#string' }
{ include: '#funcValue' }
{ include: '#standardConstants' }
{ include: '#doExpr' }
{ include: '#funcSig' }
{ include: '#funcCall' }
{ include: '#keyword' }
{ include: '#namespace' }
{ include: '#variable' }
{ include: '#constant' }
{ include: '#symbol' }
{ include: '#parenExpression' }
{ include: '#bracketExpression' }
{ include: '#braceExpression' }
]
keyword:
comment: 'Keywords'
match: '(=>(?=[^~!%\\^\\&*\\-+=|<>/?:$]))|(<-(?=[^~!%\\^\\&*\\-+=|<>/?:$]))|(\\bdef\\b)|(\\bdo\\b)|(\\blet\\b)|(\\bnative\\b)'
captures:
1: name: 'keyword.control.arrow.lavender'
2: name: 'keyword.control.gets.lavender'
3: name: 'storage.type.def.lavender'
4: name: 'keyword.control.do.lavender'
5: name: 'storage.type.let.lavender'
6: name: 'keyword.control.native.lavender'
standardConstants:
comment: 'Standard constants'
match: '(true)|(false)|(undefined)'
captures:
1: name: 'support.constant.predef.true.lavender'
2: name: 'support.constant.predef.false.lavender'
3: name: 'support.constant.predef.undefined.lavender'
commandBasic:
comment: 'Pre-command'
match: '^\\s*@[a-z]*'
name: 'keyword.control.command.lavender'
commandQuit:
comment: 'Quit command'
match: '^\\s*@quit'
name: 'keyword.control.command.quit.lavender'
commandImport:
comment: 'Import command'
begin: '^\\s*(@import)(\\s+[A-Za-z0-9]*)?(\\s+using)?'
end: '$'
beginCaptures:
1: name: 'keyword.control.command.import.lavender'
2: name: 'support.constant.namespace.lavender'
3: name: 'keyword.control.command-help.using.lavender'
patterns: [
{
comment: 'Underscore'
match: '\\b_\\b'
name: 'keyword.control.underscore.lavender'
}
{
comment: 'Alias'
match: '([^,]+?)(?:(\\bas\\b)([^,]+?))?'
captures:
1: name: 'support.function.import.lavender'
2: name: 'keyword.control.command-help.as.lavender'
3: name: 'support.function.alias.lavender'
}
]
| 56178 | # If this is your first time writing a language grammar, check out:
# - https://flight-manual.atom.io/hacking-atom/sections/creating-a-grammar/
scopeName: 'source.lavender'
name: '<NAME>'
fileTypes: [
'lv'
]
foldingStartMarker: '\\(|\\[|\\{'
foldingStopMarker: '\\)|\\]|\\}'
patterns: [
{ include: '#shebang' }
{ include: '#commandImport' }
{ include: '#commandQuit' }
{ include: '#commandBasic' }
{ include: '#expression' }
]
repository:
shebang:
comment: 'Unix shebang line'
match: '^#!.*$'
name: 'comment.shebang.lavender'
comment:
comment: 'Single line comment'
match: '\'.*$'
name: 'comment.line.quote.lavender'
namespace:
comment: 'Namespace'
match: '[a-zA-Z_][a-zA-Z_0-9]*:'
name: 'support.constant.namespace.lavender'
symbol:
comment: 'Symbolic name'
match: '[~!%\\^\\&*\\-+=|<>/?:$]+'
name: 'support.function.symbolic.lavender'
variable:
comment: 'Variable (begins lowercase or with a single underscore)'
match: '_?[a-z][a-zA-Z0-9]*'
name: 'variable.lavender'
constant:
comment: 'Constant value (begins uppercase or with two underscores)'
match: '_?[A-Z_][a-zA-Z_0-9]*'
name: 'support.constant.value.lavender'
number:
comment: 'Number'
match: '\\d*\\.?\\d+([eE][+-]?\\d+)?'
name: 'constant.numeric.lavender'
string:
comment: 'String'
begin: '"'
end: '"'
name: 'string.quoted.double.lavender'
patterns: [
{
comment: 'String escapes'
match: '\\\\.'
name: 'keyword.operator.string-escape.lavender'
}
]
funcValue:
comment: 'Function value'
match: '\\\\((?:[a-zA-Z_][a-zA-Z_0-9]*):)?((?:[a-zA-Z_][a-zA-Z_0-9]*)|[~!%\\^\\&*\\-+=|<>/?:$]+)\\\\?'
name: 'meta.function-value.lavender'
captures:
1: name: 'support.constant.namespace.lavender'
2: name: 'support.function.value.lavender'
funcCall:
comment: 'Function value when calling'
match: '[a-zA-Z_][a-zA-Z_0-9]*\\s*(?=\\()'
name: 'support.function.call.lavender'
funcName:
comment: 'Function declared name'
match: '([uir]_)?((?:[a-zA-Z_][a-zA-Z_0-9]*)|[~!%\\^\\&*\\-+=|<>/?:$]+)'
name: 'meta.function.name.lavender'
captures:
1: name: 'keyword.control.function.prefix.lavender'
2: name: 'support.function.declaration.lavender'
argList:
comment: 'Function parameter list'
begin: '\\('
end: '\\)'
beginCaptures:
0: name: 'punctuation.params.lparen.lavender'
endCaptures:
0: name: 'punctuation.params.rparen.lavender'
name: 'meta.arg-list.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{
comment: 'Formal parameter name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.formal.lavender'
}
{
comment: 'By name modifier'
match: '=>'
name: 'storage.modifier.byname.lavender'
}
{
comment: 'Varargs modifier'
match: '\\.\\.\\.'
name: 'storage.modifier.varargs.lavender'
}
]
funcSig:
comment: 'Function signature'
begin: '\\bdef\\b'
end: '(?==>)'
beginCaptures:
0: name: 'storage.type.def.lavender'
name: 'meta.function.header.lavender'
patterns: [
{ include: '#comment' }
{ include: '#letExpr' }
{ include: '#funcName' }
{ include: '#argList' }
]
letDefinition:
comment: 'Single function local definition'
begin: '\\('
end: '\\)'
name: 'meta.let-expr.definition.lavender'
beginCaptures:
1: name: 'punctuation.let-expr.lparen.lavender'
endCaptures:
1: name: 'punctuation.let-expr.rparen.lavender'
name: 'meta.let-definition.lavender'
patterns: [
{ include: '#expression' }
]
letExpr:
comment: 'Function local declarations'
begin: '\\blet\\b'
end: '(?==>)'
beginCaptures:
0: name: 'storage.type.let.lavender'
name: 'meta.let-expr.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{ include: '#letDefinition' }
{
comment: 'Function local name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.local.lavender'
}
]
doExpr:
comment: 'Imperative-style monadic comprehension'
begin: '\\bdo\\b'
end: '(?=\\})'
beginCaptures:
0: name: 'keyword.control.do.lavender'
name: 'meta.do-expr.lavender'
patterns: [
{ include: '#doBlock' }
]
doBlock:
comment: 'Do expression body'
begin: '\\{'
end: '(?=\\})'
name: 'meta.do-block.lavender'
patterns: [
{ include: '#letGenerator' }
{ include: '#expression' }
]
letGenerator:
comment: 'New value binding in do expression'
begin: '\\blet\\b'
end: '<-(?=[^~!%\\^\\&*\\-+=|<>/?:$])'
beginCaptures:
0: name: 'storage.type.let.lavender'
endCaptures:
0: name: 'keyword.control.gets.lavender'
name: 'meta.let-generator.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{
comment: 'Let bound name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.formal.lavender'
}
]
parenExpression:
comment: 'Parenthesized expression'
begin: '\\('
end: '\\)'
beginCaptures:
0: name: 'punctuation.expression.lparen.lavender'
endCaptures:
0: name: 'punctuation.expression.rparen.lavender'
name: 'meta.paren-expression.lavender'
patterns: [
{ include: '#expression' }
]
bracketExpression:
comment: 'Bracketed expression'
begin: '\\['
end: '\\]'
beginCaptures:
0: name: 'punctuation.expression.lbracket.lavender'
endCaptures:
0: name: 'punctuation.expression.rbracket.lavender'
name: 'meta.bracket-expression.lavender'
patterns: [
{ include: '#expression' }
]
braceExpression:
comment: 'Brace expression'
begin: '\\{'
end: '\\}'
beginCaptures:
0: name: 'punctuation.expression.lbrace.lavender'
endCaptures:
0: name: 'punctuation.expression.rbrace.lavender'
name: 'meta.bracket-expression.lavender'
patterns: [
{ include: '#expression' }
]
expression:
comment: 'Expression'
patterns: [
{ include: '#comment' }
{ include: '#number' }
{ include: '#string' }
{ include: '#funcValue' }
{ include: '#standardConstants' }
{ include: '#doExpr' }
{ include: '#funcSig' }
{ include: '#funcCall' }
{ include: '#keyword' }
{ include: '#namespace' }
{ include: '#variable' }
{ include: '#constant' }
{ include: '#symbol' }
{ include: '#parenExpression' }
{ include: '#bracketExpression' }
{ include: '#braceExpression' }
]
keyword:
comment: 'Keywords'
match: '(=>(?=[^~!%\\^\\&*\\-+=|<>/?:$]))|(<-(?=[^~!%\\^\\&*\\-+=|<>/?:$]))|(\\bdef\\b)|(\\bdo\\b)|(\\blet\\b)|(\\bnative\\b)'
captures:
1: name: 'keyword.control.arrow.lavender'
2: name: 'keyword.control.gets.lavender'
3: name: 'storage.type.def.lavender'
4: name: 'keyword.control.do.lavender'
5: name: 'storage.type.let.lavender'
6: name: 'keyword.control.native.lavender'
standardConstants:
comment: 'Standard constants'
match: '(true)|(false)|(undefined)'
captures:
1: name: 'support.constant.predef.true.lavender'
2: name: 'support.constant.predef.false.lavender'
3: name: 'support.constant.predef.undefined.lavender'
commandBasic:
comment: 'Pre-command'
match: '^\\s*@[a-z]*'
name: 'keyword.control.command.lavender'
commandQuit:
comment: 'Quit command'
match: '^\\s*@quit'
name: 'keyword.control.command.quit.lavender'
commandImport:
comment: 'Import command'
begin: '^\\s*(@import)(\\s+[A-Za-z0-9]*)?(\\s+using)?'
end: '$'
beginCaptures:
1: name: 'keyword.control.command.import.lavender'
2: name: 'support.constant.namespace.lavender'
3: name: 'keyword.control.command-help.using.lavender'
patterns: [
{
comment: 'Underscore'
match: '\\b_\\b'
name: 'keyword.control.underscore.lavender'
}
{
comment: 'Alias'
match: '([^,]+?)(?:(\\bas\\b)([^,]+?))?'
captures:
1: name: 'support.function.import.lavender'
2: name: 'keyword.control.command-help.as.lavender'
3: name: 'support.function.alias.lavender'
}
]
| true | # If this is your first time writing a language grammar, check out:
# - https://flight-manual.atom.io/hacking-atom/sections/creating-a-grammar/
scopeName: 'source.lavender'
name: 'PI:NAME:<NAME>END_PI'
fileTypes: [
'lv'
]
foldingStartMarker: '\\(|\\[|\\{'
foldingStopMarker: '\\)|\\]|\\}'
patterns: [
{ include: '#shebang' }
{ include: '#commandImport' }
{ include: '#commandQuit' }
{ include: '#commandBasic' }
{ include: '#expression' }
]
repository:
shebang:
comment: 'Unix shebang line'
match: '^#!.*$'
name: 'comment.shebang.lavender'
comment:
comment: 'Single line comment'
match: '\'.*$'
name: 'comment.line.quote.lavender'
namespace:
comment: 'Namespace'
match: '[a-zA-Z_][a-zA-Z_0-9]*:'
name: 'support.constant.namespace.lavender'
symbol:
comment: 'Symbolic name'
match: '[~!%\\^\\&*\\-+=|<>/?:$]+'
name: 'support.function.symbolic.lavender'
variable:
comment: 'Variable (begins lowercase or with a single underscore)'
match: '_?[a-z][a-zA-Z0-9]*'
name: 'variable.lavender'
constant:
comment: 'Constant value (begins uppercase or with two underscores)'
match: '_?[A-Z_][a-zA-Z_0-9]*'
name: 'support.constant.value.lavender'
number:
comment: 'Number'
match: '\\d*\\.?\\d+([eE][+-]?\\d+)?'
name: 'constant.numeric.lavender'
string:
comment: 'String'
begin: '"'
end: '"'
name: 'string.quoted.double.lavender'
patterns: [
{
comment: 'String escapes'
match: '\\\\.'
name: 'keyword.operator.string-escape.lavender'
}
]
funcValue:
comment: 'Function value'
match: '\\\\((?:[a-zA-Z_][a-zA-Z_0-9]*):)?((?:[a-zA-Z_][a-zA-Z_0-9]*)|[~!%\\^\\&*\\-+=|<>/?:$]+)\\\\?'
name: 'meta.function-value.lavender'
captures:
1: name: 'support.constant.namespace.lavender'
2: name: 'support.function.value.lavender'
funcCall:
comment: 'Function value when calling'
match: '[a-zA-Z_][a-zA-Z_0-9]*\\s*(?=\\()'
name: 'support.function.call.lavender'
funcName:
comment: 'Function declared name'
match: '([uir]_)?((?:[a-zA-Z_][a-zA-Z_0-9]*)|[~!%\\^\\&*\\-+=|<>/?:$]+)'
name: 'meta.function.name.lavender'
captures:
1: name: 'keyword.control.function.prefix.lavender'
2: name: 'support.function.declaration.lavender'
argList:
comment: 'Function parameter list'
begin: '\\('
end: '\\)'
beginCaptures:
0: name: 'punctuation.params.lparen.lavender'
endCaptures:
0: name: 'punctuation.params.rparen.lavender'
name: 'meta.arg-list.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{
comment: 'Formal parameter name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.formal.lavender'
}
{
comment: 'By name modifier'
match: '=>'
name: 'storage.modifier.byname.lavender'
}
{
comment: 'Varargs modifier'
match: '\\.\\.\\.'
name: 'storage.modifier.varargs.lavender'
}
]
funcSig:
comment: 'Function signature'
begin: '\\bdef\\b'
end: '(?==>)'
beginCaptures:
0: name: 'storage.type.def.lavender'
name: 'meta.function.header.lavender'
patterns: [
{ include: '#comment' }
{ include: '#letExpr' }
{ include: '#funcName' }
{ include: '#argList' }
]
letDefinition:
comment: 'Single function local definition'
begin: '\\('
end: '\\)'
name: 'meta.let-expr.definition.lavender'
beginCaptures:
1: name: 'punctuation.let-expr.lparen.lavender'
endCaptures:
1: name: 'punctuation.let-expr.rparen.lavender'
name: 'meta.let-definition.lavender'
patterns: [
{ include: '#expression' }
]
letExpr:
comment: 'Function local declarations'
begin: '\\blet\\b'
end: '(?==>)'
beginCaptures:
0: name: 'storage.type.let.lavender'
name: 'meta.let-expr.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{ include: '#letDefinition' }
{
comment: 'Function local name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.local.lavender'
}
]
doExpr:
comment: 'Imperative-style monadic comprehension'
begin: '\\bdo\\b'
end: '(?=\\})'
beginCaptures:
0: name: 'keyword.control.do.lavender'
name: 'meta.do-expr.lavender'
patterns: [
{ include: '#doBlock' }
]
doBlock:
comment: 'Do expression body'
begin: '\\{'
end: '(?=\\})'
name: 'meta.do-block.lavender'
patterns: [
{ include: '#letGenerator' }
{ include: '#expression' }
]
letGenerator:
comment: 'New value binding in do expression'
begin: '\\blet\\b'
end: '<-(?=[^~!%\\^\\&*\\-+=|<>/?:$])'
beginCaptures:
0: name: 'storage.type.let.lavender'
endCaptures:
0: name: 'keyword.control.gets.lavender'
name: 'meta.let-generator.lavender'
patterns: [
{ include: '#comment' }
{ include: '#keyword' }
{
comment: 'Let bound name'
match: '[a-zA-Z_][a-zA-Z_0-9]*'
name: 'variable.parameter.formal.lavender'
}
]
parenExpression:
comment: 'Parenthesized expression'
begin: '\\('
end: '\\)'
beginCaptures:
0: name: 'punctuation.expression.lparen.lavender'
endCaptures:
0: name: 'punctuation.expression.rparen.lavender'
name: 'meta.paren-expression.lavender'
patterns: [
{ include: '#expression' }
]
bracketExpression:
comment: 'Bracketed expression'
begin: '\\['
end: '\\]'
beginCaptures:
0: name: 'punctuation.expression.lbracket.lavender'
endCaptures:
0: name: 'punctuation.expression.rbracket.lavender'
name: 'meta.bracket-expression.lavender'
patterns: [
{ include: '#expression' }
]
braceExpression:
comment: 'Brace expression'
begin: '\\{'
end: '\\}'
beginCaptures:
0: name: 'punctuation.expression.lbrace.lavender'
endCaptures:
0: name: 'punctuation.expression.rbrace.lavender'
name: 'meta.bracket-expression.lavender'
patterns: [
{ include: '#expression' }
]
expression:
comment: 'Expression'
patterns: [
{ include: '#comment' }
{ include: '#number' }
{ include: '#string' }
{ include: '#funcValue' }
{ include: '#standardConstants' }
{ include: '#doExpr' }
{ include: '#funcSig' }
{ include: '#funcCall' }
{ include: '#keyword' }
{ include: '#namespace' }
{ include: '#variable' }
{ include: '#constant' }
{ include: '#symbol' }
{ include: '#parenExpression' }
{ include: '#bracketExpression' }
{ include: '#braceExpression' }
]
keyword:
comment: 'Keywords'
match: '(=>(?=[^~!%\\^\\&*\\-+=|<>/?:$]))|(<-(?=[^~!%\\^\\&*\\-+=|<>/?:$]))|(\\bdef\\b)|(\\bdo\\b)|(\\blet\\b)|(\\bnative\\b)'
captures:
1: name: 'keyword.control.arrow.lavender'
2: name: 'keyword.control.gets.lavender'
3: name: 'storage.type.def.lavender'
4: name: 'keyword.control.do.lavender'
5: name: 'storage.type.let.lavender'
6: name: 'keyword.control.native.lavender'
standardConstants:
comment: 'Standard constants'
match: '(true)|(false)|(undefined)'
captures:
1: name: 'support.constant.predef.true.lavender'
2: name: 'support.constant.predef.false.lavender'
3: name: 'support.constant.predef.undefined.lavender'
commandBasic:
comment: 'Pre-command'
match: '^\\s*@[a-z]*'
name: 'keyword.control.command.lavender'
commandQuit:
comment: 'Quit command'
match: '^\\s*@quit'
name: 'keyword.control.command.quit.lavender'
commandImport:
comment: 'Import command'
begin: '^\\s*(@import)(\\s+[A-Za-z0-9]*)?(\\s+using)?'
end: '$'
beginCaptures:
1: name: 'keyword.control.command.import.lavender'
2: name: 'support.constant.namespace.lavender'
3: name: 'keyword.control.command-help.using.lavender'
patterns: [
{
comment: 'Underscore'
match: '\\b_\\b'
name: 'keyword.control.underscore.lavender'
}
{
comment: 'Alias'
match: '([^,]+?)(?:(\\bas\\b)([^,]+?))?'
captures:
1: name: 'support.function.import.lavender'
2: name: 'keyword.control.command-help.as.lavender'
3: name: 'support.function.alias.lavender'
}
]
|
[
{
"context": "work.Socket.emit 'ReconnectLogin', {\"username\" : \"per\", \"password\": \"per\"}\n console.log \"Reconnect",
"end": 6164,
"score": 0.9884327054023743,
"start": 6161,
"tag": "USERNAME",
"value": "per"
},
{
"context": "econnectLogin', {\"username\" : \"per\", \"password\": \"per\"}\n console.log \"Reconnected!\"\n\n return\n\n\n",
"end": 6183,
"score": 0.9995244145393372,
"start": 6180,
"tag": "PASSWORD",
"value": "per"
},
{
"context": "mGame.Network.Socket.emit 'Login', {\"username\" : \"per\", \"password\": \"per\"}\n GothamGame.Network.Socke",
"end": 6461,
"score": 0.9975981712341309,
"start": 6458,
"tag": "USERNAME",
"value": "per"
},
{
"context": "t.emit 'Login', {\"username\" : \"per\", \"password\": \"per\"}\n GothamGame.Network.Socket.on 'Login', (repl",
"end": 6480,
"score": 0.9995185136795044,
"start": 6477,
"tag": "PASSWORD",
"value": "per"
}
] | GOTHAM/Game/src/main.coffee | perara/gotham | 0 | Gotham = require '../../GameFramework/src/Gotham.coffee'
GothamGame = require './GothamGame.coffee'
require './dependencies/jquery-ui.min'
window.moment = require './dependencies/moment.min'
setup =
started: false
preloadFonts: (_c)->
# Just return if user is IE:
userAgent = userAgent or navigator.userAgent
if userAgent.indexOf('MSIE ') > -1 or userAgent.indexOf('Trident/') > -1
_c()
return false
done = false
# Export Google WebFont Config
window.WebFontConfig =
# Load some fonts from google
google:
families: ['Inconsolata', 'Pacifico', 'Orbitron', 'Droid Serif']
# ... you can do something here if you'd like
active: () ->
if not done
done = true
_c()
# Create a timeout if WebFonts hangs.
setTimeout window.WebFontConfig.active(), 5000
# Create script tag matching protocol
s = document.createElement 'script'
s.src = "#{if document.location.protocol is 'https:' then 'https' else 'http'}://ajax.googleapis.com/ajax/libs/webfont/1/webfont.js"
s.type = 'text/javascript'
s.async = 'true'
# Insert it before the first script tag
s0 = (document.getElementsByTagName 'script')[0]
s0.parentNode.insertBefore s, s0
preload: ->
# User Management
Gotham.Preload.image("/assets/img/user_management_background.jpg", "user_management_background", "image")
Gotham.Preload.image("/assets/img/user_management_frame.png", "user_management_frame", "image")
Gotham.Preload.image("/assets/img/user_management_network_item.png","user_management_network_item", "image")
Gotham.Preload.image("/assets/img/user_mangement_host.png","user_mangement_host", "image")
# Shop Background
Gotham.Preload.image("/assets/img/shop_background.jpg", "shop_background", "image")
# Mission
Gotham.Preload.image("/assets/img/mission_background.jpg","mission_background", "image")
Gotham.Preload.image("/assets/img/iron_button.jpg","iron_button", "image")
Gotham.Preload.image("/assets/img/mission_spacer.png","mission_spacer", "image")
Gotham.Preload.image("/assets/img/mission_item.png","mission_item", "image")
Gotham.Preload.image("/assets/img/user_management_frame.png", "mission_frame", "image")
# World Map
Gotham.Preload.image("/assets/img/node_details.png","node_details", "image")
Gotham.Preload.image("/assets/img/map_marker.png", "map_marker", "image")
Gotham.Preload.image("/assets/img/map_marker_deactivated.png", "map_marker_deactivated", "image")
Gotham.Preload.json("/assets/json/json.json", "map")
Gotham.Preload.image("/assets/img/sea_background.png", "sea_background", "image")
Gotham.Preload.image("/assets/img/sun.png", "sun", "image")
# Bar
Gotham.Preload.image("/assets/img/bottombar.png", "bottomBar", "image")
Gotham.Preload.image("/assets/img/sidebar.png", "sidebar", "image")
Gotham.Preload.image("/assets/img/topbar.png", "topBar", "image")
# Bar icons
Gotham.Preload.image("/assets/img/home.png", "home", "image")
Gotham.Preload.image("/assets/img/mission.png", "mission", "image")
Gotham.Preload.image("/assets/img/menu.png", "menu", "image")
Gotham.Preload.image("/assets/img/shop.png", "shop", "image")
Gotham.Preload.image("/assets/img/settings.png", "settings", "image")
Gotham.Preload.image("/assets/img/help.png", "help", "image")
Gotham.Preload.image("/assets/img/attack.png", "attack", "image")
Gotham.Preload.image("/assets/img/cable.png", "cable", "image")
Gotham.Preload.image("/assets/img/user.png", "user", "image")
# Menu
Gotham.Preload.image("/assets/img/menu_button.png", "menu_button", "image")
Gotham.Preload.image("/assets/img/menu_button_hover.png", "menu_button_hover", "image")
Gotham.Preload.image("/assets/img/menu_background.jpg", "menu_background", "image")
Gotham.Preload.image("/assets/img/menu_background2.jpg", "menu_background2", "image")
Gotham.Preload.image("/assets/img/about_background.jpg", "about_background", "image")
Gotham.Preload.mp3("./assets/audio/menu_theme.mp3", "menu_theme")
Gotham.Preload.mp3("./assets/audio/button_click_1.mp3", "button_click_1")
# Settings
Gotham.Preload.image("/assets/img/settings_background.jpg", "settings_background", "image")
Gotham.Preload.image("/assets/img/settings_close.png", "settings_close", "image")
Gotham.Preload.image("/assets/img/slider_background.png", "slider_background", "image")
#NodeList
Gotham.Preload.image("/assets/img/nodelist_background.jpg", "nodelist_background", "image")
#Terminal
Gotham.Preload.image("/assets/img/terminal_background.png", "terminal_background", "image")
networkPreload: ->
socket = GothamGame.Network
Gotham.Preload.network("GetNodes", Gotham.Database.table("node"), socket)
Gotham.Preload.network("GetCables", Gotham.Database.table("cable"), socket)
Gotham.Preload.network("GetUser", Gotham.Database.table("user"), socket)
Gotham.Preload.network("GetMission", Gotham.Database.table("mission"), socket)
startGame: ->
# Create Scenes
scene_World = new GothamGame.Scenes.World 0xffffff, true #0x333333, true
scene_Menu = new GothamGame.Scenes.Menu 0x000000, true
# Add Scenes to renderer
GothamGame.Renderer.addScene("World", scene_World)
GothamGame.Renderer.addScene("Menu", scene_Menu)
# Transfer all flying loading documents from Loading Scene to Menu Scene
scene_Menu.documentContainer.addChild GothamGame.Renderer.getScene("Loading").documentContainer
# Set Menu Scene
GothamGame.Renderer.setScene("Menu")
startNetwork: (callback) ->
GothamGame.Network = new Gotham.Network location.hostname, 8081
GothamGame.Network.connect()
console.log "Connecting to #{location.hostname}:8081 ..."
GothamGame.Network.onConnect = ->
console.log "Connected!"
callback(GothamGame.Network)
GothamGame.Network.onReconnecting = ->
console.log "Attempting to reconnect"
GothamGame.Network.onReconnect = ->
GothamGame.Network.Socket.emit 'ReconnectLogin', {"username" : "per", "password": "per"}
console.log "Reconnected!"
return
# Preload Google Fonts
setup.preloadFonts ->
# Start networking, Callback to preload when done
setup.startNetwork ->
# Preload Assets
setup.preload()
GothamGame.Network.Socket.emit 'Login', {"username" : "per", "password": "per"}
GothamGame.Network.Socket.on 'Login', (reply) ->
if reply.status == 200
# Start Network Preloading
setup.networkPreload()
scene_Loading = new GothamGame.Scenes.Loading 0x3490CF, true
GothamGame.Renderer.addScene("Loading", scene_Loading)
# Set Start Scene
GothamGame.Renderer.setScene("Loading")
Gotham.Preload.onLoad = (source,type, name, percent) ->
scene_Loading.addAsset name, type, Math.round(percent)
#console.log("Preload: " + percent + "%")
Gotham.Preload.onComplete = () ->
console.log "Preload: Complete.. Starting Game"
#Gotham.Tween.clear()
if not setup.started
setup.startGame()
setup.started = true
| 137076 | Gotham = require '../../GameFramework/src/Gotham.coffee'
GothamGame = require './GothamGame.coffee'
require './dependencies/jquery-ui.min'
window.moment = require './dependencies/moment.min'
setup =
started: false
preloadFonts: (_c)->
# Just return if user is IE:
userAgent = userAgent or navigator.userAgent
if userAgent.indexOf('MSIE ') > -1 or userAgent.indexOf('Trident/') > -1
_c()
return false
done = false
# Export Google WebFont Config
window.WebFontConfig =
# Load some fonts from google
google:
families: ['Inconsolata', 'Pacifico', 'Orbitron', 'Droid Serif']
# ... you can do something here if you'd like
active: () ->
if not done
done = true
_c()
# Create a timeout if WebFonts hangs.
setTimeout window.WebFontConfig.active(), 5000
# Create script tag matching protocol
s = document.createElement 'script'
s.src = "#{if document.location.protocol is 'https:' then 'https' else 'http'}://ajax.googleapis.com/ajax/libs/webfont/1/webfont.js"
s.type = 'text/javascript'
s.async = 'true'
# Insert it before the first script tag
s0 = (document.getElementsByTagName 'script')[0]
s0.parentNode.insertBefore s, s0
preload: ->
# User Management
Gotham.Preload.image("/assets/img/user_management_background.jpg", "user_management_background", "image")
Gotham.Preload.image("/assets/img/user_management_frame.png", "user_management_frame", "image")
Gotham.Preload.image("/assets/img/user_management_network_item.png","user_management_network_item", "image")
Gotham.Preload.image("/assets/img/user_mangement_host.png","user_mangement_host", "image")
# Shop Background
Gotham.Preload.image("/assets/img/shop_background.jpg", "shop_background", "image")
# Mission
Gotham.Preload.image("/assets/img/mission_background.jpg","mission_background", "image")
Gotham.Preload.image("/assets/img/iron_button.jpg","iron_button", "image")
Gotham.Preload.image("/assets/img/mission_spacer.png","mission_spacer", "image")
Gotham.Preload.image("/assets/img/mission_item.png","mission_item", "image")
Gotham.Preload.image("/assets/img/user_management_frame.png", "mission_frame", "image")
# World Map
Gotham.Preload.image("/assets/img/node_details.png","node_details", "image")
Gotham.Preload.image("/assets/img/map_marker.png", "map_marker", "image")
Gotham.Preload.image("/assets/img/map_marker_deactivated.png", "map_marker_deactivated", "image")
Gotham.Preload.json("/assets/json/json.json", "map")
Gotham.Preload.image("/assets/img/sea_background.png", "sea_background", "image")
Gotham.Preload.image("/assets/img/sun.png", "sun", "image")
# Bar
Gotham.Preload.image("/assets/img/bottombar.png", "bottomBar", "image")
Gotham.Preload.image("/assets/img/sidebar.png", "sidebar", "image")
Gotham.Preload.image("/assets/img/topbar.png", "topBar", "image")
# Bar icons
Gotham.Preload.image("/assets/img/home.png", "home", "image")
Gotham.Preload.image("/assets/img/mission.png", "mission", "image")
Gotham.Preload.image("/assets/img/menu.png", "menu", "image")
Gotham.Preload.image("/assets/img/shop.png", "shop", "image")
Gotham.Preload.image("/assets/img/settings.png", "settings", "image")
Gotham.Preload.image("/assets/img/help.png", "help", "image")
Gotham.Preload.image("/assets/img/attack.png", "attack", "image")
Gotham.Preload.image("/assets/img/cable.png", "cable", "image")
Gotham.Preload.image("/assets/img/user.png", "user", "image")
# Menu
Gotham.Preload.image("/assets/img/menu_button.png", "menu_button", "image")
Gotham.Preload.image("/assets/img/menu_button_hover.png", "menu_button_hover", "image")
Gotham.Preload.image("/assets/img/menu_background.jpg", "menu_background", "image")
Gotham.Preload.image("/assets/img/menu_background2.jpg", "menu_background2", "image")
Gotham.Preload.image("/assets/img/about_background.jpg", "about_background", "image")
Gotham.Preload.mp3("./assets/audio/menu_theme.mp3", "menu_theme")
Gotham.Preload.mp3("./assets/audio/button_click_1.mp3", "button_click_1")
# Settings
Gotham.Preload.image("/assets/img/settings_background.jpg", "settings_background", "image")
Gotham.Preload.image("/assets/img/settings_close.png", "settings_close", "image")
Gotham.Preload.image("/assets/img/slider_background.png", "slider_background", "image")
#NodeList
Gotham.Preload.image("/assets/img/nodelist_background.jpg", "nodelist_background", "image")
#Terminal
Gotham.Preload.image("/assets/img/terminal_background.png", "terminal_background", "image")
networkPreload: ->
socket = GothamGame.Network
Gotham.Preload.network("GetNodes", Gotham.Database.table("node"), socket)
Gotham.Preload.network("GetCables", Gotham.Database.table("cable"), socket)
Gotham.Preload.network("GetUser", Gotham.Database.table("user"), socket)
Gotham.Preload.network("GetMission", Gotham.Database.table("mission"), socket)
startGame: ->
# Create Scenes
scene_World = new GothamGame.Scenes.World 0xffffff, true #0x333333, true
scene_Menu = new GothamGame.Scenes.Menu 0x000000, true
# Add Scenes to renderer
GothamGame.Renderer.addScene("World", scene_World)
GothamGame.Renderer.addScene("Menu", scene_Menu)
# Transfer all flying loading documents from Loading Scene to Menu Scene
scene_Menu.documentContainer.addChild GothamGame.Renderer.getScene("Loading").documentContainer
# Set Menu Scene
GothamGame.Renderer.setScene("Menu")
startNetwork: (callback) ->
GothamGame.Network = new Gotham.Network location.hostname, 8081
GothamGame.Network.connect()
console.log "Connecting to #{location.hostname}:8081 ..."
GothamGame.Network.onConnect = ->
console.log "Connected!"
callback(GothamGame.Network)
GothamGame.Network.onReconnecting = ->
console.log "Attempting to reconnect"
GothamGame.Network.onReconnect = ->
GothamGame.Network.Socket.emit 'ReconnectLogin', {"username" : "per", "password": "<PASSWORD>"}
console.log "Reconnected!"
return
# Preload Google Fonts
setup.preloadFonts ->
# Start networking, Callback to preload when done
setup.startNetwork ->
# Preload Assets
setup.preload()
GothamGame.Network.Socket.emit 'Login', {"username" : "per", "password": "<PASSWORD>"}
GothamGame.Network.Socket.on 'Login', (reply) ->
if reply.status == 200
# Start Network Preloading
setup.networkPreload()
scene_Loading = new GothamGame.Scenes.Loading 0x3490CF, true
GothamGame.Renderer.addScene("Loading", scene_Loading)
# Set Start Scene
GothamGame.Renderer.setScene("Loading")
Gotham.Preload.onLoad = (source,type, name, percent) ->
scene_Loading.addAsset name, type, Math.round(percent)
#console.log("Preload: " + percent + "%")
Gotham.Preload.onComplete = () ->
console.log "Preload: Complete.. Starting Game"
#Gotham.Tween.clear()
if not setup.started
setup.startGame()
setup.started = true
| true | Gotham = require '../../GameFramework/src/Gotham.coffee'
GothamGame = require './GothamGame.coffee'
require './dependencies/jquery-ui.min'
window.moment = require './dependencies/moment.min'
setup =
started: false
preloadFonts: (_c)->
# Just return if user is IE:
userAgent = userAgent or navigator.userAgent
if userAgent.indexOf('MSIE ') > -1 or userAgent.indexOf('Trident/') > -1
_c()
return false
done = false
# Export Google WebFont Config
window.WebFontConfig =
# Load some fonts from google
google:
families: ['Inconsolata', 'Pacifico', 'Orbitron', 'Droid Serif']
# ... you can do something here if you'd like
active: () ->
if not done
done = true
_c()
# Create a timeout if WebFonts hangs.
setTimeout window.WebFontConfig.active(), 5000
# Create script tag matching protocol
s = document.createElement 'script'
s.src = "#{if document.location.protocol is 'https:' then 'https' else 'http'}://ajax.googleapis.com/ajax/libs/webfont/1/webfont.js"
s.type = 'text/javascript'
s.async = 'true'
# Insert it before the first script tag
s0 = (document.getElementsByTagName 'script')[0]
s0.parentNode.insertBefore s, s0
preload: ->
# User Management
Gotham.Preload.image("/assets/img/user_management_background.jpg", "user_management_background", "image")
Gotham.Preload.image("/assets/img/user_management_frame.png", "user_management_frame", "image")
Gotham.Preload.image("/assets/img/user_management_network_item.png","user_management_network_item", "image")
Gotham.Preload.image("/assets/img/user_mangement_host.png","user_mangement_host", "image")
# Shop Background
Gotham.Preload.image("/assets/img/shop_background.jpg", "shop_background", "image")
# Mission
Gotham.Preload.image("/assets/img/mission_background.jpg","mission_background", "image")
Gotham.Preload.image("/assets/img/iron_button.jpg","iron_button", "image")
Gotham.Preload.image("/assets/img/mission_spacer.png","mission_spacer", "image")
Gotham.Preload.image("/assets/img/mission_item.png","mission_item", "image")
Gotham.Preload.image("/assets/img/user_management_frame.png", "mission_frame", "image")
# World Map
Gotham.Preload.image("/assets/img/node_details.png","node_details", "image")
Gotham.Preload.image("/assets/img/map_marker.png", "map_marker", "image")
Gotham.Preload.image("/assets/img/map_marker_deactivated.png", "map_marker_deactivated", "image")
Gotham.Preload.json("/assets/json/json.json", "map")
Gotham.Preload.image("/assets/img/sea_background.png", "sea_background", "image")
Gotham.Preload.image("/assets/img/sun.png", "sun", "image")
# Bar
Gotham.Preload.image("/assets/img/bottombar.png", "bottomBar", "image")
Gotham.Preload.image("/assets/img/sidebar.png", "sidebar", "image")
Gotham.Preload.image("/assets/img/topbar.png", "topBar", "image")
# Bar icons
Gotham.Preload.image("/assets/img/home.png", "home", "image")
Gotham.Preload.image("/assets/img/mission.png", "mission", "image")
Gotham.Preload.image("/assets/img/menu.png", "menu", "image")
Gotham.Preload.image("/assets/img/shop.png", "shop", "image")
Gotham.Preload.image("/assets/img/settings.png", "settings", "image")
Gotham.Preload.image("/assets/img/help.png", "help", "image")
Gotham.Preload.image("/assets/img/attack.png", "attack", "image")
Gotham.Preload.image("/assets/img/cable.png", "cable", "image")
Gotham.Preload.image("/assets/img/user.png", "user", "image")
# Menu
Gotham.Preload.image("/assets/img/menu_button.png", "menu_button", "image")
Gotham.Preload.image("/assets/img/menu_button_hover.png", "menu_button_hover", "image")
Gotham.Preload.image("/assets/img/menu_background.jpg", "menu_background", "image")
Gotham.Preload.image("/assets/img/menu_background2.jpg", "menu_background2", "image")
Gotham.Preload.image("/assets/img/about_background.jpg", "about_background", "image")
Gotham.Preload.mp3("./assets/audio/menu_theme.mp3", "menu_theme")
Gotham.Preload.mp3("./assets/audio/button_click_1.mp3", "button_click_1")
# Settings
Gotham.Preload.image("/assets/img/settings_background.jpg", "settings_background", "image")
Gotham.Preload.image("/assets/img/settings_close.png", "settings_close", "image")
Gotham.Preload.image("/assets/img/slider_background.png", "slider_background", "image")
#NodeList
Gotham.Preload.image("/assets/img/nodelist_background.jpg", "nodelist_background", "image")
#Terminal
Gotham.Preload.image("/assets/img/terminal_background.png", "terminal_background", "image")
networkPreload: ->
socket = GothamGame.Network
Gotham.Preload.network("GetNodes", Gotham.Database.table("node"), socket)
Gotham.Preload.network("GetCables", Gotham.Database.table("cable"), socket)
Gotham.Preload.network("GetUser", Gotham.Database.table("user"), socket)
Gotham.Preload.network("GetMission", Gotham.Database.table("mission"), socket)
startGame: ->
# Create Scenes
scene_World = new GothamGame.Scenes.World 0xffffff, true #0x333333, true
scene_Menu = new GothamGame.Scenes.Menu 0x000000, true
# Add Scenes to renderer
GothamGame.Renderer.addScene("World", scene_World)
GothamGame.Renderer.addScene("Menu", scene_Menu)
# Transfer all flying loading documents from Loading Scene to Menu Scene
scene_Menu.documentContainer.addChild GothamGame.Renderer.getScene("Loading").documentContainer
# Set Menu Scene
GothamGame.Renderer.setScene("Menu")
startNetwork: (callback) ->
GothamGame.Network = new Gotham.Network location.hostname, 8081
GothamGame.Network.connect()
console.log "Connecting to #{location.hostname}:8081 ..."
GothamGame.Network.onConnect = ->
console.log "Connected!"
callback(GothamGame.Network)
GothamGame.Network.onReconnecting = ->
console.log "Attempting to reconnect"
GothamGame.Network.onReconnect = ->
GothamGame.Network.Socket.emit 'ReconnectLogin', {"username" : "per", "password": "PI:PASSWORD:<PASSWORD>END_PI"}
console.log "Reconnected!"
return
# Preload Google Fonts
setup.preloadFonts ->
# Start networking, Callback to preload when done
setup.startNetwork ->
# Preload Assets
setup.preload()
GothamGame.Network.Socket.emit 'Login', {"username" : "per", "password": "PI:PASSWORD:<PASSWORD>END_PI"}
GothamGame.Network.Socket.on 'Login', (reply) ->
if reply.status == 200
# Start Network Preloading
setup.networkPreload()
scene_Loading = new GothamGame.Scenes.Loading 0x3490CF, true
GothamGame.Renderer.addScene("Loading", scene_Loading)
# Set Start Scene
GothamGame.Renderer.setScene("Loading")
Gotham.Preload.onLoad = (source,type, name, percent) ->
scene_Loading.addAsset name, type, Math.round(percent)
#console.log("Preload: " + percent + "%")
Gotham.Preload.onComplete = () ->
console.log "Preload: Complete.. Starting Game"
#Gotham.Tween.clear()
if not setup.started
setup.startGame()
setup.started = true
|
[
{
"context": "stream: 'pretty' }]\n kurento:\n url: 'ws://127.0.0.1:8888/kurento'\n options:\n # access_tok",
"end": 423,
"score": 0.9993833899497986,
"start": 414,
"tag": "IP_ADDRESS",
"value": "127.0.0.1"
},
{
"context": "/kurento'\n options:\n # access_token: 'weanOshEtph7'\n failAfter: 1\n strict: true\n se",
"end": 490,
"score": 0.9990170001983643,
"start": 478,
"tag": "PASSWORD",
"value": "weanOshEtph7"
}
] | lib/config.coffee | lugati-eu/altexo-signal-server | 1 | nconf = require 'nconf'
nconf.argv()
nconf.file(config) if (config = nconf.get('config'))
module.exports = ->
nconf.defaults {
host: '0.0.0.0'
port: 80
path: '/al_chat'
auth:
me: 'http://unix:/tmp/altexo-accounts.sock:/users/auth/me/'
sentry:
url: false
logger:
name: 'altexo-signal'
streams: [{ level: 'trace', stream: 'pretty' }]
kurento:
url: 'ws://127.0.0.1:8888/kurento'
options:
# access_token: 'weanOshEtph7'
failAfter: 1
strict: true
setup: {}
}
| 179018 | nconf = require 'nconf'
nconf.argv()
nconf.file(config) if (config = nconf.get('config'))
module.exports = ->
nconf.defaults {
host: '0.0.0.0'
port: 80
path: '/al_chat'
auth:
me: 'http://unix:/tmp/altexo-accounts.sock:/users/auth/me/'
sentry:
url: false
logger:
name: 'altexo-signal'
streams: [{ level: 'trace', stream: 'pretty' }]
kurento:
url: 'ws://127.0.0.1:8888/kurento'
options:
# access_token: '<PASSWORD>'
failAfter: 1
strict: true
setup: {}
}
| true | nconf = require 'nconf'
nconf.argv()
nconf.file(config) if (config = nconf.get('config'))
module.exports = ->
nconf.defaults {
host: '0.0.0.0'
port: 80
path: '/al_chat'
auth:
me: 'http://unix:/tmp/altexo-accounts.sock:/users/auth/me/'
sentry:
url: false
logger:
name: 'altexo-signal'
streams: [{ level: 'trace', stream: 'pretty' }]
kurento:
url: 'ws://127.0.0.1:8888/kurento'
options:
# access_token: 'PI:PASSWORD:<PASSWORD>END_PI'
failAfter: 1
strict: true
setup: {}
}
|
[
{
"context": "tel', type: 'text',\n placeholder: 'hoge@example.jp'\n @div =>\n @label 'パスワード'",
"end": 1035,
"score": 0.9999053478240967,
"start": 1020,
"tag": "EMAIL",
"value": "hoge@example.jp"
},
{
"context": ", type: 'password',\n placeholder: 'password'\n @div =>\n @button click:",
"end": 1222,
"score": 0.9992576241493225,
"start": 1214,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "ame) ->\n @userId.text userId\n @userName.text userName\n @logoutButton.show()\n\n unsetTopPanel: ->\n ",
"end": 3640,
"score": 0.9847267866134644,
"start": 3632,
"tag": "USERNAME",
"value": "userName"
},
{
"context": "nel: ->\n @userId.text '-'\n @userName.text '未ログイン'\n @logoutButton.hide()\n\n showAlert: (messag",
"end": 3731,
"score": 0.47058627009391785,
"start": 3729,
"tag": "PASSWORD",
"value": "ログ"
}
] | lib/niconico-view.coffee | raccy/niconico | 0 | {$, $$$, ScrollView} = require 'atom-space-pen-views'
NiconicoApi = require './niconico-api'
NicovideCommentView = require './Niconico-comment-view'
fs = require 'fs'
module.exports =
class NiconicoView extends ScrollView
@content: ->
@div {
class: 'niconico-view native-key-bindings block'
overflow: 'auto'
tabindex: -1
}, =>
@div outlet: 'topPanel', =>
@text 'ID:'
@span outlet: 'userId', '-'
@text ' '
@span outlet: 'userName', '未ログイン'
@button
outlet: 'logoutButton', click: 'clickLogout', style: 'display:none',
'ログアウト'
@div outlet: 'alertPanel', style: 'display:none', class: 'alert'
@div outlet: 'loginPanel', style: 'display:none', =>
# @h2 'ニコニコ動画ログイン'
@form =>
@fieldset =>
@legend 'ニコニコ動画ログイン'
@div =>
@label 'メールアドレス'
@input
outlet: 'loginMail', name: 'mail_tel', type: 'text',
placeholder: 'hoge@example.jp'
@div =>
@label 'パスワード'
@input
outlet: 'loginPassword', name: 'password', type: 'password',
placeholder: 'password'
@div =>
@button click: 'clickLogin', 'ログイン'
@div outlet: 'menuPanel', style: 'display:none', =>
@form =>
@fieldset =>
@legend 'クイック視聴'
@input
outlet: 'quickMovie', name: 'quick_movie', type: 'text',
placeholder: 'lv... / co... / sm...'
@button click: 'clickQuickPlay', '視聴'
@div =>
@h3 '生放送中一覧'
@ul =>
@li '現在放送中の番組はありません。'
@p 'あとは、マイリスト一覧とか選択できるようにしたいっす。'
@div outlet: 'mylistPanel', style: 'display:none'
@div outlet: 'playPanel', style: 'display:none', =>
@button click: 'closePlay', '閉じる'
@span class: 'movie-title'
@span class: 'owner-name'
@subview 'commentView', new NicovideCommentView()
@div outlet: 'processPanel', style: 'display:none', class: 'overlayout'
# constructor: ({@rtmpPlayer, cookieStoreFile}) ->
# super
# @niconicoApi = new NiconicoApi(cookieStoreFile)
# console.log "constructor! NiconicoView"
setCookieStoreFile: (cookieStoreFile) ->
@niconicoApi.setCookieStoreFile(cookieStoreFile)
# TODO: セッション切れた後の処理をしないと
initialize: ({@rtmpPlayer, cookieStoreFile}) ->
@niconicoApi = new NiconicoApi(cookieStoreFile)
@active = null
attached: ->
@active = true
detached: ->
@active = false
# Returns an object that can be retrieved when package is activated
serialize: ->
# Tear down any state and detach
destroy: ->
@niconicoApi.destroy()
# @element.remove()
isActive: ->
@active
getTitle: ->
"ニコニコ動画"
render: ->
@startProcess 'ログイン状態を確認しています。'
@niconicoApi.getMyTop (err, data) =>
@stopProcess()
if !err?
if data.userId?
@setTopPanel(data.userId, data.userName)
@showMenu()
else
@showLogin()
else
@showAlert err
@showLogin()
# ニコニコ動画にログイン
showLogin: ->
@loginPanel.show()
showMenu: ->
@menuPanel.show()
# 初期状態に戻す
clearAll: ->
@clearAlert()
@loginPanel.hide()
@menuPanel.hide()
@mylistPanel.hide()
@playPanel.hide()
@unsetTopPanel()
@stopProcess()
startProcess: (message) ->
@processPanel.text message
@processPanel.show()
stopProcess: ->
@processPanel.hide()
setTopPanel: (userId, userName) ->
@userId.text userId
@userName.text userName
@logoutButton.show()
unsetTopPanel: ->
@userId.text '-'
@userName.text '未ログイン'
@logoutButton.hide()
showAlert: (message) ->
@alertPanel.text message
@alertPanel.show()
clearAlert: ->
@alertPanel.hide()
@alertPanel.text ''
# クリックイベント
clickLogin: (event, element) ->
@clearAlert()
unless @loginMail.val()
@showAlert 'メールアドレスを入力して下さい。'
return
unless @loginPassword.val()
@showAlert 'パスワードを入力して下さい。'
return
@startProcess 'ログイン中です・・・'
@niconicoApi.login @loginMail.val(), @loginPassword.val(), (err, data) =>
@stopProcess()
if err
# パスワードだけ初期化
@loginPassword.val('')
@showAlert err
else
@loginPanel.hide()
@loginMail.val('')
@loginPassword.val('')
# 再度rederからやり直す
@render()
clickLogout: (event, element) ->
@startProcess 'ログアウト中です・・・'
@niconicoApi.logout =>
@clearAll()
@showLogin()
clickQuickPlay: (event, element) ->
@clearAlert()
movieId = @quickMovie.val()
console.log "#{movieId} を再生します。"
if !movieId
@showAlert '番組IDを入力して下さい。'
else if /^lv\d+$/.test movieId
@startProcess '番組情報を取得中'
@niconicoApi.getLiveStatus movieId, (err, data) =>
@stopProcess()
if err
@showAlert err
else
@playMovie data
else
@showAlert '未実装です。'
playMovie: (data) ->
console.log data
rtmpdumpArgs = [
'-v',
'-r', "#{data.rtmp.url}/#{data.stream.id}",
'-C', "S:#{data.rtmp.ticket}",
'-N', data.rtmp.contents,
]
# @rtmpPlayer.play(rtmpdumpArgs)
@playPanel.find('.movie-title').text(data.stream.title)
@playPanel.find('.owner-name').text(data.stream.owner_name)
@commentView.start(data.comment)
@playPanel.show()
closePlay: ->
@rtmpPlayer.stop()
@commentView.stop()
@playPanel.hide()
| 55655 | {$, $$$, ScrollView} = require 'atom-space-pen-views'
NiconicoApi = require './niconico-api'
NicovideCommentView = require './Niconico-comment-view'
fs = require 'fs'
module.exports =
class NiconicoView extends ScrollView
@content: ->
@div {
class: 'niconico-view native-key-bindings block'
overflow: 'auto'
tabindex: -1
}, =>
@div outlet: 'topPanel', =>
@text 'ID:'
@span outlet: 'userId', '-'
@text ' '
@span outlet: 'userName', '未ログイン'
@button
outlet: 'logoutButton', click: 'clickLogout', style: 'display:none',
'ログアウト'
@div outlet: 'alertPanel', style: 'display:none', class: 'alert'
@div outlet: 'loginPanel', style: 'display:none', =>
# @h2 'ニコニコ動画ログイン'
@form =>
@fieldset =>
@legend 'ニコニコ動画ログイン'
@div =>
@label 'メールアドレス'
@input
outlet: 'loginMail', name: 'mail_tel', type: 'text',
placeholder: '<EMAIL>'
@div =>
@label 'パスワード'
@input
outlet: 'loginPassword', name: 'password', type: 'password',
placeholder: '<PASSWORD>'
@div =>
@button click: 'clickLogin', 'ログイン'
@div outlet: 'menuPanel', style: 'display:none', =>
@form =>
@fieldset =>
@legend 'クイック視聴'
@input
outlet: 'quickMovie', name: 'quick_movie', type: 'text',
placeholder: 'lv... / co... / sm...'
@button click: 'clickQuickPlay', '視聴'
@div =>
@h3 '生放送中一覧'
@ul =>
@li '現在放送中の番組はありません。'
@p 'あとは、マイリスト一覧とか選択できるようにしたいっす。'
@div outlet: 'mylistPanel', style: 'display:none'
@div outlet: 'playPanel', style: 'display:none', =>
@button click: 'closePlay', '閉じる'
@span class: 'movie-title'
@span class: 'owner-name'
@subview 'commentView', new NicovideCommentView()
@div outlet: 'processPanel', style: 'display:none', class: 'overlayout'
# constructor: ({@rtmpPlayer, cookieStoreFile}) ->
# super
# @niconicoApi = new NiconicoApi(cookieStoreFile)
# console.log "constructor! NiconicoView"
setCookieStoreFile: (cookieStoreFile) ->
@niconicoApi.setCookieStoreFile(cookieStoreFile)
# TODO: セッション切れた後の処理をしないと
initialize: ({@rtmpPlayer, cookieStoreFile}) ->
@niconicoApi = new NiconicoApi(cookieStoreFile)
@active = null
attached: ->
@active = true
detached: ->
@active = false
# Returns an object that can be retrieved when package is activated
serialize: ->
# Tear down any state and detach
destroy: ->
@niconicoApi.destroy()
# @element.remove()
isActive: ->
@active
getTitle: ->
"ニコニコ動画"
render: ->
@startProcess 'ログイン状態を確認しています。'
@niconicoApi.getMyTop (err, data) =>
@stopProcess()
if !err?
if data.userId?
@setTopPanel(data.userId, data.userName)
@showMenu()
else
@showLogin()
else
@showAlert err
@showLogin()
# ニコニコ動画にログイン
showLogin: ->
@loginPanel.show()
showMenu: ->
@menuPanel.show()
# 初期状態に戻す
clearAll: ->
@clearAlert()
@loginPanel.hide()
@menuPanel.hide()
@mylistPanel.hide()
@playPanel.hide()
@unsetTopPanel()
@stopProcess()
startProcess: (message) ->
@processPanel.text message
@processPanel.show()
stopProcess: ->
@processPanel.hide()
setTopPanel: (userId, userName) ->
@userId.text userId
@userName.text userName
@logoutButton.show()
unsetTopPanel: ->
@userId.text '-'
@userName.text '未<PASSWORD>イン'
@logoutButton.hide()
showAlert: (message) ->
@alertPanel.text message
@alertPanel.show()
clearAlert: ->
@alertPanel.hide()
@alertPanel.text ''
# クリックイベント
clickLogin: (event, element) ->
@clearAlert()
unless @loginMail.val()
@showAlert 'メールアドレスを入力して下さい。'
return
unless @loginPassword.val()
@showAlert 'パスワードを入力して下さい。'
return
@startProcess 'ログイン中です・・・'
@niconicoApi.login @loginMail.val(), @loginPassword.val(), (err, data) =>
@stopProcess()
if err
# パスワードだけ初期化
@loginPassword.val('')
@showAlert err
else
@loginPanel.hide()
@loginMail.val('')
@loginPassword.val('')
# 再度rederからやり直す
@render()
clickLogout: (event, element) ->
@startProcess 'ログアウト中です・・・'
@niconicoApi.logout =>
@clearAll()
@showLogin()
clickQuickPlay: (event, element) ->
@clearAlert()
movieId = @quickMovie.val()
console.log "#{movieId} を再生します。"
if !movieId
@showAlert '番組IDを入力して下さい。'
else if /^lv\d+$/.test movieId
@startProcess '番組情報を取得中'
@niconicoApi.getLiveStatus movieId, (err, data) =>
@stopProcess()
if err
@showAlert err
else
@playMovie data
else
@showAlert '未実装です。'
playMovie: (data) ->
console.log data
rtmpdumpArgs = [
'-v',
'-r', "#{data.rtmp.url}/#{data.stream.id}",
'-C', "S:#{data.rtmp.ticket}",
'-N', data.rtmp.contents,
]
# @rtmpPlayer.play(rtmpdumpArgs)
@playPanel.find('.movie-title').text(data.stream.title)
@playPanel.find('.owner-name').text(data.stream.owner_name)
@commentView.start(data.comment)
@playPanel.show()
closePlay: ->
@rtmpPlayer.stop()
@commentView.stop()
@playPanel.hide()
| true | {$, $$$, ScrollView} = require 'atom-space-pen-views'
NiconicoApi = require './niconico-api'
NicovideCommentView = require './Niconico-comment-view'
fs = require 'fs'
module.exports =
class NiconicoView extends ScrollView
@content: ->
@div {
class: 'niconico-view native-key-bindings block'
overflow: 'auto'
tabindex: -1
}, =>
@div outlet: 'topPanel', =>
@text 'ID:'
@span outlet: 'userId', '-'
@text ' '
@span outlet: 'userName', '未ログイン'
@button
outlet: 'logoutButton', click: 'clickLogout', style: 'display:none',
'ログアウト'
@div outlet: 'alertPanel', style: 'display:none', class: 'alert'
@div outlet: 'loginPanel', style: 'display:none', =>
# @h2 'ニコニコ動画ログイン'
@form =>
@fieldset =>
@legend 'ニコニコ動画ログイン'
@div =>
@label 'メールアドレス'
@input
outlet: 'loginMail', name: 'mail_tel', type: 'text',
placeholder: 'PI:EMAIL:<EMAIL>END_PI'
@div =>
@label 'パスワード'
@input
outlet: 'loginPassword', name: 'password', type: 'password',
placeholder: 'PI:PASSWORD:<PASSWORD>END_PI'
@div =>
@button click: 'clickLogin', 'ログイン'
@div outlet: 'menuPanel', style: 'display:none', =>
@form =>
@fieldset =>
@legend 'クイック視聴'
@input
outlet: 'quickMovie', name: 'quick_movie', type: 'text',
placeholder: 'lv... / co... / sm...'
@button click: 'clickQuickPlay', '視聴'
@div =>
@h3 '生放送中一覧'
@ul =>
@li '現在放送中の番組はありません。'
@p 'あとは、マイリスト一覧とか選択できるようにしたいっす。'
@div outlet: 'mylistPanel', style: 'display:none'
@div outlet: 'playPanel', style: 'display:none', =>
@button click: 'closePlay', '閉じる'
@span class: 'movie-title'
@span class: 'owner-name'
@subview 'commentView', new NicovideCommentView()
@div outlet: 'processPanel', style: 'display:none', class: 'overlayout'
# constructor: ({@rtmpPlayer, cookieStoreFile}) ->
# super
# @niconicoApi = new NiconicoApi(cookieStoreFile)
# console.log "constructor! NiconicoView"
setCookieStoreFile: (cookieStoreFile) ->
@niconicoApi.setCookieStoreFile(cookieStoreFile)
# TODO: セッション切れた後の処理をしないと
initialize: ({@rtmpPlayer, cookieStoreFile}) ->
@niconicoApi = new NiconicoApi(cookieStoreFile)
@active = null
attached: ->
@active = true
detached: ->
@active = false
# Returns an object that can be retrieved when package is activated
serialize: ->
# Tear down any state and detach
destroy: ->
@niconicoApi.destroy()
# @element.remove()
isActive: ->
@active
getTitle: ->
"ニコニコ動画"
render: ->
@startProcess 'ログイン状態を確認しています。'
@niconicoApi.getMyTop (err, data) =>
@stopProcess()
if !err?
if data.userId?
@setTopPanel(data.userId, data.userName)
@showMenu()
else
@showLogin()
else
@showAlert err
@showLogin()
# ニコニコ動画にログイン
showLogin: ->
@loginPanel.show()
showMenu: ->
@menuPanel.show()
# 初期状態に戻す
clearAll: ->
@clearAlert()
@loginPanel.hide()
@menuPanel.hide()
@mylistPanel.hide()
@playPanel.hide()
@unsetTopPanel()
@stopProcess()
startProcess: (message) ->
@processPanel.text message
@processPanel.show()
stopProcess: ->
@processPanel.hide()
setTopPanel: (userId, userName) ->
@userId.text userId
@userName.text userName
@logoutButton.show()
unsetTopPanel: ->
@userId.text '-'
@userName.text '未PI:PASSWORD:<PASSWORD>END_PIイン'
@logoutButton.hide()
showAlert: (message) ->
@alertPanel.text message
@alertPanel.show()
clearAlert: ->
@alertPanel.hide()
@alertPanel.text ''
# クリックイベント
clickLogin: (event, element) ->
@clearAlert()
unless @loginMail.val()
@showAlert 'メールアドレスを入力して下さい。'
return
unless @loginPassword.val()
@showAlert 'パスワードを入力して下さい。'
return
@startProcess 'ログイン中です・・・'
@niconicoApi.login @loginMail.val(), @loginPassword.val(), (err, data) =>
@stopProcess()
if err
# パスワードだけ初期化
@loginPassword.val('')
@showAlert err
else
@loginPanel.hide()
@loginMail.val('')
@loginPassword.val('')
# 再度rederからやり直す
@render()
clickLogout: (event, element) ->
@startProcess 'ログアウト中です・・・'
@niconicoApi.logout =>
@clearAll()
@showLogin()
clickQuickPlay: (event, element) ->
@clearAlert()
movieId = @quickMovie.val()
console.log "#{movieId} を再生します。"
if !movieId
@showAlert '番組IDを入力して下さい。'
else if /^lv\d+$/.test movieId
@startProcess '番組情報を取得中'
@niconicoApi.getLiveStatus movieId, (err, data) =>
@stopProcess()
if err
@showAlert err
else
@playMovie data
else
@showAlert '未実装です。'
playMovie: (data) ->
console.log data
rtmpdumpArgs = [
'-v',
'-r', "#{data.rtmp.url}/#{data.stream.id}",
'-C', "S:#{data.rtmp.ticket}",
'-N', data.rtmp.contents,
]
# @rtmpPlayer.play(rtmpdumpArgs)
@playPanel.find('.movie-title').text(data.stream.title)
@playPanel.find('.owner-name').text(data.stream.owner_name)
@commentView.start(data.comment)
@playPanel.show()
closePlay: ->
@rtmpPlayer.stop()
@commentView.stop()
@playPanel.hide()
|
[
{
"context": "er: ({uuid, token}, callback) =>\n memberToken = token\n memberUuid = uuid\n\n @_getRoomGroupStatusU",
"end": 674,
"score": 0.5467296242713928,
"start": 669,
"tag": "KEY",
"value": "token"
}
] | src/services/meshblu-authenticator-peter-party-service.coffee | octoblu/meshblu-authenticator-peter-party | 0 | Chance = require 'chance'
_ = require 'lodash'
MeshbluHttp = require 'meshblu-http'
RefResolver = require 'meshblu-json-schema-resolver'
{PeterCreator, PeterPartyToItselfSubscriber, PeterPartyToPeterSubscriber} = require 'peter-party-planner'
# Meshblu authenticator service backing the "Peter Party": registers new
# "peter" devices and adds existing member devices to the party by wiring up
# the Meshblu whitelists and subscriptions they need.
class MeshbluAuthenticatorPeterPartyService
  # @meshbluConfig - Meshblu connection/credential options; its `uuid` is the
  #                  party device's uuid and doubles as the owner uuid below.
  # @redirectUri   - stored on the instance; not referenced elsewhere in this file.
  constructor: ({@meshbluConfig, @redirectUri}) ->
    @chance = new Chance()
    @creator = new PeterCreator {ownerUUID: @meshbluConfig.uuid, peterPartyUUID: @meshbluConfig.uuid, @meshbluConfig}
    @partySubscriber = new PeterPartyToPeterSubscriber {peterPartyUUID: @meshbluConfig.uuid, @meshbluConfig}
  # Add an existing device ({uuid, token} = its Meshblu credentials) to the
  # party: resolve the shared room-group-status device, grant the member
  # view access to it, then update the member device itself (whitelists,
  # group pointers, subscriptions). Any step's error short-circuits to
  # `callback`.
  addMember: ({uuid, token}, callback) =>
    memberToken = token
    memberUuid = uuid
    @_getRoomGroupStatusUuid (error, roomGroupStatusUuid) =>
      return callback error if error?
      # NOTE(review): memberToken is passed along here, but
      # _grantMemberViewPermissionToRoomGroupStatus never destructures it —
      # confirm whether it was meant to authenticate as the member.
      @_grantMemberViewPermissionToRoomGroupStatus {memberUuid, memberToken, roomGroupStatusUuid}, (error) =>
        return callback error if error?
        @_updateMember {memberUuid, memberToken, roomGroupStatusUuid}, callback
  # Create a brand-new "peter" device with a randomly generated human name,
  # subscribe it to itself and subscribe the party to it, then yield the new
  # device record (with `resolveSrv: true` defaulted in) to `callback`.
  register: (callback) =>
    @creator.create name: @chance.name({middle_initial: true, prefix: true, suffix: true}), (error, peter) =>
      return callback @_createError({message: "Error creating peter", error}) if error?
      selfSubscriber = new PeterPartyToItselfSubscriber {@meshbluConfig, peterPartyUUID: peter.uuid}
      selfSubscriber.subscribe (error) =>
        return callback @_createError({message: "Error subscribing Peter to himself", error}) if error?
        @partySubscriber.subscribe peter.uuid, (error) =>
          return callback @_createError({message: "Error subscribing the Party to Peter", error}) if error?
          return callback null, _.defaults peter, resolveSrv: true
  # Build an Error wrapping an upstream failure: append the inner error's
  # message to `message`, and carry a `code` (an explicit `code` wins over
  # the inner error's code, if either is present).
  _createError: ({code, message, error}) =>
    message = "#{message}: (#{error.message})" if error?
    code = error.code unless code?
    error = new Error message
    error.code = code if code?
    return error
  # Look up the room-group-status device uuid: fetch our own device record,
  # resolve its JSON-schema $refs, then read
  # `genisys.customerDevices.roomGroupStatus.uuid`. Calls back with a
  # 404-coded error when that uuid is absent.
  _getRoomGroupStatusUuid: (callback) =>
    meshbluHttp = new MeshbluHttp @meshbluConfig
    resolver = new RefResolver {@meshbluConfig}
    meshbluHttp.device @meshbluConfig.uuid, (error, device) =>
      return callback @_createError({ message: "Error getting user group device", error}) if error?
      resolver.resolve device, (error, resolved) =>
        return callback @_createError({ message: "Error resolving user group $ref", error}) if error?
        roomGroupStatusUuid = _.get(resolved, 'genisys.customerDevices.roomGroupStatus.uuid')
        return callback @_createError({ message: "Error getting room group status uuid", code: 404}) if _.isEmpty(roomGroupStatusUuid)
        return callback null, roomGroupStatusUuid
  # Add `memberUuid` to the room-group-status device's discover.view and
  # configure.sent whitelists. The update is issued `as` the customer uuid
  # resolved from our own device record — presumably the device's owner;
  # TODO confirm against Meshblu's `as` semantics.
  _grantMemberViewPermissionToRoomGroupStatus: ({ memberUuid, roomGroupStatusUuid }, callback) =>
    meshbluHttp = new MeshbluHttp @meshbluConfig
    resolver = new RefResolver {@meshbluConfig}
    meshbluHttp.device @meshbluConfig.uuid, (error, device) =>
      return callback @_createError({ message: "Error getting user group device", error}) if error?
      resolver.resolve device, (error, resolved) =>
        return callback @_createError({ message: "Error resolving user group $ref", error}) if error?
        customerId = _.get(resolved, 'genisys.customerDevices.customer.uuid')
        update = {
          $addToSet:
            'meshblu.whitelists.discover.view': { uuid: memberUuid }
            'meshblu.whitelists.configure.sent': { uuid: memberUuid }
        }
        meshbluHttp.updateDangerously roomGroupStatusUuid, update, {as: customerId}, (error) =>
          return callback @_createError({message: "Error updating room group status discover whitelist", error}) if error?
          return callback()
  # Update the member device using its own credentials: whitelist the party
  # uuid for configure/broadcast, point the device at this user group and at
  # the room-group-status device, then create the same subscriptions
  # `register` sets up for a newly created peter.
  _updateMember: ({ memberUuid, memberToken, roomGroupStatusUuid }, callback) =>
    # Act as the member: its uuid/token override the service credentials.
    meshbluHttp = new MeshbluHttp _.defaults({uuid: memberUuid, token: memberToken}, @meshbluConfig)
    update = {
      $addToSet:
        'meshblu.whitelists.configure.sent': {uuid: @meshbluConfig.uuid}
        'meshblu.whitelists.configure.update': {uuid: @meshbluConfig.uuid}
        'meshblu.whitelists.broadcast.sent': {uuid: @meshbluConfig.uuid}
      $set:
        userGroup: @meshbluConfig.uuid
        'genisys.devices.user-group.uuid': @meshbluConfig.uuid
        'genisys.devices.room-group-status.uuid': roomGroupStatusUuid
    }
    meshbluHttp.updateDangerously memberUuid, update, (error) =>
      return callback @_createError({message: "Error updating peter's whitelists", error}) if error?
      selfSubscriber = new PeterPartyToItselfSubscriber {@meshbluConfig, peterPartyUUID: memberUuid}
      selfSubscriber.subscribe (error) =>
        return callback @_createError({message: "Error subscribing Peter to himself", error}) if error?
        @partySubscriber.subscribe memberUuid, (error) =>
          return callback @_createError({message: "Error subscribing the Party to Peter", error}) if error?
          return callback()
module.exports = MeshbluAuthenticatorPeterPartyService
| 152086 | Chance = require 'chance'
_ = require 'lodash'
MeshbluHttp = require 'meshblu-http'
RefResolver = require 'meshblu-json-schema-resolver'
{PeterCreator, PeterPartyToItselfSubscriber, PeterPartyToPeterSubscriber} = require 'peter-party-planner'
class MeshbluAuthenticatorPeterPartyService
constructor: ({@meshbluConfig, @redirectUri}) ->
@chance = new Chance()
@creator = new PeterCreator {ownerUUID: @meshbluConfig.uuid, peterPartyUUID: @meshbluConfig.uuid, @meshbluConfig}
@partySubscriber = new PeterPartyToPeterSubscriber {peterPartyUUID: @meshbluConfig.uuid, @meshbluConfig}
addMember: ({uuid, token}, callback) =>
memberToken = <KEY>
memberUuid = uuid
@_getRoomGroupStatusUuid (error, roomGroupStatusUuid) =>
return callback error if error?
@_grantMemberViewPermissionToRoomGroupStatus {memberUuid, memberToken, roomGroupStatusUuid}, (error) =>
return callback error if error?
@_updateMember {memberUuid, memberToken, roomGroupStatusUuid}, callback
register: (callback) =>
@creator.create name: @chance.name({middle_initial: true, prefix: true, suffix: true}), (error, peter) =>
return callback @_createError({message: "Error creating peter", error}) if error?
selfSubscriber = new PeterPartyToItselfSubscriber {@meshbluConfig, peterPartyUUID: peter.uuid}
selfSubscriber.subscribe (error) =>
return callback @_createError({message: "Error subscribing Peter to himself", error}) if error?
@partySubscriber.subscribe peter.uuid, (error) =>
return callback @_createError({message: "Error subscribing the Party to Peter", error}) if error?
return callback null, _.defaults peter, resolveSrv: true
_createError: ({code, message, error}) =>
message = "#{message}: (#{error.message})" if error?
code = error.code unless code?
error = new Error message
error.code = code if code?
return error
_getRoomGroupStatusUuid: (callback) =>
meshbluHttp = new MeshbluHttp @meshbluConfig
resolver = new RefResolver {@meshbluConfig}
meshbluHttp.device @meshbluConfig.uuid, (error, device) =>
return callback @_createError({ message: "Error getting user group device", error}) if error?
resolver.resolve device, (error, resolved) =>
return callback @_createError({ message: "Error resolving user group $ref", error}) if error?
roomGroupStatusUuid = _.get(resolved, 'genisys.customerDevices.roomGroupStatus.uuid')
return callback @_createError({ message: "Error getting room group status uuid", code: 404}) if _.isEmpty(roomGroupStatusUuid)
return callback null, roomGroupStatusUuid
_grantMemberViewPermissionToRoomGroupStatus: ({ memberUuid, roomGroupStatusUuid }, callback) =>
meshbluHttp = new MeshbluHttp @meshbluConfig
resolver = new RefResolver {@meshbluConfig}
meshbluHttp.device @meshbluConfig.uuid, (error, device) =>
return callback @_createError({ message: "Error getting user group device", error}) if error?
resolver.resolve device, (error, resolved) =>
return callback @_createError({ message: "Error resolving user group $ref", error}) if error?
customerId = _.get(resolved, 'genisys.customerDevices.customer.uuid')
update = {
$addToSet:
'meshblu.whitelists.discover.view': { uuid: memberUuid }
'meshblu.whitelists.configure.sent': { uuid: memberUuid }
}
meshbluHttp.updateDangerously roomGroupStatusUuid, update, {as: customerId}, (error) =>
return callback @_createError({message: "Error updating room group status discover whitelist", error}) if error?
return callback()
_updateMember: ({ memberUuid, memberToken, roomGroupStatusUuid }, callback) =>
meshbluHttp = new MeshbluHttp _.defaults({uuid: memberUuid, token: memberToken}, @meshbluConfig)
update = {
$addToSet:
'meshblu.whitelists.configure.sent': {uuid: @meshbluConfig.uuid}
'meshblu.whitelists.configure.update': {uuid: @meshbluConfig.uuid}
'meshblu.whitelists.broadcast.sent': {uuid: @meshbluConfig.uuid}
$set:
userGroup: @meshbluConfig.uuid
'genisys.devices.user-group.uuid': @meshbluConfig.uuid
'genisys.devices.room-group-status.uuid': roomGroupStatusUuid
}
meshbluHttp.updateDangerously memberUuid, update, (error) =>
return callback @_createError({message: "Error updating peter's whitelists", error}) if error?
selfSubscriber = new PeterPartyToItselfSubscriber {@meshbluConfig, peterPartyUUID: memberUuid}
selfSubscriber.subscribe (error) =>
return callback @_createError({message: "Error subscribing Peter to himself", error}) if error?
@partySubscriber.subscribe memberUuid, (error) =>
return callback @_createError({message: "Error subscribing the Party to Peter", error}) if error?
return callback()
module.exports = MeshbluAuthenticatorPeterPartyService
| true | Chance = require 'chance'
_ = require 'lodash'
MeshbluHttp = require 'meshblu-http'
RefResolver = require 'meshblu-json-schema-resolver'
{PeterCreator, PeterPartyToItselfSubscriber, PeterPartyToPeterSubscriber} = require 'peter-party-planner'
class MeshbluAuthenticatorPeterPartyService
constructor: ({@meshbluConfig, @redirectUri}) ->
@chance = new Chance()
@creator = new PeterCreator {ownerUUID: @meshbluConfig.uuid, peterPartyUUID: @meshbluConfig.uuid, @meshbluConfig}
@partySubscriber = new PeterPartyToPeterSubscriber {peterPartyUUID: @meshbluConfig.uuid, @meshbluConfig}
addMember: ({uuid, token}, callback) =>
memberToken = PI:KEY:<KEY>END_PI
memberUuid = uuid
@_getRoomGroupStatusUuid (error, roomGroupStatusUuid) =>
return callback error if error?
@_grantMemberViewPermissionToRoomGroupStatus {memberUuid, memberToken, roomGroupStatusUuid}, (error) =>
return callback error if error?
@_updateMember {memberUuid, memberToken, roomGroupStatusUuid}, callback
register: (callback) =>
@creator.create name: @chance.name({middle_initial: true, prefix: true, suffix: true}), (error, peter) =>
return callback @_createError({message: "Error creating peter", error}) if error?
selfSubscriber = new PeterPartyToItselfSubscriber {@meshbluConfig, peterPartyUUID: peter.uuid}
selfSubscriber.subscribe (error) =>
return callback @_createError({message: "Error subscribing Peter to himself", error}) if error?
@partySubscriber.subscribe peter.uuid, (error) =>
return callback @_createError({message: "Error subscribing the Party to Peter", error}) if error?
return callback null, _.defaults peter, resolveSrv: true
_createError: ({code, message, error}) =>
message = "#{message}: (#{error.message})" if error?
code = error.code unless code?
error = new Error message
error.code = code if code?
return error
_getRoomGroupStatusUuid: (callback) =>
meshbluHttp = new MeshbluHttp @meshbluConfig
resolver = new RefResolver {@meshbluConfig}
meshbluHttp.device @meshbluConfig.uuid, (error, device) =>
return callback @_createError({ message: "Error getting user group device", error}) if error?
resolver.resolve device, (error, resolved) =>
return callback @_createError({ message: "Error resolving user group $ref", error}) if error?
roomGroupStatusUuid = _.get(resolved, 'genisys.customerDevices.roomGroupStatus.uuid')
return callback @_createError({ message: "Error getting room group status uuid", code: 404}) if _.isEmpty(roomGroupStatusUuid)
return callback null, roomGroupStatusUuid
_grantMemberViewPermissionToRoomGroupStatus: ({ memberUuid, roomGroupStatusUuid }, callback) =>
meshbluHttp = new MeshbluHttp @meshbluConfig
resolver = new RefResolver {@meshbluConfig}
meshbluHttp.device @meshbluConfig.uuid, (error, device) =>
return callback @_createError({ message: "Error getting user group device", error}) if error?
resolver.resolve device, (error, resolved) =>
return callback @_createError({ message: "Error resolving user group $ref", error}) if error?
customerId = _.get(resolved, 'genisys.customerDevices.customer.uuid')
update = {
$addToSet:
'meshblu.whitelists.discover.view': { uuid: memberUuid }
'meshblu.whitelists.configure.sent': { uuid: memberUuid }
}
meshbluHttp.updateDangerously roomGroupStatusUuid, update, {as: customerId}, (error) =>
return callback @_createError({message: "Error updating room group status discover whitelist", error}) if error?
return callback()
_updateMember: ({ memberUuid, memberToken, roomGroupStatusUuid }, callback) =>
meshbluHttp = new MeshbluHttp _.defaults({uuid: memberUuid, token: memberToken}, @meshbluConfig)
update = {
$addToSet:
'meshblu.whitelists.configure.sent': {uuid: @meshbluConfig.uuid}
'meshblu.whitelists.configure.update': {uuid: @meshbluConfig.uuid}
'meshblu.whitelists.broadcast.sent': {uuid: @meshbluConfig.uuid}
$set:
userGroup: @meshbluConfig.uuid
'genisys.devices.user-group.uuid': @meshbluConfig.uuid
'genisys.devices.room-group-status.uuid': roomGroupStatusUuid
}
meshbluHttp.updateDangerously memberUuid, update, (error) =>
return callback @_createError({message: "Error updating peter's whitelists", error}) if error?
selfSubscriber = new PeterPartyToItselfSubscriber {@meshbluConfig, peterPartyUUID: memberUuid}
selfSubscriber.subscribe (error) =>
return callback @_createError({message: "Error subscribing Peter to himself", error}) if error?
@partySubscriber.subscribe memberUuid, (error) =>
return callback @_createError({message: "Error subscribing the Party to Peter", error}) if error?
return callback()
module.exports = MeshbluAuthenticatorPeterPartyService
|
[
{
"context": "ts\n store: \"#{tmp}/a_store\"\n password: 'mysecret'\n await store.init()\n store.init()\n .sho",
"end": 357,
"score": 0.9993584156036377,
"start": 349,
"tag": "PASSWORD",
"value": "mysecret"
},
{
"context": "ts\n store: \"#{tmp}/a_store\"\n password: 'mysecret'\n await store.init()\n await store.set a_key",
"end": 559,
"score": 0.9993619322776794,
"start": 551,
"tag": "PASSWORD",
"value": "mysecret"
},
{
"context": "ts\n store: \"#{tmp}/a_store\"\n password: 'mysecret'\n await store.init()\n await store.set\n ",
"end": 797,
"score": 0.9993448257446289,
"start": 789,
"tag": "PASSWORD",
"value": "mysecret"
},
{
"context": "ts\n store: \"#{tmp}/a_store\"\n password: 'mysecret'\n await store.init()\n await store.set\n ",
"end": 1133,
"score": 0.9992823600769043,
"start": 1125,
"tag": "PASSWORD",
"value": "mysecret"
},
{
"context": "ts\n store: \"#{tmp}/a_store\"\n password: 'mysecret'\n await store.init()\n await store.set\n ",
"end": 1447,
"score": 0.9993069171905518,
"start": 1439,
"tag": "PASSWORD",
"value": "mysecret"
},
{
"context": "ts\n store: \"#{tmp}/a_store\"\n password: 'mysecret'\n await store.init()\n await store.set",
"end": 1759,
"score": 0.9996168613433838,
"start": 1757,
"tag": "PASSWORD",
"value": "my"
},
{
"context": "ts\n store: \"#{tmp}/a_store\"\n password: 'mysecret'\n await store.init()\n await store.set\n ",
"end": 2142,
"score": 0.9993729591369629,
"start": 2134,
"tag": "PASSWORD",
"value": "mysecret"
}
] | test/secrets/index.coffee | wdavidw/node-masson | 7 |
nikita = require 'nikita'
secrets = require '../../lib/secrets'
# Exercises the encrypted secrets store: each test builds a fresh store under
# `tmp` and checks init / get / set / unset behaviour.
describe 'command configure', ->
  tmp = '/tmp/masson-test/'
  beforeEach ->
    # Reset the require cache so each test starts from fresh module state.
    require('module')._cache = {}
    # NOTE(review): mkdir/remove results are not awaited here — confirm these
    # nikita actions complete before/after each test runs.
    nikita.fs.mkdir tmp
  afterEach ->
    nikita.fs.remove tmp, recursive: true
  # A store may only be initialised once; a second init is rejected.
  it 'init', ->
    store = secrets
      store: "#{tmp}/a_store"
      password: 'mysecret'
    await store.init()
    store.init()
    .should.be.rejectedWith 'Store already created'
  # `get` with no argument returns every stored key/value.
  it 'setget all', ->
    store = secrets
      store: "#{tmp}/a_store"
      password: 'mysecret'
    await store.init()
    await store.set a_key: 'a value'
    values = await store.get()
    values.a_key.should.eql 'a value'
  # `get` accepts a dotted path and returns a single leaf value.
  it 'get key', ->
    store = secrets
      store: "#{tmp}/a_store"
      password: 'mysecret'
    await store.init()
    await store.set
      a_key: 'a value'
      b: key: 'b value'
    value = await store.get 'a_key'
    value.should.eql 'a value'
    value = await store.get 'b.key'
    value.should.eql 'b value'
  # `get` on an intermediate path returns the nested object beneath it.
  it 'get keys', ->
    store = secrets
      store: "#{tmp}/a_store"
      password: 'mysecret'
    await store.init()
    await store.set
      some: keys:
        a: 'a value'
        b: 'b value'
    values = await store.get 'some.keys'
    values.should.eql
      a: 'a value'
      b: 'b value'
  # `set` with a dotted path writes a leaf without clobbering existing keys.
  it 'set key', ->
    store = secrets
      store: "#{tmp}/a_store"
      password: 'mysecret'
    await store.init()
    await store.set
      a_key: 'a value'
    await store.set 'b.key', 'b value'
    values = await store.get()
    values.should.eql
      a_key: 'a value'
      b: key: 'b value'
  # `set` with a path and an object value stores the whole subtree.
  it 'set keys', ->
    store = secrets
      store: "#{tmp}/a_store"
      password: 'mysecret'
    await store.init()
    await store.set
      a_key: 'a value'
    await store.set 'keys',
      a: 'a value'
      b: 'b value'
    values = await store.get()
    values.should.eql
      a_key: 'a value'
      keys:
        a: 'a value'
        b: 'b value'
  # `unset` removes one leaf and leaves its siblings intact.
  it 'unset key', ->
    store = secrets
      store: "#{tmp}/a_store"
      password: 'mysecret'
    await store.init()
    await store.set
      some: keys:
        a: 'a value'
        b: 'b value'
    await store.unset 'some.keys.a'
    values = await store.get()
    values.should.eql
      some: keys:
        b: 'b value'
| 106872 |
nikita = require 'nikita'
secrets = require '../../lib/secrets'
describe 'command configure', ->
tmp = '/tmp/masson-test/'
beforeEach ->
require('module')._cache = {}
nikita.fs.mkdir tmp
afterEach ->
nikita.fs.remove tmp, recursive: true
it 'init', ->
store = secrets
store: "#{tmp}/a_store"
password: '<PASSWORD>'
await store.init()
store.init()
.should.be.rejectedWith 'Store already created'
it 'setget all', ->
store = secrets
store: "#{tmp}/a_store"
password: '<PASSWORD>'
await store.init()
await store.set a_key: 'a value'
values = await store.get()
values.a_key.should.eql 'a value'
it 'get key', ->
store = secrets
store: "#{tmp}/a_store"
password: '<PASSWORD>'
await store.init()
await store.set
a_key: 'a value'
b: key: 'b value'
value = await store.get 'a_key'
value.should.eql 'a value'
value = await store.get 'b.key'
value.should.eql 'b value'
it 'get keys', ->
store = secrets
store: "#{tmp}/a_store"
password: '<PASSWORD>'
await store.init()
await store.set
some: keys:
a: 'a value'
b: 'b value'
values = await store.get 'some.keys'
values.should.eql
a: 'a value'
b: 'b value'
it 'set key', ->
store = secrets
store: "#{tmp}/a_store"
password: '<PASSWORD>'
await store.init()
await store.set
a_key: 'a value'
await store.set 'b.key', 'b value'
values = await store.get()
values.should.eql
a_key: 'a value'
b: key: 'b value'
it 'set keys', ->
store = secrets
store: "#{tmp}/a_store"
password: '<PASSWORD>secret'
await store.init()
await store.set
a_key: 'a value'
await store.set 'keys',
a: 'a value'
b: 'b value'
values = await store.get()
values.should.eql
a_key: 'a value'
keys:
a: 'a value'
b: 'b value'
it 'unset key', ->
store = secrets
store: "#{tmp}/a_store"
password: '<PASSWORD>'
await store.init()
await store.set
some: keys:
a: 'a value'
b: 'b value'
await store.unset 'some.keys.a'
values = await store.get()
values.should.eql
some: keys:
b: 'b value'
| true |
nikita = require 'nikita'
secrets = require '../../lib/secrets'
describe 'command configure', ->
tmp = '/tmp/masson-test/'
beforeEach ->
require('module')._cache = {}
nikita.fs.mkdir tmp
afterEach ->
nikita.fs.remove tmp, recursive: true
it 'init', ->
store = secrets
store: "#{tmp}/a_store"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
await store.init()
store.init()
.should.be.rejectedWith 'Store already created'
it 'setget all', ->
store = secrets
store: "#{tmp}/a_store"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
await store.init()
await store.set a_key: 'a value'
values = await store.get()
values.a_key.should.eql 'a value'
it 'get key', ->
store = secrets
store: "#{tmp}/a_store"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
await store.init()
await store.set
a_key: 'a value'
b: key: 'b value'
value = await store.get 'a_key'
value.should.eql 'a value'
value = await store.get 'b.key'
value.should.eql 'b value'
it 'get keys', ->
store = secrets
store: "#{tmp}/a_store"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
await store.init()
await store.set
some: keys:
a: 'a value'
b: 'b value'
values = await store.get 'some.keys'
values.should.eql
a: 'a value'
b: 'b value'
it 'set key', ->
store = secrets
store: "#{tmp}/a_store"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
await store.init()
await store.set
a_key: 'a value'
await store.set 'b.key', 'b value'
values = await store.get()
values.should.eql
a_key: 'a value'
b: key: 'b value'
it 'set keys', ->
store = secrets
store: "#{tmp}/a_store"
password: 'PI:PASSWORD:<PASSWORD>END_PIsecret'
await store.init()
await store.set
a_key: 'a value'
await store.set 'keys',
a: 'a value'
b: 'b value'
values = await store.get()
values.should.eql
a_key: 'a value'
keys:
a: 'a value'
b: 'b value'
it 'unset key', ->
store = secrets
store: "#{tmp}/a_store"
password: 'PI:PASSWORD:<PASSWORD>END_PI'
await store.init()
await store.set
some: keys:
a: 'a value'
b: 'b value'
await store.unset 'some.keys.a'
values = await store.get()
values.should.eql
some: keys:
b: 'b value'
|
[
{
"context": "# Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public Li",
"end": 43,
"score": 0.9999130368232727,
"start": 29,
"tag": "EMAIL",
"value": "contact@ppy.sh"
}
] | resources/assets/coffee/react/status-page/page.coffee | osu-katakuna/osu-katakuna-web | 5 | # Copyright (c) ppy Pty Ltd <contact@ppy.sh>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { Map } from './map'
import { Incident } from './incident'
import { Incidents } from './incidents'
import { Uptime } from './uptime'
import * as React from 'react'
import { div, span, br, strong, h1, h4, h5 } from 'react-dom-factories'
el = React.createElement
# Status-page root component: renders the header, any active incidents, the
# server map, a line chart of recent online users or total score, and the
# incident-history / uptime columns. All data comes from `window.osuStatus`.
export class Page extends React.Component
  constructor: (props) ->
    super props
    @chartArea = React.createRef()
    # NOTE(review): assumes `window.osuStatus` is populated by the server
    # before this component mounts — confirm against the page template.
    @state =
      status: window.osuStatus
      charts: window.osuStatus.uptime.graphs
      # mode
      graph: 'users' # users or score
  componentDidMount: =>
    @_stats()
  componentDidUpdate: =>
    @_stats()
  # Generic toggle handler: sets @state[mode] = time. Despite the names,
  # `mode` is a state key (here always 'graph') and `time` its new value.
  _changeViewMode: (mode, time, e) ->
    s = {}
    s[mode] = time
    @setState(s)
  # Build y-axis tick values spanning the data's y range on a
  # 1 / 2.5 / 5 / 10 / 25 / 50 / ... ladder, always prepending 0.
  _yAxisTickValues: (data) ->
    rankRange = d3.extent data, (d) => d.y
    @_allTicks = [1, 2.5, 5]
    # Extend the ladder by decades until it covers the data's maximum.
    while _.last(@_allTicks) <= _.last(rankRange)
      @_allTicks.push (10 * @_allTicks[@_allTicks.length - 3])
    ticks = [@_allTicks[0]]
    for tick in @_allTicks
      tick = Math.trunc(tick)
      if tick < rankRange[1]
        ticks[0] = tick
      else
        ticks.push tick
        break if tick < rankRange[0]
    if ticks[0] != 0
      ticks.unshift(0)
    ticks
  # Create (once) and refresh the line chart for the selected series
  # (@state.graph: online user counts or score). No-op when both series are
  # empty.
  _stats: ->
    if _.isEmpty(@state.status.online.graphs.online) && _.isEmpty(@state.status.online.graphs.score)
      return
    data = []
    if @state.graph == 'users'
      data = @state.status.online.graphs.online
    else if @state.graph == 'score'
      data = @state.status.online.graphs.score
    # x counts back from 0 (= most recent sample) into negative offsets.
    data = data.map (players, j) =>
      x: j - data.length + 1
      y: players
    unless @_statsChart
      tickValues =
        x: [-12, -9, -6, -3, 0]
      domains =
        x: d3.extent(tickValues.x)
      formats =
        x: (d) =>
          if d == 0
            osu.trans('common.time.now')
          else
            osu.transChoice('common.time.hours_ago', -d)
        y: (d) =>
          osu.formatNumber(d)
      infoBoxFormats =
        x: (d) -> "#{formats.x(d)}"
      scales =
        x: d3.scaleLinear()
        y: d3.scaleLinear()
      options =
        formats: formats
        infoBoxFormats: infoBoxFormats
        scales: scales
        tickValues: tickValues
        domains: domains
        circleLine: true
        modifiers: ['status-page']
      # NOTE(review): LineChart is used as a global here (not imported) —
      # presumably provided by another bundle; verify at build time.
      @_statsChart = new LineChart(@chartArea.current, options)
      @_statsChart.margins.bottom = 65
      @_statsChart.xAxis.tickPadding 5
      $(window).on 'throttled-resize.profilePagePerformance', @_statsChart.resize
    # Y axis is recomputed on every refresh to track the current data range.
    yTickValues = @_yAxisTickValues data
    @_statsChart.options.tickValues.y = yTickValues
    @_statsChart.options.domains.y = d3.extent(yTickValues)
    @_statsChart.loadData(data)
  render: =>
    status = @state.status
    # The incidents banner is hidden unless at least one incident is active.
    activeIncidents = false
    status.incidents.map (incident) =>
      if incident.active
        activeIncidents = true
    div
      className: 'osu-layout__row osu-layout__row--page--compact'
      div null,
        div className: 'status-header',
          span className: 'status-header__logo',
            null
          div className: 'status-header__text',
            h1 className: 'status-header__title',
              strong null,
                ['osu!']
              osu.trans("status_page.header.title")
            h4 className: 'status-header__desc',
              osu.trans('status_page.header.description')
        div className: "status-incidents osu-layout__row--page-compact #{(if activeIncidents then '' else 'hidden')}",
          h1 className: 'status-incidents__title',
            osu.trans('status_page.incidents.title')
          div null,
            status.incidents.map (incident, id) =>
              if incident.active
                el Incident,
                  key: id
                  description: incident.description
                  active: incident.active
                  status: incident.status
                  date: incident.date
                  by: incident.by
        el Map,
          servers: @state.status.servers
        div className: 'osu-layout__row--page-compact',
          h1 className: 'status-info__title',
            (if @state.graph == 'users' then osu.trans('status_page.online.title.users') else osu.trans('status_page.online.title.score'))
          div className: 'chart', ref: @chartArea
          div className: 'status-info__container',
            div className: 'status-info__border',
              null
            div
              className: "status-info__data #{(if @state.graph == 'users' then 'status-info__data--active' else '')}"
              onClick: @_changeViewMode.bind(@, 'graph', 'users')
              h4 className: 'status-info__data-title',
                osu.trans('status_page.online.current')
              h1 className: 'status-info__data-amount',
                osu.formatNumber(@state.status.online.current)
            div className: 'status-info__separator',
              null
            div
              className: "status-info__data #{(if @state.graph == 'score' then 'status-info__data--active' else '')}"
              onClick: @_changeViewMode.bind(@, 'graph', 'score')
              h4 className: 'status-info__data-title',
                osu.trans('status_page.online.score')
              h1 className: 'status-info__data-amount',
                osu.formatNumber(@state.status.online.score)
        div className: 'osu-layout__col-container osu-layout__col-container--with-gutter',
          el Incidents,
            incidents: @state.status.incidents
          el Uptime,
            charts: @state.charts
| 83801 | # Copyright (c) ppy Pty Ltd <<EMAIL>>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { Map } from './map'
import { Incident } from './incident'
import { Incidents } from './incidents'
import { Uptime } from './uptime'
import * as React from 'react'
import { div, span, br, strong, h1, h4, h5 } from 'react-dom-factories'
el = React.createElement
export class Page extends React.Component
constructor: (props) ->
super props
@chartArea = React.createRef()
@state =
status: window.osuStatus
charts: window.osuStatus.uptime.graphs
# mode
graph: 'users' # users or score
componentDidMount: =>
@_stats()
componentDidUpdate: =>
@_stats()
_changeViewMode: (mode, time, e) ->
s = {}
s[mode] = time
@setState(s)
_yAxisTickValues: (data) ->
rankRange = d3.extent data, (d) => d.y
@_allTicks = [1, 2.5, 5]
while _.last(@_allTicks) <= _.last(rankRange)
@_allTicks.push (10 * @_allTicks[@_allTicks.length - 3])
ticks = [@_allTicks[0]]
for tick in @_allTicks
tick = Math.trunc(tick)
if tick < rankRange[1]
ticks[0] = tick
else
ticks.push tick
break if tick < rankRange[0]
if ticks[0] != 0
ticks.unshift(0)
ticks
_stats: ->
if _.isEmpty(@state.status.online.graphs.online) && _.isEmpty(@state.status.online.graphs.score)
return
data = []
if @state.graph == 'users'
data = @state.status.online.graphs.online
else if @state.graph == 'score'
data = @state.status.online.graphs.score
data = data.map (players, j) =>
x: j - data.length + 1
y: players
unless @_statsChart
tickValues =
x: [-12, -9, -6, -3, 0]
domains =
x: d3.extent(tickValues.x)
formats =
x: (d) =>
if d == 0
osu.trans('common.time.now')
else
osu.transChoice('common.time.hours_ago', -d)
y: (d) =>
osu.formatNumber(d)
infoBoxFormats =
x: (d) -> "#{formats.x(d)}"
scales =
x: d3.scaleLinear()
y: d3.scaleLinear()
options =
formats: formats
infoBoxFormats: infoBoxFormats
scales: scales
tickValues: tickValues
domains: domains
circleLine: true
modifiers: ['status-page']
@_statsChart = new LineChart(@chartArea.current, options)
@_statsChart.margins.bottom = 65
@_statsChart.xAxis.tickPadding 5
$(window).on 'throttled-resize.profilePagePerformance', @_statsChart.resize
yTickValues = @_yAxisTickValues data
@_statsChart.options.tickValues.y = yTickValues
@_statsChart.options.domains.y = d3.extent(yTickValues)
@_statsChart.loadData(data)
render: =>
status = @state.status
activeIncidents = false
status.incidents.map (incident) =>
if incident.active
activeIncidents = true
div
className: 'osu-layout__row osu-layout__row--page--compact'
div null,
div className: 'status-header',
span className: 'status-header__logo',
null
div className: 'status-header__text',
h1 className: 'status-header__title',
strong null,
['osu!']
osu.trans("status_page.header.title")
h4 className: 'status-header__desc',
osu.trans('status_page.header.description')
div className: "status-incidents osu-layout__row--page-compact #{(if activeIncidents then '' else 'hidden')}",
h1 className: 'status-incidents__title',
osu.trans('status_page.incidents.title')
div null,
status.incidents.map (incident, id) =>
if incident.active
el Incident,
key: id
description: incident.description
active: incident.active
status: incident.status
date: incident.date
by: incident.by
el Map,
servers: @state.status.servers
div className: 'osu-layout__row--page-compact',
h1 className: 'status-info__title',
(if @state.graph == 'users' then osu.trans('status_page.online.title.users') else osu.trans('status_page.online.title.score'))
div className: 'chart', ref: @chartArea
div className: 'status-info__container',
div className: 'status-info__border',
null
div
className: "status-info__data #{(if @state.graph == 'users' then 'status-info__data--active' else '')}"
onClick: @_changeViewMode.bind(@, 'graph', 'users')
h4 className: 'status-info__data-title',
osu.trans('status_page.online.current')
h1 className: 'status-info__data-amount',
osu.formatNumber(@state.status.online.current)
div className: 'status-info__separator',
null
div
className: "status-info__data #{(if @state.graph == 'score' then 'status-info__data--active' else '')}"
onClick: @_changeViewMode.bind(@, 'graph', 'score')
h4 className: 'status-info__data-title',
osu.trans('status_page.online.score')
h1 className: 'status-info__data-amount',
osu.formatNumber(@state.status.online.score)
div className: 'osu-layout__col-container osu-layout__col-container--with-gutter',
el Incidents,
incidents: @state.status.incidents
el Uptime,
charts: @state.charts
| true | # Copyright (c) ppy Pty Ltd <PI:EMAIL:<EMAIL>END_PI>. Licensed under the GNU Affero General Public License v3.0.
# See the LICENCE file in the repository root for full licence text.
import { Map } from './map'
import { Incident } from './incident'
import { Incidents } from './incidents'
import { Uptime } from './uptime'
import * as React from 'react'
import { div, span, br, strong, h1, h4, h5 } from 'react-dom-factories'
el = React.createElement
export class Page extends React.Component
constructor: (props) ->
super props
@chartArea = React.createRef()
@state =
status: window.osuStatus
charts: window.osuStatus.uptime.graphs
# mode
graph: 'users' # users or score
componentDidMount: =>
@_stats()
componentDidUpdate: =>
@_stats()
_changeViewMode: (mode, time, e) ->
s = {}
s[mode] = time
@setState(s)
_yAxisTickValues: (data) ->
rankRange = d3.extent data, (d) => d.y
@_allTicks = [1, 2.5, 5]
while _.last(@_allTicks) <= _.last(rankRange)
@_allTicks.push (10 * @_allTicks[@_allTicks.length - 3])
ticks = [@_allTicks[0]]
for tick in @_allTicks
tick = Math.trunc(tick)
if tick < rankRange[1]
ticks[0] = tick
else
ticks.push tick
break if tick < rankRange[0]
if ticks[0] != 0
ticks.unshift(0)
ticks
_stats: ->
if _.isEmpty(@state.status.online.graphs.online) && _.isEmpty(@state.status.online.graphs.score)
return
data = []
if @state.graph == 'users'
data = @state.status.online.graphs.online
else if @state.graph == 'score'
data = @state.status.online.graphs.score
data = data.map (players, j) =>
x: j - data.length + 1
y: players
unless @_statsChart
tickValues =
x: [-12, -9, -6, -3, 0]
domains =
x: d3.extent(tickValues.x)
formats =
x: (d) =>
if d == 0
osu.trans('common.time.now')
else
osu.transChoice('common.time.hours_ago', -d)
y: (d) =>
osu.formatNumber(d)
infoBoxFormats =
x: (d) -> "#{formats.x(d)}"
scales =
x: d3.scaleLinear()
y: d3.scaleLinear()
options =
formats: formats
infoBoxFormats: infoBoxFormats
scales: scales
tickValues: tickValues
domains: domains
circleLine: true
modifiers: ['status-page']
@_statsChart = new LineChart(@chartArea.current, options)
@_statsChart.margins.bottom = 65
@_statsChart.xAxis.tickPadding 5
$(window).on 'throttled-resize.profilePagePerformance', @_statsChart.resize
yTickValues = @_yAxisTickValues data
@_statsChart.options.tickValues.y = yTickValues
@_statsChart.options.domains.y = d3.extent(yTickValues)
@_statsChart.loadData(data)
render: =>
status = @state.status
activeIncidents = false
status.incidents.map (incident) =>
if incident.active
activeIncidents = true
div
className: 'osu-layout__row osu-layout__row--page--compact'
div null,
div className: 'status-header',
span className: 'status-header__logo',
null
div className: 'status-header__text',
h1 className: 'status-header__title',
strong null,
['osu!']
osu.trans("status_page.header.title")
h4 className: 'status-header__desc',
osu.trans('status_page.header.description')
div className: "status-incidents osu-layout__row--page-compact #{(if activeIncidents then '' else 'hidden')}",
h1 className: 'status-incidents__title',
osu.trans('status_page.incidents.title')
div null,
status.incidents.map (incident, id) =>
if incident.active
el Incident,
key: id
description: incident.description
active: incident.active
status: incident.status
date: incident.date
by: incident.by
el Map,
servers: @state.status.servers
div className: 'osu-layout__row--page-compact',
h1 className: 'status-info__title',
(if @state.graph == 'users' then osu.trans('status_page.online.title.users') else osu.trans('status_page.online.title.score'))
div className: 'chart', ref: @chartArea
div className: 'status-info__container',
div className: 'status-info__border',
null
div
className: "status-info__data #{(if @state.graph == 'users' then 'status-info__data--active' else '')}"
onClick: @_changeViewMode.bind(@, 'graph', 'users')
h4 className: 'status-info__data-title',
osu.trans('status_page.online.current')
h1 className: 'status-info__data-amount',
osu.formatNumber(@state.status.online.current)
div className: 'status-info__separator',
null
div
className: "status-info__data #{(if @state.graph == 'score' then 'status-info__data--active' else '')}"
onClick: @_changeViewMode.bind(@, 'graph', 'score')
h4 className: 'status-info__data-title',
osu.trans('status_page.online.score')
h1 className: 'status-info__data-amount',
osu.formatNumber(@state.status.online.score)
div className: 'osu-layout__col-container osu-layout__col-container--with-gutter',
el Incidents,
incidents: @state.status.incidents
el Uptime,
charts: @state.charts
|
[
{
"context": "\\\\Local\\\\Google\\\\Chrome\\\\User\\ Data\\\\PepperFlash\\\\19.0.0.226\\\\manifest.json\"\n\n# console.log \"#{__dirname}/#{pa",
"end": 625,
"score": 0.9718058705329895,
"start": 615,
"tag": "IP_ADDRESS",
"value": "19.0.0.226"
}
] | lib/main.coffee | mulian/dashy-browser | 0 | #main.coffee
#Main JS from Electron-Browser
app = require 'app'
BrowserWindow = require 'browser-window'
fs = require 'fs'
{exec} = require 'child_process'
packageFile = require '../package.json'
{settings} = packageFile
# Report crashes to our server.
require('crash-reporter').start();
# Report crashes to our server.
# require('crash-reporter').start()
# Keep a global reference of the window object, if you don't, the window will
# be closed automatically when the JavaScript object is garbage collected.
mainWindow = null
# require "C:\\Users\\wii\\AppData\\Local\\Google\\Chrome\\User\ Data\\PepperFlash\\19.0.0.226\\manifest.json"
# console.log "#{__dirname}/#{packageFile.flash.path}"
# console.log packageFile.flash.version
# initiate Flash
app.commandLine.appendSwitch 'ppapi-flash-path', "#{__dirname}/../#{packageFile.flash.path}"
app.commandLine.appendSwitch 'ppapi-flash-version', packageFile.flash.version
# Quit when all windows are closed.
app.on 'window-all-closed', ->
# On OS X it is common for applications and their menu bar
# to stay active until the user quits explicitly with Cmd + Q
if process.platform != 'darwin'
app.quit()
# This method will be called when Electron has finished
# initialization and is ready to create browser windows.
# console.log "funzt"
app.on 'ready', ->
# Create the browser window.
mainWindow = new BrowserWindow {} =
width: 1920
height: 1080
# fullscreen: true
icon: "#{__dirname}/../gfx/Dashy.ico"
plugins: true
'web-preferences':
plugins: true
'always-on-top' : false
webContents = mainWindow.webContents
webContents.enableDeviceEmulation fitToView: true
mainWindow.loadUrl('file://' + __dirname + '/../index.html');
# Open the DevTools.
mainWindow.openDevTools() if settings.debug
session = webContents.session
#execute a Native File
executeFile = (path) ->
path = "open #{path}" if process.platform == 'darwin'
exec path, (error, stdout, stderr) ->
# console.log "stdout: #{stdout}"
# console.log "stderr: #{stderr}"
if error?
# console.log "exec error: #{error}"
webContents.send "error","Beim ausführen von #{path}."
else
webContents.send "info","Datei #{path} wird ausgeführt."
# on file Download
session.on 'will-download', (event, item, downloadWebContents) ->
console.log "will-download", item
downloadFolder = "#{settings.dirUpload.dir}/download"
if not fs.existsSync downloadFolder
fs.mkdirSync downloadFolder
item.setSavePath "#{downloadFolder}/#{item.getFilename()}"
item.on 'done', (e, state) ->
if state == "completed"
webContents.send "info", "Die Datei #{item.getFilename()} wurde erfolgreich heruntergeladen und wird geöffnet."
# console.log("Download successfully");
executeFile "#{downloadFolder}/#{item.getFilename()}"
event.preventDefault();
else
webContents.send "error", "beim herunterladen von #{item.getFilename()}."
# console.log e
# console.log state
# console.log "#{downloadFolder}/#{item.getFilename()}"
webContents.send "closeCurrentWindow"
# on Window Close
mainWindow.on 'closed', ->
# Dereference the window object, usually you would store windows
# in an array if your app supports multi windows, this is the time
# when you should delete the corresponding element.
mainWindow = null
| 6990 | #main.coffee
#Main JS from Electron-Browser
app = require 'app'
BrowserWindow = require 'browser-window'
fs = require 'fs'
{exec} = require 'child_process'
packageFile = require '../package.json'
{settings} = packageFile
# Report crashes to our server.
require('crash-reporter').start();
# Report crashes to our server.
# require('crash-reporter').start()
# Keep a global reference of the window object, if you don't, the window will
# be closed automatically when the JavaScript object is garbage collected.
mainWindow = null
# require "C:\\Users\\wii\\AppData\\Local\\Google\\Chrome\\User\ Data\\PepperFlash\\172.16.31.10\\manifest.json"
# console.log "#{__dirname}/#{packageFile.flash.path}"
# console.log packageFile.flash.version
# initiate Flash
app.commandLine.appendSwitch 'ppapi-flash-path', "#{__dirname}/../#{packageFile.flash.path}"
app.commandLine.appendSwitch 'ppapi-flash-version', packageFile.flash.version
# Quit when all windows are closed.
app.on 'window-all-closed', ->
# On OS X it is common for applications and their menu bar
# to stay active until the user quits explicitly with Cmd + Q
if process.platform != 'darwin'
app.quit()
# This method will be called when Electron has finished
# initialization and is ready to create browser windows.
# console.log "funzt"
app.on 'ready', ->
# Create the browser window.
mainWindow = new BrowserWindow {} =
width: 1920
height: 1080
# fullscreen: true
icon: "#{__dirname}/../gfx/Dashy.ico"
plugins: true
'web-preferences':
plugins: true
'always-on-top' : false
webContents = mainWindow.webContents
webContents.enableDeviceEmulation fitToView: true
mainWindow.loadUrl('file://' + __dirname + '/../index.html');
# Open the DevTools.
mainWindow.openDevTools() if settings.debug
session = webContents.session
#execute a Native File
executeFile = (path) ->
path = "open #{path}" if process.platform == 'darwin'
exec path, (error, stdout, stderr) ->
# console.log "stdout: #{stdout}"
# console.log "stderr: #{stderr}"
if error?
# console.log "exec error: #{error}"
webContents.send "error","Beim ausführen von #{path}."
else
webContents.send "info","Datei #{path} wird ausgeführt."
# on file Download
session.on 'will-download', (event, item, downloadWebContents) ->
console.log "will-download", item
downloadFolder = "#{settings.dirUpload.dir}/download"
if not fs.existsSync downloadFolder
fs.mkdirSync downloadFolder
item.setSavePath "#{downloadFolder}/#{item.getFilename()}"
item.on 'done', (e, state) ->
if state == "completed"
webContents.send "info", "Die Datei #{item.getFilename()} wurde erfolgreich heruntergeladen und wird geöffnet."
# console.log("Download successfully");
executeFile "#{downloadFolder}/#{item.getFilename()}"
event.preventDefault();
else
webContents.send "error", "beim herunterladen von #{item.getFilename()}."
# console.log e
# console.log state
# console.log "#{downloadFolder}/#{item.getFilename()}"
webContents.send "closeCurrentWindow"
# on Window Close
mainWindow.on 'closed', ->
# Dereference the window object, usually you would store windows
# in an array if your app supports multi windows, this is the time
# when you should delete the corresponding element.
mainWindow = null
| true | #main.coffee
#Main JS from Electron-Browser
app = require 'app'
BrowserWindow = require 'browser-window'
fs = require 'fs'
{exec} = require 'child_process'
packageFile = require '../package.json'
{settings} = packageFile
# Report crashes to our server.
require('crash-reporter').start();
# Report crashes to our server.
# require('crash-reporter').start()
# Keep a global reference of the window object, if you don't, the window will
# be closed automatically when the JavaScript object is garbage collected.
mainWindow = null
# require "C:\\Users\\wii\\AppData\\Local\\Google\\Chrome\\User\ Data\\PepperFlash\\PI:IP_ADDRESS:172.16.31.10END_PI\\manifest.json"
# console.log "#{__dirname}/#{packageFile.flash.path}"
# console.log packageFile.flash.version
# initiate Flash
app.commandLine.appendSwitch 'ppapi-flash-path', "#{__dirname}/../#{packageFile.flash.path}"
app.commandLine.appendSwitch 'ppapi-flash-version', packageFile.flash.version
# Quit when all windows are closed.
app.on 'window-all-closed', ->
# On OS X it is common for applications and their menu bar
# to stay active until the user quits explicitly with Cmd + Q
if process.platform != 'darwin'
app.quit()
# This method will be called when Electron has finished
# initialization and is ready to create browser windows.
# console.log "funzt"
app.on 'ready', ->
# Create the browser window.
mainWindow = new BrowserWindow {} =
width: 1920
height: 1080
# fullscreen: true
icon: "#{__dirname}/../gfx/Dashy.ico"
plugins: true
'web-preferences':
plugins: true
'always-on-top' : false
webContents = mainWindow.webContents
webContents.enableDeviceEmulation fitToView: true
mainWindow.loadUrl('file://' + __dirname + '/../index.html');
# Open the DevTools.
mainWindow.openDevTools() if settings.debug
session = webContents.session
#execute a Native File
executeFile = (path) ->
path = "open #{path}" if process.platform == 'darwin'
exec path, (error, stdout, stderr) ->
# console.log "stdout: #{stdout}"
# console.log "stderr: #{stderr}"
if error?
# console.log "exec error: #{error}"
webContents.send "error","Beim ausführen von #{path}."
else
webContents.send "info","Datei #{path} wird ausgeführt."
# on file Download
session.on 'will-download', (event, item, downloadWebContents) ->
console.log "will-download", item
downloadFolder = "#{settings.dirUpload.dir}/download"
if not fs.existsSync downloadFolder
fs.mkdirSync downloadFolder
item.setSavePath "#{downloadFolder}/#{item.getFilename()}"
item.on 'done', (e, state) ->
if state == "completed"
webContents.send "info", "Die Datei #{item.getFilename()} wurde erfolgreich heruntergeladen und wird geöffnet."
# console.log("Download successfully");
executeFile "#{downloadFolder}/#{item.getFilename()}"
event.preventDefault();
else
webContents.send "error", "beim herunterladen von #{item.getFilename()}."
# console.log e
# console.log state
# console.log "#{downloadFolder}/#{item.getFilename()}"
webContents.send "closeCurrentWindow"
# on Window Close
mainWindow.on 'closed', ->
# Dereference the window object, usually you would store windows
# in an array if your app supports multi windows, this is the time
# when you should delete the corresponding element.
mainWindow = null
|
[
{
"context": "'11111111-1')\n expect(user.password).to.eql('password')\n expect(work.value).to.eql(55556)\n re",
"end": 282,
"score": 0.9995297789573669,
"start": 274,
"tag": "PASSWORD",
"value": "password"
},
{
"context": "y(\"user\", \"hubot sii boleta 11.111.111-1 password 50000\")\n setTimeout(done, 100)\n\n it \"should rep",
"end": 683,
"score": 0.9992032051086426,
"start": 678,
"tag": "PASSWORD",
"value": "50000"
},
{
"context": " [\"user\", \"hubot sii boleta 11.111.111-1 password 50000\"]\n [\"hubot\", \"Boleta enviada\"]\n ])\n",
"end": 840,
"score": 0.9991931915283203,
"start": 835,
"tag": "PASSWORD",
"value": "50000"
}
] | test/test.coffee | lgaticaq/hubot-sii | 1 | Helper = require("hubot-test-helper")
expect = require("chai").expect
proxyquire = require("proxyquire")
siiStub =
byLastInvoice: (user, work) ->
return new Promise (resolve, reject) ->
expect(user.rut).to.equal('11111111-1')
expect(user.password).to.eql('password')
expect(work.value).to.eql(55556)
resolve()
proxyquire("./../src/script.coffee", {sii: siiStub})
helper = new Helper("./../src/index.coffee")
describe "hubot-sii", ->
room = null
beforeEach ->
room = helper.createRoom()
afterEach ->
room.destroy()
context "valid", ->
beforeEach (done) ->
room.user.say("user", "hubot sii boleta 11.111.111-1 password 50000")
setTimeout(done, 100)
it "should reply", ->
expect(room.messages).to.eql([
["user", "hubot sii boleta 11.111.111-1 password 50000"]
["hubot", "Boleta enviada"]
])
| 147223 | Helper = require("hubot-test-helper")
expect = require("chai").expect
proxyquire = require("proxyquire")
siiStub =
byLastInvoice: (user, work) ->
return new Promise (resolve, reject) ->
expect(user.rut).to.equal('11111111-1')
expect(user.password).to.eql('<PASSWORD>')
expect(work.value).to.eql(55556)
resolve()
proxyquire("./../src/script.coffee", {sii: siiStub})
helper = new Helper("./../src/index.coffee")
describe "hubot-sii", ->
room = null
beforeEach ->
room = helper.createRoom()
afterEach ->
room.destroy()
context "valid", ->
beforeEach (done) ->
room.user.say("user", "hubot sii boleta 11.111.111-1 password <PASSWORD>")
setTimeout(done, 100)
it "should reply", ->
expect(room.messages).to.eql([
["user", "hubot sii boleta 11.111.111-1 password <PASSWORD>"]
["hubot", "Boleta enviada"]
])
| true | Helper = require("hubot-test-helper")
expect = require("chai").expect
proxyquire = require("proxyquire")
siiStub =
byLastInvoice: (user, work) ->
return new Promise (resolve, reject) ->
expect(user.rut).to.equal('11111111-1')
expect(user.password).to.eql('PI:PASSWORD:<PASSWORD>END_PI')
expect(work.value).to.eql(55556)
resolve()
proxyquire("./../src/script.coffee", {sii: siiStub})
helper = new Helper("./../src/index.coffee")
describe "hubot-sii", ->
room = null
beforeEach ->
room = helper.createRoom()
afterEach ->
room.destroy()
context "valid", ->
beforeEach (done) ->
room.user.say("user", "hubot sii boleta 11.111.111-1 password PI:PASSWORD:<PASSWORD>END_PI")
setTimeout(done, 100)
it "should reply", ->
expect(room.messages).to.eql([
["user", "hubot sii boleta 11.111.111-1 password PI:PASSWORD:<PASSWORD>END_PI"]
["hubot", "Boleta enviada"]
])
|
[
{
"context": "js\n\n PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.",
"end": 215,
"score": 0.9998367428779602,
"start": 198,
"tag": "NAME",
"value": "Benjamin Blundell"
},
{
"context": " PXL.js\n Benjamin Blundell - ben@pxljs.com\n http://pxljs.com\n\n This so",
"end": 231,
"score": 0.9999305605888367,
"start": 218,
"tag": "EMAIL",
"value": "ben@pxljs.com"
}
] | src/util/signal.coffee | OniDaito/pxljs | 1 | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
Benjamin Blundell - ben@pxljs.com
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
- TODO
* There is no hierarchy or bubbling really. Anyone can listen
* Maybe rethink how we handle events generally? - Game engine style
Influenced by signals.js - an object where listeners and events may be added
###
util = require "./util"
{Vec2} = require "../math/math"
# ## Signal
class Signal
# **@constructor**
constructor : () ->
@listeners = []
@_pause = false
# **add**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
add : (func, context) ->
@listeners.push {f: func, c: context, o: false, g: PXL.Context}
@
# **addOnce**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
addOnce : (func, context) ->
@listeners.push {f: func, c: context, o: true, g: PXL.Context}
@
# **remove**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
remove : (func, context) ->
@del func
@
# **pause**
# - **force** - a Boolean
# - returns this
pause : (force) ->
if force?
@_pause = force
else
@_pause = !@_pause
@
# **del**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
del : (func, context) ->
for obj in @listeners
if obj.c == context
if obj.f == func
i = @listeners.indexOf(obj)
@listeners.splice(i, 1)
break
@
# **dispatch** the event. All arguments passed in are sent on to the final function
# - returns this
dispatch : () ->
if @_pause
return @
removals = []
for l in @listeners
PXL.Context.switchContext l.g
l.f.apply(l.c, arguments)
if l.o
removals.push l
for l in removals
@del l
@
module.exports =
Signal : Signal
| 181569 | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
<NAME> - <EMAIL>
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
- TODO
* There is no hierarchy or bubbling really. Anyone can listen
* Maybe rethink how we handle events generally? - Game engine style
Influenced by signals.js - an object where listeners and events may be added
###
util = require "./util"
{Vec2} = require "../math/math"
# ## Signal
class Signal
# **@constructor**
constructor : () ->
@listeners = []
@_pause = false
# **add**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
add : (func, context) ->
@listeners.push {f: func, c: context, o: false, g: PXL.Context}
@
# **addOnce**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
addOnce : (func, context) ->
@listeners.push {f: func, c: context, o: true, g: PXL.Context}
@
# **remove**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
remove : (func, context) ->
@del func
@
# **pause**
# - **force** - a Boolean
# - returns this
pause : (force) ->
if force?
@_pause = force
else
@_pause = !@_pause
@
# **del**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
del : (func, context) ->
for obj in @listeners
if obj.c == context
if obj.f == func
i = @listeners.indexOf(obj)
@listeners.splice(i, 1)
break
@
# **dispatch** the event. All arguments passed in are sent on to the final function
# - returns this
dispatch : () ->
if @_pause
return @
removals = []
for l in @listeners
PXL.Context.switchContext l.g
l.f.apply(l.c, arguments)
if l.o
removals.push l
for l in removals
@del l
@
module.exports =
Signal : Signal
| true | ###
.__
_________ __| |
\____ \ \/ / |
| |_> > <| |__
| __/__/\_ \____/
|__| \/ js
PXL.js
PI:NAME:<NAME>END_PI - PI:EMAIL:<EMAIL>END_PI
http://pxljs.com
This software is released under the MIT Licence. See LICENCE.txt for details
- TODO
* There is no hierarchy or bubbling really. Anyone can listen
* Maybe rethink how we handle events generally? - Game engine style
Influenced by signals.js - an object where listeners and events may be added
###
util = require "./util"
{Vec2} = require "../math/math"
# ## Signal
class Signal
# **@constructor**
constructor : () ->
@listeners = []
@_pause = false
# **add**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
add : (func, context) ->
@listeners.push {f: func, c: context, o: false, g: PXL.Context}
@
# **addOnce**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
addOnce : (func, context) ->
@listeners.push {f: func, c: context, o: true, g: PXL.Context}
@
# **remove**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
remove : (func, context) ->
@del func
@
# **pause**
# - **force** - a Boolean
# - returns this
pause : (force) ->
if force?
@_pause = force
else
@_pause = !@_pause
@
# **del**
# - **func** - a Function - Required
# - **context** - an Object - Required
# - returns this
del : (func, context) ->
for obj in @listeners
if obj.c == context
if obj.f == func
i = @listeners.indexOf(obj)
@listeners.splice(i, 1)
break
@
# **dispatch** the event. All arguments passed in are sent on to the final function
# - returns this
dispatch : () ->
if @_pause
return @
removals = []
for l in @listeners
PXL.Context.switchContext l.g
l.f.apply(l.c, arguments)
if l.o
removals.push l
for l in removals
@del l
@
module.exports =
Signal : Signal
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.